Fix issue for pointers to anonymous types with -fdump-ada-spec
[official-gcc.git] / gcc / fold-const.cc
blobb647e5305aac3055ab0fb8bcb8c488b13efe0344
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85 #include "asan.h"
86 #include "gimple-range.h"
88 /* Nonzero if we are folding constants inside an initializer or a C++
89 manifestly-constant-evaluated context; zero otherwise.
90 Should be used when folding in initializer enables additional
91 optimizations. */
92 int folding_initializer = 0;
94 /* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
95 otherwise.
96 Should be used when certain constructs shouldn't be optimized
97 during folding in that context. */
98 bool folding_cxx_constexpr = false;
100 /* The following constants represent a bit based encoding of GCC's
101 comparison operators. This encoding simplifies transformations
102 on relational comparison operators, such as AND and OR. */
103 enum comparison_code {
104 COMPCODE_FALSE = 0,
105 COMPCODE_LT = 1,
106 COMPCODE_EQ = 2,
107 COMPCODE_LE = 3,
108 COMPCODE_GT = 4,
109 COMPCODE_LTGT = 5,
110 COMPCODE_GE = 6,
111 COMPCODE_ORD = 7,
112 COMPCODE_UNORD = 8,
113 COMPCODE_UNLT = 9,
114 COMPCODE_UNEQ = 10,
115 COMPCODE_UNLE = 11,
116 COMPCODE_UNGT = 12,
117 COMPCODE_NE = 13,
118 COMPCODE_UNGE = 14,
119 COMPCODE_TRUE = 15
122 static bool negate_expr_p (tree);
123 static tree negate_expr (tree);
124 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
125 static enum comparison_code comparison_to_compcode (enum tree_code);
126 static enum tree_code compcode_to_comparison (enum comparison_code);
127 static bool twoval_comparison_p (tree, tree *, tree *);
128 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
129 static tree optimize_bit_field_compare (location_t, enum tree_code,
130 tree, tree, tree);
131 static bool simple_operand_p (const_tree);
132 static bool simple_operand_p_2 (tree);
133 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
134 static tree range_predecessor (tree);
135 static tree range_successor (tree);
136 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
138 tree, tree, tree, tree);
139 static tree unextend (tree, int, int, tree);
140 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
141 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
142 static tree fold_binary_op_with_conditional_arg (location_t,
143 enum tree_code, tree,
144 tree, tree,
145 tree, tree, int);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static tree fold_negate_expr (location_t, tree);
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
157 static location_t
158 expr_location_or (tree t, location_t loc)
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
164 /* Similar to protected_set_expr_location, but never modify x in place,
165 if location can and needs to be set, unshare it. */
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
179 return x;
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
189 widest_int quo;
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
195 return NULL_TREE;
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
207 static int fold_deferring_overflow_warnings;
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
214 static const char* fold_deferred_overflow_warning;
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
224 void
225 fold_defer_overflow_warnings (void)
227 ++fold_deferring_overflow_warnings;
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
242 const char *warnmsg;
243 location_t locus;
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
259 if (!issue || warnmsg == NULL)
260 return;
262 if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
263 return;
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
270 if (!issue_strict_overflow_warning (code))
271 return;
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
286 fold_undefer_overflow_warnings (false, NULL, 0);
289 /* Whether we are deferring overflow warnings. */
291 bool
292 fold_deferring_overflow_warnings_p (void)
294 return fold_deferring_overflow_warnings > 0;
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
300 void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
303 if (fold_deferring_overflow_warnings > 0)
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
316 /* Return true if the built-in mathematical function specified by CODE
317 is odd, i.e. -f(x) == f(-x). */
319 bool
320 negate_mathfn_p (combined_fn fn)
322 switch (fn)
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_ROUNDEVEN:
343 CASE_CFN_ROUNDEVEN_FN:
344 CASE_CFN_SIN:
345 CASE_CFN_SINH:
346 CASE_CFN_TAN:
347 CASE_CFN_TANH:
348 CASE_CFN_TRUNC:
349 return true;
351 CASE_CFN_LLRINT:
352 CASE_CFN_LRINT:
353 CASE_CFN_NEARBYINT:
354 CASE_CFN_RINT:
355 return !flag_rounding_math;
357 default:
358 break;
360 return false;
363 /* Check whether we may negate an integer constant T without causing
364 overflow. */
366 bool
367 may_negate_without_overflow_p (const_tree t)
369 tree type;
371 gcc_assert (TREE_CODE (t) == INTEGER_CST);
373 type = TREE_TYPE (t);
374 if (TYPE_UNSIGNED (type))
375 return false;
377 return !wi::only_sign_bit_p (wi::to_wide (t));
380 /* Determine whether an expression T can be cheaply negated using
381 the function negate_expr without introducing undefined overflow. */
383 static bool
384 negate_expr_p (tree t)
386 tree type;
388 if (t == 0)
389 return false;
391 type = TREE_TYPE (t);
393 STRIP_SIGN_NOPS (t);
394 switch (TREE_CODE (t))
396 case INTEGER_CST:
397 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
398 return true;
400 /* Check that -CST will not overflow type. */
401 return may_negate_without_overflow_p (t);
402 case BIT_NOT_EXPR:
403 return (INTEGRAL_TYPE_P (type)
404 && TYPE_OVERFLOW_WRAPS (type));
406 case FIXED_CST:
407 return true;
409 case NEGATE_EXPR:
410 return !TYPE_OVERFLOW_SANITIZED (type);
412 case REAL_CST:
413 /* We want to canonicalize to positive real constants. Pretend
414 that only negative ones can be easily negated. */
415 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
417 case COMPLEX_CST:
418 return negate_expr_p (TREE_REALPART (t))
419 && negate_expr_p (TREE_IMAGPART (t));
421 case VECTOR_CST:
423 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
424 return true;
426 /* Steps don't prevent negation. */
427 unsigned int count = vector_cst_encoded_nelts (t);
428 for (unsigned int i = 0; i < count; ++i)
429 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
430 return false;
432 return true;
435 case COMPLEX_EXPR:
436 return negate_expr_p (TREE_OPERAND (t, 0))
437 && negate_expr_p (TREE_OPERAND (t, 1));
439 case CONJ_EXPR:
440 return negate_expr_p (TREE_OPERAND (t, 0));
442 case PLUS_EXPR:
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
444 || HONOR_SIGNED_ZEROS (type)
445 || (ANY_INTEGRAL_TYPE_P (type)
446 && ! TYPE_OVERFLOW_WRAPS (type)))
447 return false;
448 /* -(A + B) -> (-B) - A. */
449 if (negate_expr_p (TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
457 && !HONOR_SIGNED_ZEROS (type)
458 && (! ANY_INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type));
461 case MULT_EXPR:
462 if (TYPE_UNSIGNED (type))
463 break;
464 /* INT_MIN/n * n doesn't overflow while negating one operand it does
465 if n is a (negative) power of two. */
466 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
467 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
468 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
469 && (wi::popcount
470 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
471 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
472 && (wi::popcount
473 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
474 break;
476 /* Fall through. */
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 /* In general we can't negate A in A / B, because if A is INT_MIN and
490 B is not 1 we change the sign of the result. */
491 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
492 && negate_expr_p (TREE_OPERAND (t, 0)))
493 return true;
494 /* In general we can't negate B in A / B, because if A is INT_MIN and
495 B is 1, we may turn this into INT_MIN / -1 which is undefined
496 and actually traps on some architectures. */
497 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
498 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
499 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
500 && ! integer_onep (TREE_OPERAND (t, 1))))
501 return negate_expr_p (TREE_OPERAND (t, 1));
502 break;
504 case NOP_EXPR:
505 /* Negate -((double)float) as (double)(-float). */
506 if (TREE_CODE (type) == REAL_TYPE)
508 tree tem = strip_float_extensions (t);
509 if (tem != t)
510 return negate_expr_p (tem);
512 break;
514 case CALL_EXPR:
515 /* Negate -f(x) as f(-x). */
516 if (negate_mathfn_p (get_call_combined_fn (t)))
517 return negate_expr_p (CALL_EXPR_ARG (t, 0));
518 break;
520 case RSHIFT_EXPR:
521 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
522 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
524 tree op1 = TREE_OPERAND (t, 1);
525 if (wi::to_wide (op1) == element_precision (type) - 1)
526 return true;
528 break;
530 default:
531 break;
533 return false;
536 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
537 simplification is possible.
538 If negate_expr_p would return true for T, NULL_TREE will never be
539 returned. */
541 static tree
542 fold_negate_expr_1 (location_t loc, tree t)
544 tree type = TREE_TYPE (t);
545 tree tem;
547 switch (TREE_CODE (t))
549 /* Convert - (~A) to A + 1. */
550 case BIT_NOT_EXPR:
551 if (INTEGRAL_TYPE_P (type))
552 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
553 build_one_cst (type));
554 break;
556 case INTEGER_CST:
557 tem = fold_negate_const (t, type);
558 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
559 || (ANY_INTEGRAL_TYPE_P (type)
560 && !TYPE_OVERFLOW_TRAPS (type)
561 && TYPE_OVERFLOW_WRAPS (type))
562 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
563 return tem;
564 break;
566 case POLY_INT_CST:
567 case REAL_CST:
568 case FIXED_CST:
569 tem = fold_negate_const (t, type);
570 return tem;
572 case COMPLEX_CST:
574 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
575 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
576 if (rpart && ipart)
577 return build_complex (type, rpart, ipart);
579 break;
581 case VECTOR_CST:
583 tree_vector_builder elts;
584 elts.new_unary_operation (type, t, true);
585 unsigned int count = elts.encoded_nelts ();
586 for (unsigned int i = 0; i < count; ++i)
588 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
589 if (elt == NULL_TREE)
590 return NULL_TREE;
591 elts.quick_push (elt);
594 return elts.build ();
597 case COMPLEX_EXPR:
598 if (negate_expr_p (t))
599 return fold_build2_loc (loc, COMPLEX_EXPR, type,
600 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
601 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
602 break;
604 case CONJ_EXPR:
605 if (negate_expr_p (t))
606 return fold_build1_loc (loc, CONJ_EXPR, type,
607 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
608 break;
610 case NEGATE_EXPR:
611 if (!TYPE_OVERFLOW_SANITIZED (type))
612 return TREE_OPERAND (t, 0);
613 break;
615 case PLUS_EXPR:
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
617 && !HONOR_SIGNED_ZEROS (type))
619 /* -(A + B) -> (-B) - A. */
620 if (negate_expr_p (TREE_OPERAND (t, 1)))
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
635 break;
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
640 && !HONOR_SIGNED_ZEROS (type))
641 return fold_build2_loc (loc, MINUS_EXPR, type,
642 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
643 break;
645 case MULT_EXPR:
646 if (TYPE_UNSIGNED (type))
647 break;
649 /* Fall through. */
651 case RDIV_EXPR:
652 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
654 tem = TREE_OPERAND (t, 1);
655 if (negate_expr_p (tem))
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 TREE_OPERAND (t, 0), negate_expr (tem));
658 tem = TREE_OPERAND (t, 0);
659 if (negate_expr_p (tem))
660 return fold_build2_loc (loc, TREE_CODE (t), type,
661 negate_expr (tem), TREE_OPERAND (t, 1));
663 break;
665 case TRUNC_DIV_EXPR:
666 case ROUND_DIV_EXPR:
667 case EXACT_DIV_EXPR:
668 if (TYPE_UNSIGNED (type))
669 break;
670 /* In general we can't negate A in A / B, because if A is INT_MIN and
671 B is not 1 we change the sign of the result. */
672 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
673 && negate_expr_p (TREE_OPERAND (t, 0)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 negate_expr (TREE_OPERAND (t, 0)),
676 TREE_OPERAND (t, 1));
677 /* In general we can't negate B in A / B, because if A is INT_MIN and
678 B is 1, we may turn this into INT_MIN / -1 which is undefined
679 and actually traps on some architectures. */
680 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
681 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
682 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
683 && ! integer_onep (TREE_OPERAND (t, 1))))
684 && negate_expr_p (TREE_OPERAND (t, 1)))
685 return fold_build2_loc (loc, TREE_CODE (t), type,
686 TREE_OPERAND (t, 0),
687 negate_expr (TREE_OPERAND (t, 1)));
688 break;
690 case NOP_EXPR:
691 /* Convert -((double)float) into (double)(-float). */
692 if (TREE_CODE (type) == REAL_TYPE)
694 tem = strip_float_extensions (t);
695 if (tem != t && negate_expr_p (tem))
696 return fold_convert_loc (loc, type, negate_expr (tem));
698 break;
700 case CALL_EXPR:
701 /* Negate -f(x) as f(-x). */
702 if (negate_mathfn_p (get_call_combined_fn (t))
703 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
705 tree fndecl, arg;
707 fndecl = get_callee_fndecl (t);
708 arg = negate_expr (CALL_EXPR_ARG (t, 0));
709 return build_call_expr_loc (loc, fndecl, 1, arg);
711 break;
713 case RSHIFT_EXPR:
714 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
715 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
717 tree op1 = TREE_OPERAND (t, 1);
718 if (wi::to_wide (op1) == element_precision (type) - 1)
720 tree ntype = TYPE_UNSIGNED (type)
721 ? signed_type_for (type)
722 : unsigned_type_for (type);
723 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
724 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
725 return fold_convert_loc (loc, type, temp);
728 break;
730 default:
731 break;
734 return NULL_TREE;
737 /* A wrapper for fold_negate_expr_1. */
739 static tree
740 fold_negate_expr (location_t loc, tree t)
742 tree type = TREE_TYPE (t);
743 STRIP_SIGN_NOPS (t);
744 tree tem = fold_negate_expr_1 (loc, t);
745 if (tem == NULL_TREE)
746 return NULL_TREE;
747 return fold_convert_loc (loc, type, tem);
750 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
751 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
752 return NULL_TREE. */
754 static tree
755 negate_expr (tree t)
757 tree type, tem;
758 location_t loc;
760 if (t == NULL_TREE)
761 return NULL_TREE;
763 loc = EXPR_LOCATION (t);
764 type = TREE_TYPE (t);
765 STRIP_SIGN_NOPS (t);
767 tem = fold_negate_expr (loc, t);
768 if (!tem)
769 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
770 return fold_convert_loc (loc, type, tem);
773 /* Split a tree IN into a constant, literal and variable parts that could be
774 combined with CODE to make IN. "constant" means an expression with
775 TREE_CONSTANT but that isn't an actual constant. CODE must be a
776 commutative arithmetic operation. Store the constant part into *CONP,
777 the literal in *LITP and return the variable part. If a part isn't
778 present, set it to null. If the tree does not decompose in this way,
779 return the entire tree as the variable part and the other parts as null.
781 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
782 case, we negate an operand that was subtracted. Except if it is a
783 literal for which we use *MINUS_LITP instead.
785 If NEGATE_P is true, we are negating all of IN, again except a literal
786 for which we use *MINUS_LITP instead. If a variable part is of pointer
787 type, it is negated after converting to TYPE. This prevents us from
788 generating illegal MINUS pointer expression. LOC is the location of
789 the converted variable part.
791 If IN is itself a literal or constant, return it as appropriate.
793 Note that we do not guarantee that any of the three values will be the
794 same type as IN, but they will have the same signedness and mode. */
796 static tree
797 split_tree (tree in, tree type, enum tree_code code,
798 tree *minus_varp, tree *conp, tree *minus_conp,
799 tree *litp, tree *minus_litp, int negate_p)
801 tree var = 0;
802 *minus_varp = 0;
803 *conp = 0;
804 *minus_conp = 0;
805 *litp = 0;
806 *minus_litp = 0;
808 /* Strip any conversions that don't change the machine mode or signedness. */
809 STRIP_SIGN_NOPS (in);
811 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
812 || TREE_CODE (in) == FIXED_CST)
813 *litp = in;
814 else if (TREE_CODE (in) == code
815 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
816 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
817 /* We can associate addition and subtraction together (even
818 though the C standard doesn't say so) for integers because
819 the value is not affected. For reals, the value might be
820 affected, so we can't. */
821 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
822 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
823 || (code == MINUS_EXPR
824 && (TREE_CODE (in) == PLUS_EXPR
825 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
827 tree op0 = TREE_OPERAND (in, 0);
828 tree op1 = TREE_OPERAND (in, 1);
829 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
830 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
832 /* First see if either of the operands is a literal, then a constant. */
833 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
834 || TREE_CODE (op0) == FIXED_CST)
835 *litp = op0, op0 = 0;
836 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
837 || TREE_CODE (op1) == FIXED_CST)
838 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
840 if (op0 != 0 && TREE_CONSTANT (op0))
841 *conp = op0, op0 = 0;
842 else if (op1 != 0 && TREE_CONSTANT (op1))
843 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
845 /* If we haven't dealt with either operand, this is not a case we can
846 decompose. Otherwise, VAR is either of the ones remaining, if any. */
847 if (op0 != 0 && op1 != 0)
848 var = in;
849 else if (op0 != 0)
850 var = op0;
851 else
852 var = op1, neg_var_p = neg1_p;
854 /* Now do any needed negations. */
855 if (neg_litp_p)
856 *minus_litp = *litp, *litp = 0;
857 if (neg_conp_p && *conp)
858 *minus_conp = *conp, *conp = 0;
859 if (neg_var_p && var)
860 *minus_varp = var, var = 0;
862 else if (TREE_CONSTANT (in))
863 *conp = in;
864 else if (TREE_CODE (in) == BIT_NOT_EXPR
865 && code == PLUS_EXPR)
867 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
868 when IN is constant. */
869 *litp = build_minus_one_cst (type);
870 *minus_varp = TREE_OPERAND (in, 0);
872 else
873 var = in;
875 if (negate_p)
877 if (*litp)
878 *minus_litp = *litp, *litp = 0;
879 else if (*minus_litp)
880 *litp = *minus_litp, *minus_litp = 0;
881 if (*conp)
882 *minus_conp = *conp, *conp = 0;
883 else if (*minus_conp)
884 *conp = *minus_conp, *minus_conp = 0;
885 if (var)
886 *minus_varp = var, var = 0;
887 else if (*minus_varp)
888 var = *minus_varp, *minus_varp = 0;
891 if (*litp
892 && TREE_OVERFLOW_P (*litp))
893 *litp = drop_tree_overflow (*litp);
894 if (*minus_litp
895 && TREE_OVERFLOW_P (*minus_litp))
896 *minus_litp = drop_tree_overflow (*minus_litp);
898 return var;
901 /* Re-associate trees split by the above function. T1 and T2 are
902 either expressions to associate or null. Return the new
903 expression, if any. LOC is the location of the new expression. If
904 we build an operation, do it in TYPE and with CODE. */
906 static tree
907 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
909 if (t1 == 0)
911 gcc_assert (t2 == 0 || code != MINUS_EXPR);
912 return t2;
914 else if (t2 == 0)
915 return t1;
917 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
918 try to fold this since we will have infinite recursion. But do
919 deal with any NEGATE_EXPRs. */
920 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
921 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
922 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
924 if (code == PLUS_EXPR)
926 if (TREE_CODE (t1) == NEGATE_EXPR)
927 return build2_loc (loc, MINUS_EXPR, type,
928 fold_convert_loc (loc, type, t2),
929 fold_convert_loc (loc, type,
930 TREE_OPERAND (t1, 0)));
931 else if (TREE_CODE (t2) == NEGATE_EXPR)
932 return build2_loc (loc, MINUS_EXPR, type,
933 fold_convert_loc (loc, type, t1),
934 fold_convert_loc (loc, type,
935 TREE_OPERAND (t2, 0)));
936 else if (integer_zerop (t2))
937 return fold_convert_loc (loc, type, t1);
939 else if (code == MINUS_EXPR)
941 if (integer_zerop (t2))
942 return fold_convert_loc (loc, type, t1);
945 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
946 fold_convert_loc (loc, type, t2));
949 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
950 fold_convert_loc (loc, type, t2));
953 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
954 for use in int_const_binop, size_binop and size_diffop. */
956 static bool
957 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
959 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
960 return false;
961 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
962 return false;
964 switch (code)
966 case LSHIFT_EXPR:
967 case RSHIFT_EXPR:
968 case LROTATE_EXPR:
969 case RROTATE_EXPR:
970 return true;
972 default:
973 break;
976 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
977 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
978 && TYPE_MODE (type1) == TYPE_MODE (type2);
981 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
982 a new constant in RES. Return FALSE if we don't know how to
983 evaluate CODE at compile-time. */
985 bool
986 wide_int_binop (wide_int &res,
987 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
988 signop sign, wi::overflow_type *overflow)
990 wide_int tmp;
991 *overflow = wi::OVF_NONE;
992 switch (code)
994 case BIT_IOR_EXPR:
995 res = wi::bit_or (arg1, arg2);
996 break;
998 case BIT_XOR_EXPR:
999 res = wi::bit_xor (arg1, arg2);
1000 break;
1002 case BIT_AND_EXPR:
1003 res = wi::bit_and (arg1, arg2);
1004 break;
1006 case LSHIFT_EXPR:
1007 if (wi::neg_p (arg2))
1008 return false;
1009 res = wi::lshift (arg1, arg2);
1010 break;
1012 case RSHIFT_EXPR:
1013 if (wi::neg_p (arg2))
1014 return false;
1015 /* It's unclear from the C standard whether shifts can overflow.
1016 The following code ignores overflow; perhaps a C standard
1017 interpretation ruling is needed. */
1018 res = wi::rshift (arg1, arg2, sign);
1019 break;
1021 case RROTATE_EXPR:
1022 case LROTATE_EXPR:
1023 if (wi::neg_p (arg2))
1025 tmp = -arg2;
1026 if (code == RROTATE_EXPR)
1027 code = LROTATE_EXPR;
1028 else
1029 code = RROTATE_EXPR;
1031 else
1032 tmp = arg2;
1034 if (code == RROTATE_EXPR)
1035 res = wi::rrotate (arg1, tmp);
1036 else
1037 res = wi::lrotate (arg1, tmp);
1038 break;
1040 case PLUS_EXPR:
1041 res = wi::add (arg1, arg2, sign, overflow);
1042 break;
1044 case MINUS_EXPR:
1045 res = wi::sub (arg1, arg2, sign, overflow);
1046 break;
1048 case MULT_EXPR:
1049 res = wi::mul (arg1, arg2, sign, overflow);
1050 break;
1052 case MULT_HIGHPART_EXPR:
1053 res = wi::mul_high (arg1, arg2, sign);
1054 break;
1056 case TRUNC_DIV_EXPR:
1057 case EXACT_DIV_EXPR:
1058 if (arg2 == 0)
1059 return false;
1060 res = wi::div_trunc (arg1, arg2, sign, overflow);
1061 break;
1063 case FLOOR_DIV_EXPR:
1064 if (arg2 == 0)
1065 return false;
1066 res = wi::div_floor (arg1, arg2, sign, overflow);
1067 break;
1069 case CEIL_DIV_EXPR:
1070 if (arg2 == 0)
1071 return false;
1072 res = wi::div_ceil (arg1, arg2, sign, overflow);
1073 break;
1075 case ROUND_DIV_EXPR:
1076 if (arg2 == 0)
1077 return false;
1078 res = wi::div_round (arg1, arg2, sign, overflow);
1079 break;
1081 case TRUNC_MOD_EXPR:
1082 if (arg2 == 0)
1083 return false;
1084 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1085 break;
1087 case FLOOR_MOD_EXPR:
1088 if (arg2 == 0)
1089 return false;
1090 res = wi::mod_floor (arg1, arg2, sign, overflow);
1091 break;
1093 case CEIL_MOD_EXPR:
1094 if (arg2 == 0)
1095 return false;
1096 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1097 break;
1099 case ROUND_MOD_EXPR:
1100 if (arg2 == 0)
1101 return false;
1102 res = wi::mod_round (arg1, arg2, sign, overflow);
1103 break;
1105 case MIN_EXPR:
1106 res = wi::min (arg1, arg2, sign);
1107 break;
1109 case MAX_EXPR:
1110 res = wi::max (arg1, arg2, sign);
1111 break;
1113 default:
1114 return false;
1116 return true;
1119 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1120 produce a new constant in RES. Return FALSE if we don't know how
1121 to evaluate CODE at compile-time. */
1123 static bool
1124 poly_int_binop (poly_wide_int &res, enum tree_code code,
1125 const_tree arg1, const_tree arg2,
1126 signop sign, wi::overflow_type *overflow)
1128 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1129 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1130 switch (code)
1132 case PLUS_EXPR:
1133 res = wi::add (wi::to_poly_wide (arg1),
1134 wi::to_poly_wide (arg2), sign, overflow);
1135 break;
1137 case MINUS_EXPR:
1138 res = wi::sub (wi::to_poly_wide (arg1),
1139 wi::to_poly_wide (arg2), sign, overflow);
1140 break;
1142 case MULT_EXPR:
1143 if (TREE_CODE (arg2) == INTEGER_CST)
1144 res = wi::mul (wi::to_poly_wide (arg1),
1145 wi::to_wide (arg2), sign, overflow);
1146 else if (TREE_CODE (arg1) == INTEGER_CST)
1147 res = wi::mul (wi::to_poly_wide (arg2),
1148 wi::to_wide (arg1), sign, overflow);
1149 else
1150 return NULL_TREE;
1151 break;
1153 case LSHIFT_EXPR:
1154 if (TREE_CODE (arg2) == INTEGER_CST)
1155 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1156 else
1157 return false;
1158 break;
1160 case BIT_IOR_EXPR:
1161 if (TREE_CODE (arg2) != INTEGER_CST
1162 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1163 &res))
1164 return false;
1165 break;
1167 default:
1168 return false;
1170 return true;
1173 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1174 produce a new constant. Return NULL_TREE if we don't know how to
1175 evaluate CODE at compile-time. */
1177 tree
1178 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1179 int overflowable)
1181 poly_wide_int poly_res;
1182 tree type = TREE_TYPE (arg1);
1183 signop sign = TYPE_SIGN (type);
1184 wi::overflow_type overflow = wi::OVF_NONE;
1186 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1188 wide_int warg1 = wi::to_wide (arg1), res;
1189 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1190 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1191 return NULL_TREE;
1192 poly_res = res;
1194 else if (!poly_int_tree_p (arg1)
1195 || !poly_int_tree_p (arg2)
1196 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1197 return NULL_TREE;
1198 return force_fit_type (type, poly_res, overflowable,
1199 (((sign == SIGNED || overflowable == -1)
1200 && overflow)
1201 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1204 /* Return true if binary operation OP distributes over addition in operand
1205 OPNO, with the other operand being held constant. OPNO counts from 1. */
1207 static bool
1208 distributes_over_addition_p (tree_code op, int opno)
1210 switch (op)
1212 case PLUS_EXPR:
1213 case MINUS_EXPR:
1214 case MULT_EXPR:
1215 return true;
1217 case LSHIFT_EXPR:
1218 return opno == 1;
1220 default:
1221 return false;
1225 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1226 constant. We assume ARG1 and ARG2 have the same data type, or at least
1227 are the same kind of constant and the same machine mode. Return zero if
1228 combining the constants is not allowed in the current operating mode. */
1230 static tree
1231 const_binop (enum tree_code code, tree arg1, tree arg2)
1233 /* Sanity check for the recursive cases. */
1234 if (!arg1 || !arg2)
1235 return NULL_TREE;
1237 STRIP_NOPS (arg1);
1238 STRIP_NOPS (arg2);
1240 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1242 if (code == POINTER_PLUS_EXPR)
1243 return int_const_binop (PLUS_EXPR,
1244 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1246 return int_const_binop (code, arg1, arg2);
1249 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1251 machine_mode mode;
1252 REAL_VALUE_TYPE d1;
1253 REAL_VALUE_TYPE d2;
1254 REAL_VALUE_TYPE value;
1255 REAL_VALUE_TYPE result;
1256 bool inexact;
1257 tree t, type;
1259 /* The following codes are handled by real_arithmetic. */
1260 switch (code)
1262 case PLUS_EXPR:
1263 case MINUS_EXPR:
1264 case MULT_EXPR:
1265 case RDIV_EXPR:
1266 case MIN_EXPR:
1267 case MAX_EXPR:
1268 break;
1270 default:
1271 return NULL_TREE;
1274 d1 = TREE_REAL_CST (arg1);
1275 d2 = TREE_REAL_CST (arg2);
1277 type = TREE_TYPE (arg1);
1278 mode = TYPE_MODE (type);
1280 /* Don't perform operation if we honor signaling NaNs and
1281 either operand is a signaling NaN. */
1282 if (HONOR_SNANS (mode)
1283 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1284 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1285 return NULL_TREE;
1287 /* Don't perform operation if it would raise a division
1288 by zero exception. */
1289 if (code == RDIV_EXPR
1290 && real_equal (&d2, &dconst0)
1291 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1292 return NULL_TREE;
1294 /* If either operand is a NaN, just return it. Otherwise, set up
1295 for floating-point trap; we return an overflow. */
1296 if (REAL_VALUE_ISNAN (d1))
1298 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1299 is off. */
1300 d1.signalling = 0;
1301 t = build_real (type, d1);
1302 return t;
1304 else if (REAL_VALUE_ISNAN (d2))
1306 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1307 is off. */
1308 d2.signalling = 0;
1309 t = build_real (type, d2);
1310 return t;
1313 inexact = real_arithmetic (&value, code, &d1, &d2);
1314 real_convert (&result, mode, &value);
1316 /* Don't constant fold this floating point operation if
1317 both operands are not NaN but the result is NaN, and
1318 flag_trapping_math. Such operations should raise an
1319 invalid operation exception. */
1320 if (flag_trapping_math
1321 && MODE_HAS_NANS (mode)
1322 && REAL_VALUE_ISNAN (result)
1323 && !REAL_VALUE_ISNAN (d1)
1324 && !REAL_VALUE_ISNAN (d2))
1325 return NULL_TREE;
1327 /* Don't constant fold this floating point operation if
1328 the result has overflowed and flag_trapping_math. */
1329 if (flag_trapping_math
1330 && MODE_HAS_INFINITIES (mode)
1331 && REAL_VALUE_ISINF (result)
1332 && !REAL_VALUE_ISINF (d1)
1333 && !REAL_VALUE_ISINF (d2))
1334 return NULL_TREE;
1336 /* Don't constant fold this floating point operation if the
1337 result may dependent upon the run-time rounding mode and
1338 flag_rounding_math is set, or if GCC's software emulation
1339 is unable to accurately represent the result. */
1340 if ((flag_rounding_math
1341 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1342 && (inexact || !real_identical (&result, &value)))
1343 return NULL_TREE;
1345 t = build_real (type, result);
1347 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1348 return t;
1351 if (TREE_CODE (arg1) == FIXED_CST)
1353 FIXED_VALUE_TYPE f1;
1354 FIXED_VALUE_TYPE f2;
1355 FIXED_VALUE_TYPE result;
1356 tree t, type;
1357 int sat_p;
1358 bool overflow_p;
1360 /* The following codes are handled by fixed_arithmetic. */
1361 switch (code)
1363 case PLUS_EXPR:
1364 case MINUS_EXPR:
1365 case MULT_EXPR:
1366 case TRUNC_DIV_EXPR:
1367 if (TREE_CODE (arg2) != FIXED_CST)
1368 return NULL_TREE;
1369 f2 = TREE_FIXED_CST (arg2);
1370 break;
1372 case LSHIFT_EXPR:
1373 case RSHIFT_EXPR:
1375 if (TREE_CODE (arg2) != INTEGER_CST)
1376 return NULL_TREE;
1377 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1378 f2.data.high = w2.elt (1);
1379 f2.data.low = w2.ulow ();
1380 f2.mode = SImode;
1382 break;
1384 default:
1385 return NULL_TREE;
1388 f1 = TREE_FIXED_CST (arg1);
1389 type = TREE_TYPE (arg1);
1390 sat_p = TYPE_SATURATING (type);
1391 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1392 t = build_fixed (type, result);
1393 /* Propagate overflow flags. */
1394 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1395 TREE_OVERFLOW (t) = 1;
1396 return t;
1399 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1401 tree type = TREE_TYPE (arg1);
1402 tree r1 = TREE_REALPART (arg1);
1403 tree i1 = TREE_IMAGPART (arg1);
1404 tree r2 = TREE_REALPART (arg2);
1405 tree i2 = TREE_IMAGPART (arg2);
1406 tree real, imag;
1408 switch (code)
1410 case PLUS_EXPR:
1411 case MINUS_EXPR:
1412 real = const_binop (code, r1, r2);
1413 imag = const_binop (code, i1, i2);
1414 break;
1416 case MULT_EXPR:
1417 if (COMPLEX_FLOAT_TYPE_P (type))
1418 return do_mpc_arg2 (arg1, arg2, type,
1419 /* do_nonfinite= */ folding_initializer,
1420 mpc_mul);
1422 real = const_binop (MINUS_EXPR,
1423 const_binop (MULT_EXPR, r1, r2),
1424 const_binop (MULT_EXPR, i1, i2));
1425 imag = const_binop (PLUS_EXPR,
1426 const_binop (MULT_EXPR, r1, i2),
1427 const_binop (MULT_EXPR, i1, r2));
1428 break;
1430 case RDIV_EXPR:
1431 if (COMPLEX_FLOAT_TYPE_P (type))
1432 return do_mpc_arg2 (arg1, arg2, type,
1433 /* do_nonfinite= */ folding_initializer,
1434 mpc_div);
1435 /* Fallthru. */
1436 case TRUNC_DIV_EXPR:
1437 case CEIL_DIV_EXPR:
1438 case FLOOR_DIV_EXPR:
1439 case ROUND_DIV_EXPR:
1440 if (flag_complex_method == 0)
1442 /* Keep this algorithm in sync with
1443 tree-complex.cc:expand_complex_div_straight().
1445 Expand complex division to scalars, straightforward algorithm.
1446 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1447 t = br*br + bi*bi
1449 tree magsquared
1450 = const_binop (PLUS_EXPR,
1451 const_binop (MULT_EXPR, r2, r2),
1452 const_binop (MULT_EXPR, i2, i2));
1453 tree t1
1454 = const_binop (PLUS_EXPR,
1455 const_binop (MULT_EXPR, r1, r2),
1456 const_binop (MULT_EXPR, i1, i2));
1457 tree t2
1458 = const_binop (MINUS_EXPR,
1459 const_binop (MULT_EXPR, i1, r2),
1460 const_binop (MULT_EXPR, r1, i2));
1462 real = const_binop (code, t1, magsquared);
1463 imag = const_binop (code, t2, magsquared);
1465 else
1467 /* Keep this algorithm in sync with
1468 tree-complex.cc:expand_complex_div_wide().
1470 Expand complex division to scalars, modified algorithm to minimize
1471 overflow with wide input ranges. */
1472 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1473 fold_abs_const (r2, TREE_TYPE (type)),
1474 fold_abs_const (i2, TREE_TYPE (type)));
1476 if (integer_nonzerop (compare))
1478 /* In the TRUE branch, we compute
1479 ratio = br/bi;
1480 div = (br * ratio) + bi;
1481 tr = (ar * ratio) + ai;
1482 ti = (ai * ratio) - ar;
1483 tr = tr / div;
1484 ti = ti / div; */
1485 tree ratio = const_binop (code, r2, i2);
1486 tree div = const_binop (PLUS_EXPR, i2,
1487 const_binop (MULT_EXPR, r2, ratio));
1488 real = const_binop (MULT_EXPR, r1, ratio);
1489 real = const_binop (PLUS_EXPR, real, i1);
1490 real = const_binop (code, real, div);
1492 imag = const_binop (MULT_EXPR, i1, ratio);
1493 imag = const_binop (MINUS_EXPR, imag, r1);
1494 imag = const_binop (code, imag, div);
1496 else
1498 /* In the FALSE branch, we compute
1499 ratio = d/c;
1500 divisor = (d * ratio) + c;
1501 tr = (b * ratio) + a;
1502 ti = b - (a * ratio);
1503 tr = tr / div;
1504 ti = ti / div; */
1505 tree ratio = const_binop (code, i2, r2);
1506 tree div = const_binop (PLUS_EXPR, r2,
1507 const_binop (MULT_EXPR, i2, ratio));
1509 real = const_binop (MULT_EXPR, i1, ratio);
1510 real = const_binop (PLUS_EXPR, real, r1);
1511 real = const_binop (code, real, div);
1513 imag = const_binop (MULT_EXPR, r1, ratio);
1514 imag = const_binop (MINUS_EXPR, i1, imag);
1515 imag = const_binop (code, imag, div);
1518 break;
1520 default:
1521 return NULL_TREE;
1524 if (real && imag)
1525 return build_complex (type, real, imag);
1528 if (TREE_CODE (arg1) == VECTOR_CST
1529 && TREE_CODE (arg2) == VECTOR_CST
1530 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1531 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1533 tree type = TREE_TYPE (arg1);
1534 bool step_ok_p;
1535 if (VECTOR_CST_STEPPED_P (arg1)
1536 && VECTOR_CST_STEPPED_P (arg2))
1537 /* We can operate directly on the encoding if:
1539 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1540 implies
1541 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1543 Addition and subtraction are the supported operators
1544 for which this is true. */
1545 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1546 else if (VECTOR_CST_STEPPED_P (arg1))
1547 /* We can operate directly on stepped encodings if:
1549 a3 - a2 == a2 - a1
1550 implies:
1551 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1553 which is true if (x -> x op c) distributes over addition. */
1554 step_ok_p = distributes_over_addition_p (code, 1);
1555 else
1556 /* Similarly in reverse. */
1557 step_ok_p = distributes_over_addition_p (code, 2);
1558 tree_vector_builder elts;
1559 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1560 return NULL_TREE;
1561 unsigned int count = elts.encoded_nelts ();
1562 for (unsigned int i = 0; i < count; ++i)
1564 tree elem1 = VECTOR_CST_ELT (arg1, i);
1565 tree elem2 = VECTOR_CST_ELT (arg2, i);
1567 tree elt = const_binop (code, elem1, elem2);
1569 /* It is possible that const_binop cannot handle the given
1570 code and return NULL_TREE */
1571 if (elt == NULL_TREE)
1572 return NULL_TREE;
1573 elts.quick_push (elt);
1576 return elts.build ();
1579 /* Shifts allow a scalar offset for a vector. */
1580 if (TREE_CODE (arg1) == VECTOR_CST
1581 && TREE_CODE (arg2) == INTEGER_CST)
1583 tree type = TREE_TYPE (arg1);
1584 bool step_ok_p = distributes_over_addition_p (code, 1);
1585 tree_vector_builder elts;
1586 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1587 return NULL_TREE;
1588 unsigned int count = elts.encoded_nelts ();
1589 for (unsigned int i = 0; i < count; ++i)
1591 tree elem1 = VECTOR_CST_ELT (arg1, i);
1593 tree elt = const_binop (code, elem1, arg2);
1595 /* It is possible that const_binop cannot handle the given
1596 code and return NULL_TREE. */
1597 if (elt == NULL_TREE)
1598 return NULL_TREE;
1599 elts.quick_push (elt);
1602 return elts.build ();
1604 return NULL_TREE;
1607 /* Overload that adds a TYPE parameter to be able to dispatch
1608 to fold_relational_const. */
1610 tree
1611 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1613 if (TREE_CODE_CLASS (code) == tcc_comparison)
1614 return fold_relational_const (code, type, arg1, arg2);
1616 /* ??? Until we make the const_binop worker take the type of the
1617 result as argument put those cases that need it here. */
1618 switch (code)
1620 case VEC_SERIES_EXPR:
1621 if (CONSTANT_CLASS_P (arg1)
1622 && CONSTANT_CLASS_P (arg2))
1623 return build_vec_series (type, arg1, arg2);
1624 return NULL_TREE;
1626 case COMPLEX_EXPR:
1627 if ((TREE_CODE (arg1) == REAL_CST
1628 && TREE_CODE (arg2) == REAL_CST)
1629 || (TREE_CODE (arg1) == INTEGER_CST
1630 && TREE_CODE (arg2) == INTEGER_CST))
1631 return build_complex (type, arg1, arg2);
1632 return NULL_TREE;
1634 case POINTER_DIFF_EXPR:
1635 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1637 poly_offset_int res = (wi::to_poly_offset (arg1)
1638 - wi::to_poly_offset (arg2));
1639 return force_fit_type (type, res, 1,
1640 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1642 return NULL_TREE;
1644 case VEC_PACK_TRUNC_EXPR:
1645 case VEC_PACK_FIX_TRUNC_EXPR:
1646 case VEC_PACK_FLOAT_EXPR:
1648 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1650 if (TREE_CODE (arg1) != VECTOR_CST
1651 || TREE_CODE (arg2) != VECTOR_CST)
1652 return NULL_TREE;
1654 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1655 return NULL_TREE;
1657 out_nelts = in_nelts * 2;
1658 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1659 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1661 tree_vector_builder elts (type, out_nelts, 1);
1662 for (i = 0; i < out_nelts; i++)
1664 tree elt = (i < in_nelts
1665 ? VECTOR_CST_ELT (arg1, i)
1666 : VECTOR_CST_ELT (arg2, i - in_nelts));
1667 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1668 ? NOP_EXPR
1669 : code == VEC_PACK_FLOAT_EXPR
1670 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1671 TREE_TYPE (type), elt);
1672 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1673 return NULL_TREE;
1674 elts.quick_push (elt);
1677 return elts.build ();
1680 case VEC_WIDEN_MULT_LO_EXPR:
1681 case VEC_WIDEN_MULT_HI_EXPR:
1682 case VEC_WIDEN_MULT_EVEN_EXPR:
1683 case VEC_WIDEN_MULT_ODD_EXPR:
1685 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1687 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1688 return NULL_TREE;
1690 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1691 return NULL_TREE;
1692 out_nelts = in_nelts / 2;
1693 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1694 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1696 if (code == VEC_WIDEN_MULT_LO_EXPR)
1697 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1698 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1699 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1700 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1701 scale = 1, ofs = 0;
1702 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1703 scale = 1, ofs = 1;
1705 tree_vector_builder elts (type, out_nelts, 1);
1706 for (out = 0; out < out_nelts; out++)
1708 unsigned int in = (out << scale) + ofs;
1709 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1710 VECTOR_CST_ELT (arg1, in));
1711 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1712 VECTOR_CST_ELT (arg2, in));
1714 if (t1 == NULL_TREE || t2 == NULL_TREE)
1715 return NULL_TREE;
1716 tree elt = const_binop (MULT_EXPR, t1, t2);
1717 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1718 return NULL_TREE;
1719 elts.quick_push (elt);
1722 return elts.build ();
1725 default:;
1728 if (TREE_CODE_CLASS (code) != tcc_binary)
1729 return NULL_TREE;
1731 /* Make sure type and arg0 have the same saturating flag. */
1732 gcc_checking_assert (TYPE_SATURATING (type)
1733 == TYPE_SATURATING (TREE_TYPE (arg1)));
1735 return const_binop (code, arg1, arg2);
1738 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1739 Return zero if computing the constants is not possible. */
1741 tree
1742 const_unop (enum tree_code code, tree type, tree arg0)
1744 /* Don't perform the operation, other than NEGATE and ABS, if
1745 flag_signaling_nans is on and the operand is a signaling NaN. */
1746 if (TREE_CODE (arg0) == REAL_CST
1747 && HONOR_SNANS (arg0)
1748 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1749 && code != NEGATE_EXPR
1750 && code != ABS_EXPR
1751 && code != ABSU_EXPR)
1752 return NULL_TREE;
1754 switch (code)
1756 CASE_CONVERT:
1757 case FLOAT_EXPR:
1758 case FIX_TRUNC_EXPR:
1759 case FIXED_CONVERT_EXPR:
1760 return fold_convert_const (code, type, arg0);
1762 case ADDR_SPACE_CONVERT_EXPR:
1763 /* If the source address is 0, and the source address space
1764 cannot have a valid object at 0, fold to dest type null. */
1765 if (integer_zerop (arg0)
1766 && !(targetm.addr_space.zero_address_valid
1767 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1768 return fold_convert_const (code, type, arg0);
1769 break;
1771 case VIEW_CONVERT_EXPR:
1772 return fold_view_convert_expr (type, arg0);
1774 case NEGATE_EXPR:
1776 /* Can't call fold_negate_const directly here as that doesn't
1777 handle all cases and we might not be able to negate some
1778 constants. */
1779 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1780 if (tem && CONSTANT_CLASS_P (tem))
1781 return tem;
1782 break;
1785 case ABS_EXPR:
1786 case ABSU_EXPR:
1787 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1788 return fold_abs_const (arg0, type);
1789 break;
1791 case CONJ_EXPR:
1792 if (TREE_CODE (arg0) == COMPLEX_CST)
1794 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1795 TREE_TYPE (type));
1796 return build_complex (type, TREE_REALPART (arg0), ipart);
1798 break;
1800 case BIT_NOT_EXPR:
1801 if (TREE_CODE (arg0) == INTEGER_CST)
1802 return fold_not_const (arg0, type);
1803 else if (POLY_INT_CST_P (arg0))
1804 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1805 /* Perform BIT_NOT_EXPR on each element individually. */
1806 else if (TREE_CODE (arg0) == VECTOR_CST)
1808 tree elem;
1810 /* This can cope with stepped encodings because ~x == -1 - x. */
1811 tree_vector_builder elements;
1812 elements.new_unary_operation (type, arg0, true);
1813 unsigned int i, count = elements.encoded_nelts ();
1814 for (i = 0; i < count; ++i)
1816 elem = VECTOR_CST_ELT (arg0, i);
1817 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1818 if (elem == NULL_TREE)
1819 break;
1820 elements.quick_push (elem);
1822 if (i == count)
1823 return elements.build ();
1825 break;
1827 case TRUTH_NOT_EXPR:
1828 if (TREE_CODE (arg0) == INTEGER_CST)
1829 return constant_boolean_node (integer_zerop (arg0), type);
1830 break;
1832 case REALPART_EXPR:
1833 if (TREE_CODE (arg0) == COMPLEX_CST)
1834 return fold_convert (type, TREE_REALPART (arg0));
1835 break;
1837 case IMAGPART_EXPR:
1838 if (TREE_CODE (arg0) == COMPLEX_CST)
1839 return fold_convert (type, TREE_IMAGPART (arg0));
1840 break;
1842 case VEC_UNPACK_LO_EXPR:
1843 case VEC_UNPACK_HI_EXPR:
1844 case VEC_UNPACK_FLOAT_LO_EXPR:
1845 case VEC_UNPACK_FLOAT_HI_EXPR:
1846 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1847 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1849 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1850 enum tree_code subcode;
1852 if (TREE_CODE (arg0) != VECTOR_CST)
1853 return NULL_TREE;
1855 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1856 return NULL_TREE;
1857 out_nelts = in_nelts / 2;
1858 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1860 unsigned int offset = 0;
1861 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1862 || code == VEC_UNPACK_FLOAT_LO_EXPR
1863 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1864 offset = out_nelts;
1866 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1867 subcode = NOP_EXPR;
1868 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1869 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1870 subcode = FLOAT_EXPR;
1871 else
1872 subcode = FIX_TRUNC_EXPR;
1874 tree_vector_builder elts (type, out_nelts, 1);
1875 for (i = 0; i < out_nelts; i++)
1877 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1878 VECTOR_CST_ELT (arg0, i + offset));
1879 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1880 return NULL_TREE;
1881 elts.quick_push (elt);
1884 return elts.build ();
1887 case VEC_DUPLICATE_EXPR:
1888 if (CONSTANT_CLASS_P (arg0))
1889 return build_vector_from_val (type, arg0);
1890 return NULL_TREE;
1892 default:
1893 break;
1896 return NULL_TREE;
1899 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1900 indicates which particular sizetype to create. */
1902 tree
1903 size_int_kind (poly_int64 number, enum size_type_kind kind)
1905 return build_int_cst (sizetype_tab[(int) kind], number);
1908 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1909 is a tree code. The type of the result is taken from the operands.
1910 Both must be equivalent integer types, ala int_binop_types_match_p.
1911 If the operands are constant, so is the result. */
1913 tree
1914 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1916 tree type = TREE_TYPE (arg0);
1918 if (arg0 == error_mark_node || arg1 == error_mark_node)
1919 return error_mark_node;
1921 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1922 TREE_TYPE (arg1)));
1924 /* Handle the special case of two poly_int constants faster. */
1925 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1927 /* And some specific cases even faster than that. */
1928 if (code == PLUS_EXPR)
1930 if (integer_zerop (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1933 if (integer_zerop (arg1)
1934 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1935 return arg0;
1937 else if (code == MINUS_EXPR)
1939 if (integer_zerop (arg1)
1940 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1941 return arg0;
1943 else if (code == MULT_EXPR)
1945 if (integer_onep (arg0)
1946 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1947 return arg1;
1950 /* Handle general case of two integer constants. For sizetype
1951 constant calculations we always want to know about overflow,
1952 even in the unsigned case. */
1953 tree res = int_const_binop (code, arg0, arg1, -1);
1954 if (res != NULL_TREE)
1955 return res;
1958 return fold_build2_loc (loc, code, type, arg0, arg1);
1961 /* Given two values, either both of sizetype or both of bitsizetype,
1962 compute the difference between the two values. Return the value
1963 in signed type corresponding to the type of the operands. */
1965 tree
1966 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1968 tree type = TREE_TYPE (arg0);
1969 tree ctype;
1971 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1972 TREE_TYPE (arg1)));
1974 /* If the type is already signed, just do the simple thing. */
1975 if (!TYPE_UNSIGNED (type))
1976 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1978 if (type == sizetype)
1979 ctype = ssizetype;
1980 else if (type == bitsizetype)
1981 ctype = sbitsizetype;
1982 else
1983 ctype = signed_type_for (type);
1985 /* If either operand is not a constant, do the conversions to the signed
1986 type and subtract. The hardware will do the right thing with any
1987 overflow in the subtraction. */
1988 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1989 return size_binop_loc (loc, MINUS_EXPR,
1990 fold_convert_loc (loc, ctype, arg0),
1991 fold_convert_loc (loc, ctype, arg1));
1993 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1994 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1995 overflow) and negate (which can't either). Special-case a result
1996 of zero while we're here. */
1997 if (tree_int_cst_equal (arg0, arg1))
1998 return build_int_cst (ctype, 0);
1999 else if (tree_int_cst_lt (arg1, arg0))
2000 return fold_convert_loc (loc, ctype,
2001 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2002 else
2003 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2004 fold_convert_loc (loc, ctype,
2005 size_binop_loc (loc,
2006 MINUS_EXPR,
2007 arg1, arg0)));
2010 /* A subroutine of fold_convert_const handling conversions of an
2011 INTEGER_CST to another integer type. */
2013 static tree
2014 fold_convert_const_int_from_int (tree type, const_tree arg1)
2016 /* Given an integer constant, make new constant with new type,
2017 appropriately sign-extended or truncated. Use widest_int
2018 so that any extension is done according ARG1's type. */
2019 return force_fit_type (type, wi::to_widest (arg1),
2020 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2021 TREE_OVERFLOW (arg1));
2024 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2025 to an integer type. */
2027 static tree
2028 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2030 bool overflow = false;
2031 tree t;
2033 /* The following code implements the floating point to integer
2034 conversion rules required by the Java Language Specification,
2035 that IEEE NaNs are mapped to zero and values that overflow
2036 the target precision saturate, i.e. values greater than
2037 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2038 are mapped to INT_MIN. These semantics are allowed by the
2039 C and C++ standards that simply state that the behavior of
2040 FP-to-integer conversion is unspecified upon overflow. */
2042 wide_int val;
2043 REAL_VALUE_TYPE r;
2044 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2046 switch (code)
2048 case FIX_TRUNC_EXPR:
2049 real_trunc (&r, VOIDmode, &x);
2050 break;
2052 default:
2053 gcc_unreachable ();
2056 /* If R is NaN, return zero and show we have an overflow. */
2057 if (REAL_VALUE_ISNAN (r))
2059 overflow = true;
2060 val = wi::zero (TYPE_PRECISION (type));
2063 /* See if R is less than the lower bound or greater than the
2064 upper bound. */
2066 if (! overflow)
2068 tree lt = TYPE_MIN_VALUE (type);
2069 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2070 if (real_less (&r, &l))
2072 overflow = true;
2073 val = wi::to_wide (lt);
2077 if (! overflow)
2079 tree ut = TYPE_MAX_VALUE (type);
2080 if (ut)
2082 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2083 if (real_less (&u, &r))
2085 overflow = true;
2086 val = wi::to_wide (ut);
2091 if (! overflow)
2092 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2094 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2095 return t;
2098 /* A subroutine of fold_convert_const handling conversions of a
2099 FIXED_CST to an integer type. */
2101 static tree
2102 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2104 tree t;
2105 double_int temp, temp_trunc;
2106 scalar_mode mode;
2108 /* Right shift FIXED_CST to temp by fbit. */
2109 temp = TREE_FIXED_CST (arg1).data;
2110 mode = TREE_FIXED_CST (arg1).mode;
2111 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2113 temp = temp.rshift (GET_MODE_FBIT (mode),
2114 HOST_BITS_PER_DOUBLE_INT,
2115 SIGNED_FIXED_POINT_MODE_P (mode));
2117 /* Left shift temp to temp_trunc by fbit. */
2118 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2119 HOST_BITS_PER_DOUBLE_INT,
2120 SIGNED_FIXED_POINT_MODE_P (mode));
2122 else
2124 temp = double_int_zero;
2125 temp_trunc = double_int_zero;
2128 /* If FIXED_CST is negative, we need to round the value toward 0.
2129 By checking if the fractional bits are not zero to add 1 to temp. */
2130 if (SIGNED_FIXED_POINT_MODE_P (mode)
2131 && temp_trunc.is_negative ()
2132 && TREE_FIXED_CST (arg1).data != temp_trunc)
2133 temp += double_int_one;
2135 /* Given a fixed-point constant, make new constant with new type,
2136 appropriately sign-extended or truncated. */
2137 t = force_fit_type (type, temp, -1,
2138 (temp.is_negative ()
2139 && (TYPE_UNSIGNED (type)
2140 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2141 | TREE_OVERFLOW (arg1));
2143 return t;
2146 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2147 to another floating point type. */
2149 static tree
2150 fold_convert_const_real_from_real (tree type, const_tree arg1)
2152 REAL_VALUE_TYPE value;
2153 tree t;
2155 /* Don't perform the operation if flag_signaling_nans is on
2156 and the operand is a signaling NaN. */
2157 if (HONOR_SNANS (arg1)
2158 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2159 return NULL_TREE;
2161 /* With flag_rounding_math we should respect the current rounding mode
2162 unless the conversion is exact. */
2163 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2164 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2165 return NULL_TREE;
2167 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2168 t = build_real (type, value);
2170 /* If converting an infinity or NAN to a representation that doesn't
2171 have one, set the overflow bit so that we can produce some kind of
2172 error message at the appropriate point if necessary. It's not the
2173 most user-friendly message, but it's better than nothing. */
2174 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2175 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2176 TREE_OVERFLOW (t) = 1;
2177 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2178 && !MODE_HAS_NANS (TYPE_MODE (type)))
2179 TREE_OVERFLOW (t) = 1;
2180 /* Regular overflow, conversion produced an infinity in a mode that
2181 can't represent them. */
2182 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2183 && REAL_VALUE_ISINF (value)
2184 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2185 TREE_OVERFLOW (t) = 1;
2186 else
2187 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2188 return t;
2191 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2192 to a floating point type. */
2194 static tree
2195 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2197 REAL_VALUE_TYPE value;
2198 tree t;
2200 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2201 &TREE_FIXED_CST (arg1));
2202 t = build_real (type, value);
2204 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2205 return t;
2208 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2209 to another fixed-point type. */
2211 static tree
2212 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2214 FIXED_VALUE_TYPE value;
2215 tree t;
2216 bool overflow_p;
2218 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2219 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2220 t = build_fixed (type, value);
2222 /* Propagate overflow flags. */
2223 if (overflow_p | TREE_OVERFLOW (arg1))
2224 TREE_OVERFLOW (t) = 1;
2225 return t;
2228 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2229 to a fixed-point type. */
2231 static tree
2232 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2234 FIXED_VALUE_TYPE value;
2235 tree t;
2236 bool overflow_p;
2237 double_int di;
2239 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2241 di.low = TREE_INT_CST_ELT (arg1, 0);
2242 if (TREE_INT_CST_NUNITS (arg1) == 1)
2243 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2244 else
2245 di.high = TREE_INT_CST_ELT (arg1, 1);
2247 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2248 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2258 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2259 to a fixed-point type. */
2261 static tree
2262 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2264 FIXED_VALUE_TYPE value;
2265 tree t;
2266 bool overflow_p;
2268 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2269 &TREE_REAL_CST (arg1),
2270 TYPE_SATURATING (type));
2271 t = build_fixed (type, value);
2273 /* Propagate overflow flags. */
2274 if (overflow_p | TREE_OVERFLOW (arg1))
2275 TREE_OVERFLOW (t) = 1;
2276 return t;
2279 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2280 type TYPE. If no simplification can be done return NULL_TREE. */
2282 static tree
2283 fold_convert_const (enum tree_code code, tree type, tree arg1)
2285 tree arg_type = TREE_TYPE (arg1);
2286 if (arg_type == type)
2287 return arg1;
2289 /* We can't widen types, since the runtime value could overflow the
2290 original type before being extended to the new type. */
2291 if (POLY_INT_CST_P (arg1)
2292 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2293 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2294 return build_poly_int_cst (type,
2295 poly_wide_int::from (poly_int_cst_value (arg1),
2296 TYPE_PRECISION (type),
2297 TYPE_SIGN (arg_type)));
2299 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2300 || TREE_CODE (type) == OFFSET_TYPE)
2302 if (TREE_CODE (arg1) == INTEGER_CST)
2303 return fold_convert_const_int_from_int (type, arg1);
2304 else if (TREE_CODE (arg1) == REAL_CST)
2305 return fold_convert_const_int_from_real (code, type, arg1);
2306 else if (TREE_CODE (arg1) == FIXED_CST)
2307 return fold_convert_const_int_from_fixed (type, arg1);
2309 else if (TREE_CODE (type) == REAL_TYPE)
2311 if (TREE_CODE (arg1) == INTEGER_CST)
2313 tree res = build_real_from_int_cst (type, arg1);
2314 /* Avoid the folding if flag_rounding_math is on and the
2315 conversion is not exact. */
2316 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2318 bool fail = false;
2319 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2320 TYPE_PRECISION (TREE_TYPE (arg1)));
2321 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2322 return NULL_TREE;
2324 return res;
2326 else if (TREE_CODE (arg1) == REAL_CST)
2327 return fold_convert_const_real_from_real (type, arg1);
2328 else if (TREE_CODE (arg1) == FIXED_CST)
2329 return fold_convert_const_real_from_fixed (type, arg1);
2331 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2333 if (TREE_CODE (arg1) == FIXED_CST)
2334 return fold_convert_const_fixed_from_fixed (type, arg1);
2335 else if (TREE_CODE (arg1) == INTEGER_CST)
2336 return fold_convert_const_fixed_from_int (type, arg1);
2337 else if (TREE_CODE (arg1) == REAL_CST)
2338 return fold_convert_const_fixed_from_real (type, arg1);
2340 else if (TREE_CODE (type) == VECTOR_TYPE)
2342 if (TREE_CODE (arg1) == VECTOR_CST
2343 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2345 tree elttype = TREE_TYPE (type);
2346 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2347 /* We can't handle steps directly when extending, since the
2348 values need to wrap at the original precision first. */
2349 bool step_ok_p
2350 = (INTEGRAL_TYPE_P (elttype)
2351 && INTEGRAL_TYPE_P (arg1_elttype)
2352 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2353 tree_vector_builder v;
2354 if (!v.new_unary_operation (type, arg1, step_ok_p))
2355 return NULL_TREE;
2356 unsigned int len = v.encoded_nelts ();
2357 for (unsigned int i = 0; i < len; ++i)
2359 tree elt = VECTOR_CST_ELT (arg1, i);
2360 tree cvt = fold_convert_const (code, elttype, elt);
2361 if (cvt == NULL_TREE)
2362 return NULL_TREE;
2363 v.quick_push (cvt);
2365 return v.build ();
2368 return NULL_TREE;
2371 /* Construct a vector of zero elements of vector type TYPE. */
2373 static tree
2374 build_zero_vector (tree type)
2376 tree t;
2378 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2379 return build_vector_from_val (type, t);
2382 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2384 bool
2385 fold_convertible_p (const_tree type, const_tree arg)
2387 tree orig = TREE_TYPE (arg);
2389 if (type == orig)
2390 return true;
2392 if (TREE_CODE (arg) == ERROR_MARK
2393 || TREE_CODE (type) == ERROR_MARK
2394 || TREE_CODE (orig) == ERROR_MARK)
2395 return false;
2397 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2398 return true;
2400 switch (TREE_CODE (type))
2402 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2403 case POINTER_TYPE: case REFERENCE_TYPE:
2404 case OFFSET_TYPE:
2405 return (INTEGRAL_TYPE_P (orig)
2406 || (POINTER_TYPE_P (orig)
2407 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2408 || TREE_CODE (orig) == OFFSET_TYPE);
2410 case REAL_TYPE:
2411 case FIXED_POINT_TYPE:
2412 case VOID_TYPE:
2413 return TREE_CODE (type) == TREE_CODE (orig);
2415 case VECTOR_TYPE:
2416 return (VECTOR_TYPE_P (orig)
2417 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2418 TYPE_VECTOR_SUBPARTS (orig))
2419 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2421 default:
2422 return false;
2426 /* Convert expression ARG to type TYPE. Used by the middle-end for
2427 simple conversions in preference to calling the front-end's convert. */
2429 tree
2430 fold_convert_loc (location_t loc, tree type, tree arg)
2432 tree orig = TREE_TYPE (arg);
2433 tree tem;
2435 if (type == orig)
2436 return arg;
2438 if (TREE_CODE (arg) == ERROR_MARK
2439 || TREE_CODE (type) == ERROR_MARK
2440 || TREE_CODE (orig) == ERROR_MARK)
2441 return error_mark_node;
2443 switch (TREE_CODE (type))
2445 case POINTER_TYPE:
2446 case REFERENCE_TYPE:
2447 /* Handle conversions between pointers to different address spaces. */
2448 if (POINTER_TYPE_P (orig)
2449 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2450 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2451 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2452 /* fall through */
2454 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2455 case OFFSET_TYPE:
2456 if (TREE_CODE (arg) == INTEGER_CST)
2458 tem = fold_convert_const (NOP_EXPR, type, arg);
2459 if (tem != NULL_TREE)
2460 return tem;
2462 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2463 || TREE_CODE (orig) == OFFSET_TYPE)
2464 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2465 if (TREE_CODE (orig) == COMPLEX_TYPE)
2466 return fold_convert_loc (loc, type,
2467 fold_build1_loc (loc, REALPART_EXPR,
2468 TREE_TYPE (orig), arg));
2469 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2470 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2471 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2473 case REAL_TYPE:
2474 if (TREE_CODE (arg) == INTEGER_CST)
2476 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2477 if (tem != NULL_TREE)
2478 return tem;
2480 else if (TREE_CODE (arg) == REAL_CST)
2482 tem = fold_convert_const (NOP_EXPR, type, arg);
2483 if (tem != NULL_TREE)
2484 return tem;
2486 else if (TREE_CODE (arg) == FIXED_CST)
2488 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2489 if (tem != NULL_TREE)
2490 return tem;
2493 switch (TREE_CODE (orig))
2495 case INTEGER_TYPE:
2496 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2497 case POINTER_TYPE: case REFERENCE_TYPE:
2498 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2500 case REAL_TYPE:
2501 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2503 case FIXED_POINT_TYPE:
2504 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2506 case COMPLEX_TYPE:
2507 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2508 return fold_convert_loc (loc, type, tem);
2510 default:
2511 gcc_unreachable ();
2514 case FIXED_POINT_TYPE:
2515 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2516 || TREE_CODE (arg) == REAL_CST)
2518 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2519 if (tem != NULL_TREE)
2520 goto fold_convert_exit;
2523 switch (TREE_CODE (orig))
2525 case FIXED_POINT_TYPE:
2526 case INTEGER_TYPE:
2527 case ENUMERAL_TYPE:
2528 case BOOLEAN_TYPE:
2529 case REAL_TYPE:
2530 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2532 case COMPLEX_TYPE:
2533 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2534 return fold_convert_loc (loc, type, tem);
2536 default:
2537 gcc_unreachable ();
2540 case COMPLEX_TYPE:
2541 switch (TREE_CODE (orig))
2543 case INTEGER_TYPE:
2544 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2545 case POINTER_TYPE: case REFERENCE_TYPE:
2546 case REAL_TYPE:
2547 case FIXED_POINT_TYPE:
2548 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2549 fold_convert_loc (loc, TREE_TYPE (type), arg),
2550 fold_convert_loc (loc, TREE_TYPE (type),
2551 integer_zero_node));
2552 case COMPLEX_TYPE:
2554 tree rpart, ipart;
2556 if (TREE_CODE (arg) == COMPLEX_EXPR)
2558 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2559 TREE_OPERAND (arg, 0));
2560 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2561 TREE_OPERAND (arg, 1));
2562 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2565 arg = save_expr (arg);
2566 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2567 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2568 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2569 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2570 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2573 default:
2574 gcc_unreachable ();
2577 case VECTOR_TYPE:
2578 if (integer_zerop (arg))
2579 return build_zero_vector (type);
2580 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2581 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2582 || TREE_CODE (orig) == VECTOR_TYPE);
2583 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2585 case VOID_TYPE:
2586 tem = fold_ignored_result (arg);
2587 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2589 default:
2590 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2591 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2592 gcc_unreachable ();
2594 fold_convert_exit:
2595 protected_set_expr_location_unshare (tem, loc);
2596 return tem;
2599 /* Return false if expr can be assumed not to be an lvalue, true
2600 otherwise. */
2602 static bool
2603 maybe_lvalue_p (const_tree x)
2605 /* We only need to wrap lvalue tree codes. */
2606 switch (TREE_CODE (x))
2608 case VAR_DECL:
2609 case PARM_DECL:
2610 case RESULT_DECL:
2611 case LABEL_DECL:
2612 case FUNCTION_DECL:
2613 case SSA_NAME:
2615 case COMPONENT_REF:
2616 case MEM_REF:
2617 case INDIRECT_REF:
2618 case ARRAY_REF:
2619 case ARRAY_RANGE_REF:
2620 case BIT_FIELD_REF:
2621 case OBJ_TYPE_REF:
2623 case REALPART_EXPR:
2624 case IMAGPART_EXPR:
2625 case PREINCREMENT_EXPR:
2626 case PREDECREMENT_EXPR:
2627 case SAVE_EXPR:
2628 case TRY_CATCH_EXPR:
2629 case WITH_CLEANUP_EXPR:
2630 case COMPOUND_EXPR:
2631 case MODIFY_EXPR:
2632 case TARGET_EXPR:
2633 case COND_EXPR:
2634 case BIND_EXPR:
2635 case VIEW_CONVERT_EXPR:
2636 break;
2638 default:
2639 /* Assume the worst for front-end tree codes. */
2640 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2641 break;
2642 return false;
2645 return true;
2648 /* Return an expr equal to X but certainly not valid as an lvalue. */
2650 tree
2651 non_lvalue_loc (location_t loc, tree x)
2653 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2654 us. */
2655 if (in_gimple_form)
2656 return x;
2658 if (! maybe_lvalue_p (x))
2659 return x;
2660 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2663 /* Given a tree comparison code, return the code that is the logical inverse.
2664 It is generally not safe to do this for floating-point comparisons, except
2665 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2666 ERROR_MARK in this case. */
2668 enum tree_code
2669 invert_tree_comparison (enum tree_code code, bool honor_nans)
2671 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2672 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2673 return ERROR_MARK;
2675 switch (code)
2677 case EQ_EXPR:
2678 return NE_EXPR;
2679 case NE_EXPR:
2680 return EQ_EXPR;
2681 case GT_EXPR:
2682 return honor_nans ? UNLE_EXPR : LE_EXPR;
2683 case GE_EXPR:
2684 return honor_nans ? UNLT_EXPR : LT_EXPR;
2685 case LT_EXPR:
2686 return honor_nans ? UNGE_EXPR : GE_EXPR;
2687 case LE_EXPR:
2688 return honor_nans ? UNGT_EXPR : GT_EXPR;
2689 case LTGT_EXPR:
2690 return UNEQ_EXPR;
2691 case UNEQ_EXPR:
2692 return LTGT_EXPR;
2693 case UNGT_EXPR:
2694 return LE_EXPR;
2695 case UNGE_EXPR:
2696 return LT_EXPR;
2697 case UNLT_EXPR:
2698 return GE_EXPR;
2699 case UNLE_EXPR:
2700 return GT_EXPR;
2701 case ORDERED_EXPR:
2702 return UNORDERED_EXPR;
2703 case UNORDERED_EXPR:
2704 return ORDERED_EXPR;
2705 default:
2706 gcc_unreachable ();
2710 /* Similar, but return the comparison that results if the operands are
2711 swapped. This is safe for floating-point. */
2713 enum tree_code
2714 swap_tree_comparison (enum tree_code code)
2716 switch (code)
2718 case EQ_EXPR:
2719 case NE_EXPR:
2720 case ORDERED_EXPR:
2721 case UNORDERED_EXPR:
2722 case LTGT_EXPR:
2723 case UNEQ_EXPR:
2724 return code;
2725 case GT_EXPR:
2726 return LT_EXPR;
2727 case GE_EXPR:
2728 return LE_EXPR;
2729 case LT_EXPR:
2730 return GT_EXPR;
2731 case LE_EXPR:
2732 return GE_EXPR;
2733 case UNGT_EXPR:
2734 return UNLT_EXPR;
2735 case UNGE_EXPR:
2736 return UNLE_EXPR;
2737 case UNLT_EXPR:
2738 return UNGT_EXPR;
2739 case UNLE_EXPR:
2740 return UNGE_EXPR;
2741 default:
2742 gcc_unreachable ();
2747 /* Convert a comparison tree code from an enum tree_code representation
2748 into a compcode bit-based encoding. This function is the inverse of
2749 compcode_to_comparison. */
2751 static enum comparison_code
2752 comparison_to_compcode (enum tree_code code)
2754 switch (code)
2756 case LT_EXPR:
2757 return COMPCODE_LT;
2758 case EQ_EXPR:
2759 return COMPCODE_EQ;
2760 case LE_EXPR:
2761 return COMPCODE_LE;
2762 case GT_EXPR:
2763 return COMPCODE_GT;
2764 case NE_EXPR:
2765 return COMPCODE_NE;
2766 case GE_EXPR:
2767 return COMPCODE_GE;
2768 case ORDERED_EXPR:
2769 return COMPCODE_ORD;
2770 case UNORDERED_EXPR:
2771 return COMPCODE_UNORD;
2772 case UNLT_EXPR:
2773 return COMPCODE_UNLT;
2774 case UNEQ_EXPR:
2775 return COMPCODE_UNEQ;
2776 case UNLE_EXPR:
2777 return COMPCODE_UNLE;
2778 case UNGT_EXPR:
2779 return COMPCODE_UNGT;
2780 case LTGT_EXPR:
2781 return COMPCODE_LTGT;
2782 case UNGE_EXPR:
2783 return COMPCODE_UNGE;
2784 default:
2785 gcc_unreachable ();
2789 /* Convert a compcode bit-based encoding of a comparison operator back
2790 to GCC's enum tree_code representation. This function is the
2791 inverse of comparison_to_compcode. */
2793 static enum tree_code
2794 compcode_to_comparison (enum comparison_code code)
2796 switch (code)
2798 case COMPCODE_LT:
2799 return LT_EXPR;
2800 case COMPCODE_EQ:
2801 return EQ_EXPR;
2802 case COMPCODE_LE:
2803 return LE_EXPR;
2804 case COMPCODE_GT:
2805 return GT_EXPR;
2806 case COMPCODE_NE:
2807 return NE_EXPR;
2808 case COMPCODE_GE:
2809 return GE_EXPR;
2810 case COMPCODE_ORD:
2811 return ORDERED_EXPR;
2812 case COMPCODE_UNORD:
2813 return UNORDERED_EXPR;
2814 case COMPCODE_UNLT:
2815 return UNLT_EXPR;
2816 case COMPCODE_UNEQ:
2817 return UNEQ_EXPR;
2818 case COMPCODE_UNLE:
2819 return UNLE_EXPR;
2820 case COMPCODE_UNGT:
2821 return UNGT_EXPR;
2822 case COMPCODE_LTGT:
2823 return LTGT_EXPR;
2824 case COMPCODE_UNGE:
2825 return UNGE_EXPR;
2826 default:
2827 gcc_unreachable ();
2831 /* Return true if COND1 tests the opposite condition of COND2. */
2833 bool
2834 inverse_conditions_p (const_tree cond1, const_tree cond2)
2836 return (COMPARISON_CLASS_P (cond1)
2837 && COMPARISON_CLASS_P (cond2)
2838 && (invert_tree_comparison
2839 (TREE_CODE (cond1),
2840 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2841 && operand_equal_p (TREE_OPERAND (cond1, 0),
2842 TREE_OPERAND (cond2, 0), 0)
2843 && operand_equal_p (TREE_OPERAND (cond1, 1),
2844 TREE_OPERAND (cond2, 1), 0));
2847 /* Return a tree for the comparison which is the combination of
2848 doing the AND or OR (depending on CODE) of the two operations LCODE
2849 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2850 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2851 if this makes the transformation invalid. */
2853 tree
2854 combine_comparisons (location_t loc,
2855 enum tree_code code, enum tree_code lcode,
2856 enum tree_code rcode, tree truth_type,
2857 tree ll_arg, tree lr_arg)
2859 bool honor_nans = HONOR_NANS (ll_arg);
2860 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2861 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2862 int compcode;
2864 switch (code)
2866 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2867 compcode = lcompcode & rcompcode;
2868 break;
2870 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2871 compcode = lcompcode | rcompcode;
2872 break;
2874 default:
2875 return NULL_TREE;
2878 if (!honor_nans)
2880 /* Eliminate unordered comparisons, as well as LTGT and ORD
2881 which are not used unless the mode has NaNs. */
2882 compcode &= ~COMPCODE_UNORD;
2883 if (compcode == COMPCODE_LTGT)
2884 compcode = COMPCODE_NE;
2885 else if (compcode == COMPCODE_ORD)
2886 compcode = COMPCODE_TRUE;
2888 else if (flag_trapping_math)
2890 /* Check that the original operation and the optimized ones will trap
2891 under the same condition. */
2892 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2893 && (lcompcode != COMPCODE_EQ)
2894 && (lcompcode != COMPCODE_ORD);
2895 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2896 && (rcompcode != COMPCODE_EQ)
2897 && (rcompcode != COMPCODE_ORD);
2898 bool trap = (compcode & COMPCODE_UNORD) == 0
2899 && (compcode != COMPCODE_EQ)
2900 && (compcode != COMPCODE_ORD);
2902 /* In a short-circuited boolean expression the LHS might be
2903 such that the RHS, if evaluated, will never trap. For
2904 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2905 if neither x nor y is NaN. (This is a mixed blessing: for
2906 example, the expression above will never trap, hence
2907 optimizing it to x < y would be invalid). */
2908 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2909 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2910 rtrap = false;
2912 /* If the comparison was short-circuited, and only the RHS
2913 trapped, we may now generate a spurious trap. */
2914 if (rtrap && !ltrap
2915 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2916 return NULL_TREE;
2918 /* If we changed the conditions that cause a trap, we lose. */
2919 if ((ltrap || rtrap) != trap)
2920 return NULL_TREE;
2923 if (compcode == COMPCODE_TRUE)
2924 return constant_boolean_node (true, truth_type);
2925 else if (compcode == COMPCODE_FALSE)
2926 return constant_boolean_node (false, truth_type);
2927 else
2929 enum tree_code tcode;
2931 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2932 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2936 /* Return nonzero if two operands (typically of the same tree node)
2937 are necessarily equal. FLAGS modifies behavior as follows:
2939 If OEP_ONLY_CONST is set, only return nonzero for constants.
2940 This function tests whether the operands are indistinguishable;
2941 it does not test whether they are equal using C's == operation.
2942 The distinction is important for IEEE floating point, because
2943 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2944 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2946 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2947 even though it may hold multiple values during a function.
2948 This is because a GCC tree node guarantees that nothing else is
2949 executed between the evaluation of its "operands" (which may often
2950 be evaluated in arbitrary order). Hence if the operands themselves
2951 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2952 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2953 unset means assuming isochronic (or instantaneous) tree equivalence.
2954 Unless comparing arbitrary expression trees, such as from different
2955 statements, this flag can usually be left unset.
2957 If OEP_PURE_SAME is set, then pure functions with identical arguments
2958 are considered the same. It is used when the caller has other ways
2959 to ensure that global memory is unchanged in between.
2961 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2962 not values of expressions.
2964 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2965 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2967 If OEP_BITWISE is set, then require the values to be bitwise identical
2968 rather than simply numerically equal. Do not take advantage of things
2969 like math-related flags or undefined behavior; only return true for
2970 values that are provably bitwise identical in all circumstances.
2972 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2973 any operand with side effect. This is unnecesarily conservative in the
2974 case we know that arg0 and arg1 are in disjoint code paths (such as in
2975 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2976 addresses with TREE_CONSTANT flag set so we know that &var == &var
2977 even if var is volatile. */
2979 bool
2980 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2981 unsigned int flags)
2983 bool r;
2984 if (verify_hash_value (arg0, arg1, flags, &r))
2985 return r;
2987 STRIP_ANY_LOCATION_WRAPPER (arg0);
2988 STRIP_ANY_LOCATION_WRAPPER (arg1);
2990 /* If either is ERROR_MARK, they aren't equal. */
2991 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2992 || TREE_TYPE (arg0) == error_mark_node
2993 || TREE_TYPE (arg1) == error_mark_node)
2994 return false;
2996 /* Similar, if either does not have a type (like a template id),
2997 they aren't equal. */
2998 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2999 return false;
3001 /* Bitwise identity makes no sense if the values have different layouts. */
3002 if ((flags & OEP_BITWISE)
3003 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3004 return false;
3006 /* We cannot consider pointers to different address space equal. */
3007 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3008 && POINTER_TYPE_P (TREE_TYPE (arg1))
3009 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3010 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3011 return false;
3013 /* Check equality of integer constants before bailing out due to
3014 precision differences. */
3015 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3017 /* Address of INTEGER_CST is not defined; check that we did not forget
3018 to drop the OEP_ADDRESS_OF flags. */
3019 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3020 return tree_int_cst_equal (arg0, arg1);
3023 if (!(flags & OEP_ADDRESS_OF))
3025 /* If both types don't have the same signedness, then we can't consider
3026 them equal. We must check this before the STRIP_NOPS calls
3027 because they may change the signedness of the arguments. As pointers
3028 strictly don't have a signedness, require either two pointers or
3029 two non-pointers as well. */
3030 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3031 || POINTER_TYPE_P (TREE_TYPE (arg0))
3032 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3033 return false;
3035 /* If both types don't have the same precision, then it is not safe
3036 to strip NOPs. */
3037 if (element_precision (TREE_TYPE (arg0))
3038 != element_precision (TREE_TYPE (arg1)))
3039 return false;
3041 STRIP_NOPS (arg0);
3042 STRIP_NOPS (arg1);
3044 #if 0
3045 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3046 sanity check once the issue is solved. */
3047 else
3048 /* Addresses of conversions and SSA_NAMEs (and many other things)
3049 are not defined. Check that we did not forget to drop the
3050 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3051 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3052 && TREE_CODE (arg0) != SSA_NAME);
3053 #endif
3055 /* In case both args are comparisons but with different comparison
3056 code, try to swap the comparison operands of one arg to produce
3057 a match and compare that variant. */
3058 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3059 && COMPARISON_CLASS_P (arg0)
3060 && COMPARISON_CLASS_P (arg1))
3062 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3064 if (TREE_CODE (arg0) == swap_code)
3065 return operand_equal_p (TREE_OPERAND (arg0, 0),
3066 TREE_OPERAND (arg1, 1), flags)
3067 && operand_equal_p (TREE_OPERAND (arg0, 1),
3068 TREE_OPERAND (arg1, 0), flags);
3071 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3073 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3074 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3076 else if (flags & OEP_ADDRESS_OF)
3078 /* If we are interested in comparing addresses ignore
3079 MEM_REF wrappings of the base that can appear just for
3080 TBAA reasons. */
3081 if (TREE_CODE (arg0) == MEM_REF
3082 && DECL_P (arg1)
3083 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3084 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3085 && integer_zerop (TREE_OPERAND (arg0, 1)))
3086 return true;
3087 else if (TREE_CODE (arg1) == MEM_REF
3088 && DECL_P (arg0)
3089 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3090 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3091 && integer_zerop (TREE_OPERAND (arg1, 1)))
3092 return true;
3093 return false;
3095 else
3096 return false;
3099 /* When not checking adddresses, this is needed for conversions and for
3100 COMPONENT_REF. Might as well play it safe and always test this. */
3101 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3102 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3103 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3104 && !(flags & OEP_ADDRESS_OF)))
3105 return false;
3107 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3108 We don't care about side effects in that case because the SAVE_EXPR
3109 takes care of that for us. In all other cases, two expressions are
3110 equal if they have no side effects. If we have two identical
3111 expressions with side effects that should be treated the same due
3112 to the only side effects being identical SAVE_EXPR's, that will
3113 be detected in the recursive calls below.
3114 If we are taking an invariant address of two identical objects
3115 they are necessarily equal as well. */
3116 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3117 && (TREE_CODE (arg0) == SAVE_EXPR
3118 || (flags & OEP_MATCH_SIDE_EFFECTS)
3119 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3120 return true;
3122 /* Next handle constant cases, those for which we can return 1 even
3123 if ONLY_CONST is set. */
3124 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3125 switch (TREE_CODE (arg0))
3127 case INTEGER_CST:
3128 return tree_int_cst_equal (arg0, arg1);
3130 case FIXED_CST:
3131 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3132 TREE_FIXED_CST (arg1));
3134 case REAL_CST:
3135 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3136 return true;
3138 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3140 /* If we do not distinguish between signed and unsigned zero,
3141 consider them equal. */
3142 if (real_zerop (arg0) && real_zerop (arg1))
3143 return true;
3145 return false;
3147 case VECTOR_CST:
3149 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3150 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3151 return false;
3153 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3154 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3155 return false;
3157 unsigned int count = vector_cst_encoded_nelts (arg0);
3158 for (unsigned int i = 0; i < count; ++i)
3159 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3160 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3161 return false;
3162 return true;
3165 case COMPLEX_CST:
3166 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3167 flags)
3168 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3169 flags));
3171 case STRING_CST:
3172 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3173 && ! memcmp (TREE_STRING_POINTER (arg0),
3174 TREE_STRING_POINTER (arg1),
3175 TREE_STRING_LENGTH (arg0)));
3177 case ADDR_EXPR:
3178 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3179 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3180 flags | OEP_ADDRESS_OF
3181 | OEP_MATCH_SIDE_EFFECTS);
3182 case CONSTRUCTOR:
3183 /* In GIMPLE empty constructors are allowed in initializers of
3184 aggregates. */
3185 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3186 default:
3187 break;
3190 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3191 two instances of undefined behavior will give identical results. */
3192 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3193 return false;
3195 /* Define macros to test an operand from arg0 and arg1 for equality and a
3196 variant that allows null and views null as being different from any
3197 non-null value. In the latter case, if either is null, the both
3198 must be; otherwise, do the normal comparison. */
3199 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3200 TREE_OPERAND (arg1, N), flags)
3202 #define OP_SAME_WITH_NULL(N) \
3203 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3204 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3206 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3208 case tcc_unary:
3209 /* Two conversions are equal only if signedness and modes match. */
3210 switch (TREE_CODE (arg0))
3212 CASE_CONVERT:
3213 case FIX_TRUNC_EXPR:
3214 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3215 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3216 return false;
3217 break;
3218 default:
3219 break;
3222 return OP_SAME (0);
3225 case tcc_comparison:
3226 case tcc_binary:
3227 if (OP_SAME (0) && OP_SAME (1))
3228 return true;
3230 /* For commutative ops, allow the other order. */
3231 return (commutative_tree_code (TREE_CODE (arg0))
3232 && operand_equal_p (TREE_OPERAND (arg0, 0),
3233 TREE_OPERAND (arg1, 1), flags)
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 0), flags));
3237 case tcc_reference:
3238 /* If either of the pointer (or reference) expressions we are
3239 dereferencing contain a side effect, these cannot be equal,
3240 but their addresses can be. */
3241 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3242 && (TREE_SIDE_EFFECTS (arg0)
3243 || TREE_SIDE_EFFECTS (arg1)))
3244 return false;
3246 switch (TREE_CODE (arg0))
3248 case INDIRECT_REF:
3249 if (!(flags & OEP_ADDRESS_OF))
3251 if (TYPE_ALIGN (TREE_TYPE (arg0))
3252 != TYPE_ALIGN (TREE_TYPE (arg1)))
3253 return false;
3254 /* Verify that the access types are compatible. */
3255 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3256 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3257 return false;
3259 flags &= ~OEP_ADDRESS_OF;
3260 return OP_SAME (0);
3262 case IMAGPART_EXPR:
3263 /* Require the same offset. */
3264 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3265 TYPE_SIZE (TREE_TYPE (arg1)),
3266 flags & ~OEP_ADDRESS_OF))
3267 return false;
3269 /* Fallthru. */
3270 case REALPART_EXPR:
3271 case VIEW_CONVERT_EXPR:
3272 return OP_SAME (0);
3274 case TARGET_MEM_REF:
3275 case MEM_REF:
3276 if (!(flags & OEP_ADDRESS_OF))
3278 /* Require equal access sizes */
3279 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3280 && (!TYPE_SIZE (TREE_TYPE (arg0))
3281 || !TYPE_SIZE (TREE_TYPE (arg1))
3282 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3283 TYPE_SIZE (TREE_TYPE (arg1)),
3284 flags)))
3285 return false;
3286 /* Verify that access happens in similar types. */
3287 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3288 return false;
3289 /* Verify that accesses are TBAA compatible. */
3290 if (!alias_ptr_types_compatible_p
3291 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3292 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3293 || (MR_DEPENDENCE_CLIQUE (arg0)
3294 != MR_DEPENDENCE_CLIQUE (arg1))
3295 || (MR_DEPENDENCE_BASE (arg0)
3296 != MR_DEPENDENCE_BASE (arg1)))
3297 return false;
3298 /* Verify that alignment is compatible. */
3299 if (TYPE_ALIGN (TREE_TYPE (arg0))
3300 != TYPE_ALIGN (TREE_TYPE (arg1)))
3301 return false;
3303 flags &= ~OEP_ADDRESS_OF;
3304 return (OP_SAME (0) && OP_SAME (1)
3305 /* TARGET_MEM_REF require equal extra operands. */
3306 && (TREE_CODE (arg0) != TARGET_MEM_REF
3307 || (OP_SAME_WITH_NULL (2)
3308 && OP_SAME_WITH_NULL (3)
3309 && OP_SAME_WITH_NULL (4))));
3311 case ARRAY_REF:
3312 case ARRAY_RANGE_REF:
3313 if (!OP_SAME (0))
3314 return false;
3315 flags &= ~OEP_ADDRESS_OF;
3316 /* Compare the array index by value if it is constant first as we
3317 may have different types but same value here. */
3318 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3319 TREE_OPERAND (arg1, 1))
3320 || OP_SAME (1))
3321 && OP_SAME_WITH_NULL (2)
3322 && OP_SAME_WITH_NULL (3)
3323 /* Compare low bound and element size as with OEP_ADDRESS_OF
3324 we have to account for the offset of the ref. */
3325 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3326 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3327 || (operand_equal_p (array_ref_low_bound
3328 (CONST_CAST_TREE (arg0)),
3329 array_ref_low_bound
3330 (CONST_CAST_TREE (arg1)), flags)
3331 && operand_equal_p (array_ref_element_size
3332 (CONST_CAST_TREE (arg0)),
3333 array_ref_element_size
3334 (CONST_CAST_TREE (arg1)),
3335 flags))));
3337 case COMPONENT_REF:
3338 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3339 may be NULL when we're called to compare MEM_EXPRs. */
3340 if (!OP_SAME_WITH_NULL (0))
3341 return false;
3343 bool compare_address = flags & OEP_ADDRESS_OF;
3345 /* Most of time we only need to compare FIELD_DECLs for equality.
3346 However when determining address look into actual offsets.
3347 These may match for unions and unshared record types. */
3348 flags &= ~OEP_ADDRESS_OF;
3349 if (!OP_SAME (1))
3351 if (compare_address
3352 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3354 if (TREE_OPERAND (arg0, 2)
3355 || TREE_OPERAND (arg1, 2))
3356 return OP_SAME_WITH_NULL (2);
3357 tree field0 = TREE_OPERAND (arg0, 1);
3358 tree field1 = TREE_OPERAND (arg1, 1);
3360 /* Non-FIELD_DECL operands can appear in C++ templates. */
3361 if (TREE_CODE (field0) != FIELD_DECL
3362 || TREE_CODE (field1) != FIELD_DECL
3363 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3364 DECL_FIELD_OFFSET (field1), flags)
3365 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3366 DECL_FIELD_BIT_OFFSET (field1),
3367 flags))
3368 return false;
3370 else
3371 return false;
3374 return OP_SAME_WITH_NULL (2);
3376 case BIT_FIELD_REF:
3377 if (!OP_SAME (0))
3378 return false;
3379 flags &= ~OEP_ADDRESS_OF;
3380 return OP_SAME (1) && OP_SAME (2);
3382 default:
3383 return false;
3386 case tcc_expression:
3387 switch (TREE_CODE (arg0))
3389 case ADDR_EXPR:
3390 /* Be sure we pass right ADDRESS_OF flag. */
3391 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3392 return operand_equal_p (TREE_OPERAND (arg0, 0),
3393 TREE_OPERAND (arg1, 0),
3394 flags | OEP_ADDRESS_OF);
3396 case TRUTH_NOT_EXPR:
3397 return OP_SAME (0);
3399 case TRUTH_ANDIF_EXPR:
3400 case TRUTH_ORIF_EXPR:
3401 return OP_SAME (0) && OP_SAME (1);
3403 case WIDEN_MULT_PLUS_EXPR:
3404 case WIDEN_MULT_MINUS_EXPR:
3405 if (!OP_SAME (2))
3406 return false;
3407 /* The multiplcation operands are commutative. */
3408 /* FALLTHRU */
3410 case TRUTH_AND_EXPR:
3411 case TRUTH_OR_EXPR:
3412 case TRUTH_XOR_EXPR:
3413 if (OP_SAME (0) && OP_SAME (1))
3414 return true;
3416 /* Otherwise take into account this is a commutative operation. */
3417 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3418 TREE_OPERAND (arg1, 1), flags)
3419 && operand_equal_p (TREE_OPERAND (arg0, 1),
3420 TREE_OPERAND (arg1, 0), flags));
3422 case COND_EXPR:
3423 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3424 return false;
3425 flags &= ~OEP_ADDRESS_OF;
3426 return OP_SAME (0);
3428 case BIT_INSERT_EXPR:
3429 /* BIT_INSERT_EXPR has an implict operand as the type precision
3430 of op1. Need to check to make sure they are the same. */
3431 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3432 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3433 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3434 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3435 return false;
3436 /* FALLTHRU */
3438 case VEC_COND_EXPR:
3439 case DOT_PROD_EXPR:
3440 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3442 case MODIFY_EXPR:
3443 case INIT_EXPR:
3444 case COMPOUND_EXPR:
3445 case PREDECREMENT_EXPR:
3446 case PREINCREMENT_EXPR:
3447 case POSTDECREMENT_EXPR:
3448 case POSTINCREMENT_EXPR:
3449 if (flags & OEP_LEXICOGRAPHIC)
3450 return OP_SAME (0) && OP_SAME (1);
3451 return false;
3453 case CLEANUP_POINT_EXPR:
3454 case EXPR_STMT:
3455 case SAVE_EXPR:
3456 if (flags & OEP_LEXICOGRAPHIC)
3457 return OP_SAME (0);
3458 return false;
3460 case OBJ_TYPE_REF:
3461 /* Virtual table reference. */
3462 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3463 OBJ_TYPE_REF_EXPR (arg1), flags))
3464 return false;
3465 flags &= ~OEP_ADDRESS_OF;
3466 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3467 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3468 return false;
3469 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3470 OBJ_TYPE_REF_OBJECT (arg1), flags))
3471 return false;
3472 if (virtual_method_call_p (arg0))
3474 if (!virtual_method_call_p (arg1))
3475 return false;
3476 return types_same_for_odr (obj_type_ref_class (arg0),
3477 obj_type_ref_class (arg1));
3479 return false;
3481 default:
3482 return false;
3485 case tcc_vl_exp:
3486 switch (TREE_CODE (arg0))
3488 case CALL_EXPR:
3489 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3490 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3491 /* If not both CALL_EXPRs are either internal or normal function
3492 functions, then they are not equal. */
3493 return false;
3494 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3496 /* If the CALL_EXPRs call different internal functions, then they
3497 are not equal. */
3498 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3499 return false;
3501 else
3503 /* If the CALL_EXPRs call different functions, then they are not
3504 equal. */
3505 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3506 flags))
3507 return false;
3510 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3512 unsigned int cef = call_expr_flags (arg0);
3513 if (flags & OEP_PURE_SAME)
3514 cef &= ECF_CONST | ECF_PURE;
3515 else
3516 cef &= ECF_CONST;
3517 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3518 return false;
3521 /* Now see if all the arguments are the same. */
3523 const_call_expr_arg_iterator iter0, iter1;
3524 const_tree a0, a1;
3525 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3526 a1 = first_const_call_expr_arg (arg1, &iter1);
3527 a0 && a1;
3528 a0 = next_const_call_expr_arg (&iter0),
3529 a1 = next_const_call_expr_arg (&iter1))
3530 if (! operand_equal_p (a0, a1, flags))
3531 return false;
3533 /* If we get here and both argument lists are exhausted
3534 then the CALL_EXPRs are equal. */
3535 return ! (a0 || a1);
3537 default:
3538 return false;
3541 case tcc_declaration:
3542 /* Consider __builtin_sqrt equal to sqrt. */
3543 if (TREE_CODE (arg0) == FUNCTION_DECL)
3544 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3545 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3546 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3547 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3549 if (DECL_P (arg0)
3550 && (flags & OEP_DECL_NAME)
3551 && (flags & OEP_LEXICOGRAPHIC))
3553 /* Consider decls with the same name equal. The caller needs
3554 to make sure they refer to the same entity (such as a function
3555 formal parameter). */
3556 tree a0name = DECL_NAME (arg0);
3557 tree a1name = DECL_NAME (arg1);
3558 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3559 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3560 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3562 return false;
3564 case tcc_exceptional:
3565 if (TREE_CODE (arg0) == CONSTRUCTOR)
3567 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3568 return false;
3570 /* In GIMPLE constructors are used only to build vectors from
3571 elements. Individual elements in the constructor must be
3572 indexed in increasing order and form an initial sequence.
3574 We make no effort to compare constructors in generic.
3575 (see sem_variable::equals in ipa-icf which can do so for
3576 constants). */
3577 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3578 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3579 return false;
3581 /* Be sure that vectors constructed have the same representation.
3582 We only tested element precision and modes to match.
3583 Vectors may be BLKmode and thus also check that the number of
3584 parts match. */
3585 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3586 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3587 return false;
3589 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3590 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3591 unsigned int len = vec_safe_length (v0);
3593 if (len != vec_safe_length (v1))
3594 return false;
3596 for (unsigned int i = 0; i < len; i++)
3598 constructor_elt *c0 = &(*v0)[i];
3599 constructor_elt *c1 = &(*v1)[i];
3601 if (!operand_equal_p (c0->value, c1->value, flags)
3602 /* In GIMPLE the indexes can be either NULL or matching i.
3603 Double check this so we won't get false
3604 positives for GENERIC. */
3605 || (c0->index
3606 && (TREE_CODE (c0->index) != INTEGER_CST
3607 || compare_tree_int (c0->index, i)))
3608 || (c1->index
3609 && (TREE_CODE (c1->index) != INTEGER_CST
3610 || compare_tree_int (c1->index, i))))
3611 return false;
3613 return true;
3615 else if (TREE_CODE (arg0) == STATEMENT_LIST
3616 && (flags & OEP_LEXICOGRAPHIC))
3618 /* Compare the STATEMENT_LISTs. */
3619 tree_stmt_iterator tsi1, tsi2;
3620 tree body1 = CONST_CAST_TREE (arg0);
3621 tree body2 = CONST_CAST_TREE (arg1);
3622 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3623 tsi_next (&tsi1), tsi_next (&tsi2))
3625 /* The lists don't have the same number of statements. */
3626 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3627 return false;
3628 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3629 return true;
3630 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3631 flags & (OEP_LEXICOGRAPHIC
3632 | OEP_NO_HASH_CHECK)))
3633 return false;
3636 return false;
3638 case tcc_statement:
3639 switch (TREE_CODE (arg0))
3641 case RETURN_EXPR:
3642 if (flags & OEP_LEXICOGRAPHIC)
3643 return OP_SAME_WITH_NULL (0);
3644 return false;
3645 case DEBUG_BEGIN_STMT:
3646 if (flags & OEP_LEXICOGRAPHIC)
3647 return true;
3648 return false;
3649 default:
3650 return false;
3653 default:
3654 return false;
3657 #undef OP_SAME
3658 #undef OP_SAME_WITH_NULL
3661 /* Generate a hash value for an expression. This can be used iteratively
3662 by passing a previous result as the HSTATE argument. */
3664 void
3665 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3666 unsigned int flags)
3668 int i;
3669 enum tree_code code;
3670 enum tree_code_class tclass;
3672 if (t == NULL_TREE || t == error_mark_node)
3674 hstate.merge_hash (0);
3675 return;
3678 STRIP_ANY_LOCATION_WRAPPER (t);
3680 if (!(flags & OEP_ADDRESS_OF))
3681 STRIP_NOPS (t);
3683 code = TREE_CODE (t);
3685 switch (code)
3687 /* Alas, constants aren't shared, so we can't rely on pointer
3688 identity. */
3689 case VOID_CST:
3690 hstate.merge_hash (0);
3691 return;
3692 case INTEGER_CST:
3693 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3694 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3695 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3696 return;
3697 case REAL_CST:
3699 unsigned int val2;
3700 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3701 val2 = rvc_zero;
3702 else
3703 val2 = real_hash (TREE_REAL_CST_PTR (t));
3704 hstate.merge_hash (val2);
3705 return;
3707 case FIXED_CST:
3709 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3710 hstate.merge_hash (val2);
3711 return;
3713 case STRING_CST:
3714 hstate.add ((const void *) TREE_STRING_POINTER (t),
3715 TREE_STRING_LENGTH (t));
3716 return;
3717 case COMPLEX_CST:
3718 hash_operand (TREE_REALPART (t), hstate, flags);
3719 hash_operand (TREE_IMAGPART (t), hstate, flags);
3720 return;
3721 case VECTOR_CST:
3723 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3724 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3725 unsigned int count = vector_cst_encoded_nelts (t);
3726 for (unsigned int i = 0; i < count; ++i)
3727 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3728 return;
3730 case SSA_NAME:
3731 /* We can just compare by pointer. */
3732 hstate.add_hwi (SSA_NAME_VERSION (t));
3733 return;
3734 case PLACEHOLDER_EXPR:
3735 /* The node itself doesn't matter. */
3736 return;
3737 case BLOCK:
3738 case OMP_CLAUSE:
3739 /* Ignore. */
3740 return;
3741 case TREE_LIST:
3742 /* A list of expressions, for a CALL_EXPR or as the elements of a
3743 VECTOR_CST. */
3744 for (; t; t = TREE_CHAIN (t))
3745 hash_operand (TREE_VALUE (t), hstate, flags);
3746 return;
3747 case CONSTRUCTOR:
3749 unsigned HOST_WIDE_INT idx;
3750 tree field, value;
3751 flags &= ~OEP_ADDRESS_OF;
3752 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3753 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3755 /* In GIMPLE the indexes can be either NULL or matching i. */
3756 if (field == NULL_TREE)
3757 field = bitsize_int (idx);
3758 hash_operand (field, hstate, flags);
3759 hash_operand (value, hstate, flags);
3761 return;
3763 case STATEMENT_LIST:
3765 tree_stmt_iterator i;
3766 for (i = tsi_start (CONST_CAST_TREE (t));
3767 !tsi_end_p (i); tsi_next (&i))
3768 hash_operand (tsi_stmt (i), hstate, flags);
3769 return;
3771 case TREE_VEC:
3772 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3773 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3774 return;
3775 case IDENTIFIER_NODE:
3776 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3777 return;
3778 case FUNCTION_DECL:
3779 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3780 Otherwise nodes that compare equal according to operand_equal_p might
3781 get different hash codes. However, don't do this for machine specific
3782 or front end builtins, since the function code is overloaded in those
3783 cases. */
3784 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3785 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3787 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3788 code = TREE_CODE (t);
3790 /* FALL THROUGH */
3791 default:
3792 if (POLY_INT_CST_P (t))
3794 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3795 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3796 return;
3798 tclass = TREE_CODE_CLASS (code);
3800 if (tclass == tcc_declaration)
3802 /* DECL's have a unique ID */
3803 hstate.add_hwi (DECL_UID (t));
3805 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3807 /* For comparisons that can be swapped, use the lower
3808 tree code. */
3809 enum tree_code ccode = swap_tree_comparison (code);
3810 if (code < ccode)
3811 ccode = code;
3812 hstate.add_object (ccode);
3813 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3814 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3816 else if (CONVERT_EXPR_CODE_P (code))
3818 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3819 operand_equal_p. */
3820 enum tree_code ccode = NOP_EXPR;
3821 hstate.add_object (ccode);
3823 /* Don't hash the type, that can lead to having nodes which
3824 compare equal according to operand_equal_p, but which
3825 have different hash codes. Make sure to include signedness
3826 in the hash computation. */
3827 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3828 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3830 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3831 else if (code == MEM_REF
3832 && (flags & OEP_ADDRESS_OF) != 0
3833 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3834 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3835 && integer_zerop (TREE_OPERAND (t, 1)))
3836 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3837 hstate, flags);
3838 /* Don't ICE on FE specific trees, or their arguments etc.
3839 during operand_equal_p hash verification. */
3840 else if (!IS_EXPR_CODE_CLASS (tclass))
3841 gcc_assert (flags & OEP_HASH_CHECK);
3842 else
3844 unsigned int sflags = flags;
3846 hstate.add_object (code);
3848 switch (code)
3850 case ADDR_EXPR:
3851 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3852 flags |= OEP_ADDRESS_OF;
3853 sflags = flags;
3854 break;
3856 case INDIRECT_REF:
3857 case MEM_REF:
3858 case TARGET_MEM_REF:
3859 flags &= ~OEP_ADDRESS_OF;
3860 sflags = flags;
3861 break;
3863 case COMPONENT_REF:
3864 if (sflags & OEP_ADDRESS_OF)
3866 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3867 if (TREE_OPERAND (t, 2))
3868 hash_operand (TREE_OPERAND (t, 2), hstate,
3869 flags & ~OEP_ADDRESS_OF);
3870 else
3872 tree field = TREE_OPERAND (t, 1);
3873 hash_operand (DECL_FIELD_OFFSET (field),
3874 hstate, flags & ~OEP_ADDRESS_OF);
3875 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3876 hstate, flags & ~OEP_ADDRESS_OF);
3878 return;
3880 break;
3881 case ARRAY_REF:
3882 case ARRAY_RANGE_REF:
3883 case BIT_FIELD_REF:
3884 sflags &= ~OEP_ADDRESS_OF;
3885 break;
3887 case COND_EXPR:
3888 flags &= ~OEP_ADDRESS_OF;
3889 break;
3891 case WIDEN_MULT_PLUS_EXPR:
3892 case WIDEN_MULT_MINUS_EXPR:
3894 /* The multiplication operands are commutative. */
3895 inchash::hash one, two;
3896 hash_operand (TREE_OPERAND (t, 0), one, flags);
3897 hash_operand (TREE_OPERAND (t, 1), two, flags);
3898 hstate.add_commutative (one, two);
3899 hash_operand (TREE_OPERAND (t, 2), two, flags);
3900 return;
3903 case CALL_EXPR:
3904 if (CALL_EXPR_FN (t) == NULL_TREE)
3905 hstate.add_int (CALL_EXPR_IFN (t));
3906 break;
3908 case TARGET_EXPR:
3909 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3910 Usually different TARGET_EXPRs just should use
3911 different temporaries in their slots. */
3912 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3913 return;
3915 case OBJ_TYPE_REF:
3916 /* Virtual table reference. */
3917 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3918 flags &= ~OEP_ADDRESS_OF;
3919 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3920 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3921 if (!virtual_method_call_p (t))
3922 return;
3923 if (tree c = obj_type_ref_class (t))
3925 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3926 /* We compute mangled names only when free_lang_data is run.
3927 In that case we can hash precisely. */
3928 if (TREE_CODE (c) == TYPE_DECL
3929 && DECL_ASSEMBLER_NAME_SET_P (c))
3930 hstate.add_object
3931 (IDENTIFIER_HASH_VALUE
3932 (DECL_ASSEMBLER_NAME (c)));
3934 return;
3935 default:
3936 break;
3939 /* Don't hash the type, that can lead to having nodes which
3940 compare equal according to operand_equal_p, but which
3941 have different hash codes. */
3942 if (code == NON_LVALUE_EXPR)
3944 /* Make sure to include signness in the hash computation. */
3945 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3946 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3949 else if (commutative_tree_code (code))
3951 /* It's a commutative expression. We want to hash it the same
3952 however it appears. We do this by first hashing both operands
3953 and then rehashing based on the order of their independent
3954 hashes. */
3955 inchash::hash one, two;
3956 hash_operand (TREE_OPERAND (t, 0), one, flags);
3957 hash_operand (TREE_OPERAND (t, 1), two, flags);
3958 hstate.add_commutative (one, two);
3960 else
3961 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3962 hash_operand (TREE_OPERAND (t, i), hstate,
3963 i == 0 ? flags : sflags);
3965 return;
3969 bool
3970 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3971 unsigned int flags, bool *ret)
3973 /* When checking and unless comparing DECL names, verify that if
3974 the outermost operand_equal_p call returns non-zero then ARG0
3975 and ARG1 have the same hash value. */
3976 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3978 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3980 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3982 inchash::hash hstate0 (0), hstate1 (0);
3983 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3984 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3985 hashval_t h0 = hstate0.end ();
3986 hashval_t h1 = hstate1.end ();
3987 gcc_assert (h0 == h1);
3989 *ret = true;
3991 else
3992 *ret = false;
3994 return true;
3997 return false;
4001 static operand_compare default_compare_instance;
4003 /* Conveinece wrapper around operand_compare class because usually we do
4004 not need to play with the valueizer. */
4006 bool
4007 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4009 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4012 namespace inchash
4015 /* Generate a hash value for an expression. This can be used iteratively
4016 by passing a previous result as the HSTATE argument.
4018 This function is intended to produce the same hash for expressions which
4019 would compare equal using operand_equal_p. */
4020 void
4021 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4023 default_compare_instance.hash_operand (t, hstate, flags);
4028 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4029 with a different signedness or a narrower precision. */
4031 static bool
4032 operand_equal_for_comparison_p (tree arg0, tree arg1)
4034 if (operand_equal_p (arg0, arg1, 0))
4035 return true;
4037 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4038 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4039 return false;
4041 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4042 and see if the inner values are the same. This removes any
4043 signedness comparison, which doesn't matter here. */
4044 tree op0 = arg0;
4045 tree op1 = arg1;
4046 STRIP_NOPS (op0);
4047 STRIP_NOPS (op1);
4048 if (operand_equal_p (op0, op1, 0))
4049 return true;
4051 /* Discard a single widening conversion from ARG1 and see if the inner
4052 value is the same as ARG0. */
4053 if (CONVERT_EXPR_P (arg1)
4054 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4055 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4056 < TYPE_PRECISION (TREE_TYPE (arg1))
4057 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4058 return true;
4060 return false;
4063 /* See if ARG is an expression that is either a comparison or is performing
4064 arithmetic on comparisons. The comparisons must only be comparing
4065 two different values, which will be stored in *CVAL1 and *CVAL2; if
4066 they are nonzero it means that some operands have already been found.
4067 No variables may be used anywhere else in the expression except in the
4068 comparisons.
4070 If this is true, return 1. Otherwise, return zero. */
4072 static bool
4073 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4075 enum tree_code code = TREE_CODE (arg);
4076 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4078 /* We can handle some of the tcc_expression cases here. */
4079 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4080 tclass = tcc_unary;
4081 else if (tclass == tcc_expression
4082 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4083 || code == COMPOUND_EXPR))
4084 tclass = tcc_binary;
4086 switch (tclass)
4088 case tcc_unary:
4089 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4091 case tcc_binary:
4092 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4093 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4095 case tcc_constant:
4096 return true;
4098 case tcc_expression:
4099 if (code == COND_EXPR)
4100 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4101 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4102 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4103 return false;
4105 case tcc_comparison:
4106 /* First see if we can handle the first operand, then the second. For
4107 the second operand, we know *CVAL1 can't be zero. It must be that
4108 one side of the comparison is each of the values; test for the
4109 case where this isn't true by failing if the two operands
4110 are the same. */
4112 if (operand_equal_p (TREE_OPERAND (arg, 0),
4113 TREE_OPERAND (arg, 1), 0))
4114 return false;
4116 if (*cval1 == 0)
4117 *cval1 = TREE_OPERAND (arg, 0);
4118 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4120 else if (*cval2 == 0)
4121 *cval2 = TREE_OPERAND (arg, 0);
4122 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4124 else
4125 return false;
4127 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4129 else if (*cval2 == 0)
4130 *cval2 = TREE_OPERAND (arg, 1);
4131 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4133 else
4134 return false;
4136 return true;
4138 default:
4139 return false;
4143 /* ARG is a tree that is known to contain just arithmetic operations and
4144 comparisons. Evaluate the operations in the tree substituting NEW0 for
4145 any occurrence of OLD0 as an operand of a comparison and likewise for
4146 NEW1 and OLD1. */
4148 static tree
4149 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4150 tree old1, tree new1)
4152 tree type = TREE_TYPE (arg);
4153 enum tree_code code = TREE_CODE (arg);
4154 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4156 /* We can handle some of the tcc_expression cases here. */
4157 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4158 tclass = tcc_unary;
4159 else if (tclass == tcc_expression
4160 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4161 tclass = tcc_binary;
4163 switch (tclass)
4165 case tcc_unary:
4166 return fold_build1_loc (loc, code, type,
4167 eval_subst (loc, TREE_OPERAND (arg, 0),
4168 old0, new0, old1, new1));
4170 case tcc_binary:
4171 return fold_build2_loc (loc, code, type,
4172 eval_subst (loc, TREE_OPERAND (arg, 0),
4173 old0, new0, old1, new1),
4174 eval_subst (loc, TREE_OPERAND (arg, 1),
4175 old0, new0, old1, new1));
4177 case tcc_expression:
4178 switch (code)
4180 case SAVE_EXPR:
4181 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4182 old1, new1);
4184 case COMPOUND_EXPR:
4185 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4186 old1, new1);
4188 case COND_EXPR:
4189 return fold_build3_loc (loc, code, type,
4190 eval_subst (loc, TREE_OPERAND (arg, 0),
4191 old0, new0, old1, new1),
4192 eval_subst (loc, TREE_OPERAND (arg, 1),
4193 old0, new0, old1, new1),
4194 eval_subst (loc, TREE_OPERAND (arg, 2),
4195 old0, new0, old1, new1));
4196 default:
4197 break;
4199 /* Fall through - ??? */
4201 case tcc_comparison:
4203 tree arg0 = TREE_OPERAND (arg, 0);
4204 tree arg1 = TREE_OPERAND (arg, 1);
4206 /* We need to check both for exact equality and tree equality. The
4207 former will be true if the operand has a side-effect. In that
4208 case, we know the operand occurred exactly once. */
4210 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4211 arg0 = new0;
4212 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4213 arg0 = new1;
4215 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4216 arg1 = new0;
4217 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4218 arg1 = new1;
4220 return fold_build2_loc (loc, code, type, arg0, arg1);
4223 default:
4224 return arg;
4228 /* Return a tree for the case when the result of an expression is RESULT
4229 converted to TYPE and OMITTED was previously an operand of the expression
4230 but is now not needed (e.g., we folded OMITTED * 0).
4232 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4233 the conversion of RESULT to TYPE. */
4235 tree
4236 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4238 tree t = fold_convert_loc (loc, type, result);
4240 /* If the resulting operand is an empty statement, just return the omitted
4241 statement casted to void. */
4242 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4243 return build1_loc (loc, NOP_EXPR, void_type_node,
4244 fold_ignored_result (omitted));
4246 if (TREE_SIDE_EFFECTS (omitted))
4247 return build2_loc (loc, COMPOUND_EXPR, type,
4248 fold_ignored_result (omitted), t);
4250 return non_lvalue_loc (loc, t);
4253 /* Return a tree for the case when the result of an expression is RESULT
4254 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4255 of the expression but are now not needed.
4257 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4258 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4259 evaluated before OMITTED2. Otherwise, if neither has side effects,
4260 just do the conversion of RESULT to TYPE. */
4262 tree
4263 omit_two_operands_loc (location_t loc, tree type, tree result,
4264 tree omitted1, tree omitted2)
4266 tree t = fold_convert_loc (loc, type, result);
4268 if (TREE_SIDE_EFFECTS (omitted2))
4269 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4270 if (TREE_SIDE_EFFECTS (omitted1))
4271 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4273 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4277 /* Return a simplified tree node for the truth-negation of ARG. This
4278 never alters ARG itself. We assume that ARG is an operation that
4279 returns a truth value (0 or 1).
4281 FIXME: one would think we would fold the result, but it causes
4282 problems with the dominator optimizer. */
4284 static tree
4285 fold_truth_not_expr (location_t loc, tree arg)
4287 tree type = TREE_TYPE (arg);
4288 enum tree_code code = TREE_CODE (arg);
4289 location_t loc1, loc2;
4291 /* If this is a comparison, we can simply invert it, except for
4292 floating-point non-equality comparisons, in which case we just
4293 enclose a TRUTH_NOT_EXPR around what we have. */
4295 if (TREE_CODE_CLASS (code) == tcc_comparison)
4297 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4298 if (FLOAT_TYPE_P (op_type)
4299 && flag_trapping_math
4300 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4301 && code != NE_EXPR && code != EQ_EXPR)
4302 return NULL_TREE;
4304 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4305 if (code == ERROR_MARK)
4306 return NULL_TREE;
4308 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4309 TREE_OPERAND (arg, 1));
4310 copy_warning (ret, arg);
4311 return ret;
4314 switch (code)
4316 case INTEGER_CST:
4317 return constant_boolean_node (integer_zerop (arg), type);
4319 case TRUTH_AND_EXPR:
4320 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4321 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4322 return build2_loc (loc, TRUTH_OR_EXPR, type,
4323 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4324 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4326 case TRUTH_OR_EXPR:
4327 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4328 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4329 return build2_loc (loc, TRUTH_AND_EXPR, type,
4330 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4331 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4333 case TRUTH_XOR_EXPR:
4334 /* Here we can invert either operand. We invert the first operand
4335 unless the second operand is a TRUTH_NOT_EXPR in which case our
4336 result is the XOR of the first operand with the inside of the
4337 negation of the second operand. */
4339 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4340 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4341 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4342 else
4343 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4344 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4345 TREE_OPERAND (arg, 1));
4347 case TRUTH_ANDIF_EXPR:
4348 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4349 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4350 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4351 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4352 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4354 case TRUTH_ORIF_EXPR:
4355 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4356 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4357 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4358 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4359 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4361 case TRUTH_NOT_EXPR:
4362 return TREE_OPERAND (arg, 0);
4364 case COND_EXPR:
4366 tree arg1 = TREE_OPERAND (arg, 1);
4367 tree arg2 = TREE_OPERAND (arg, 2);
4369 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4370 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4372 /* A COND_EXPR may have a throw as one operand, which
4373 then has void type. Just leave void operands
4374 as they are. */
4375 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4376 VOID_TYPE_P (TREE_TYPE (arg1))
4377 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4378 VOID_TYPE_P (TREE_TYPE (arg2))
4379 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4382 case COMPOUND_EXPR:
4383 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4384 return build2_loc (loc, COMPOUND_EXPR, type,
4385 TREE_OPERAND (arg, 0),
4386 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4388 case NON_LVALUE_EXPR:
4389 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4390 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4392 CASE_CONVERT:
4393 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4394 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4396 /* fall through */
4398 case FLOAT_EXPR:
4399 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4400 return build1_loc (loc, TREE_CODE (arg), type,
4401 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4403 case BIT_AND_EXPR:
4404 if (!integer_onep (TREE_OPERAND (arg, 1)))
4405 return NULL_TREE;
4406 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4408 case SAVE_EXPR:
4409 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4411 case CLEANUP_POINT_EXPR:
4412 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4413 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4414 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4416 default:
4417 return NULL_TREE;
4421 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4422 assume that ARG is an operation that returns a truth value (0 or 1
4423 for scalars, 0 or -1 for vectors). Return the folded expression if
4424 folding is successful. Otherwise, return NULL_TREE. */
4426 static tree
4427 fold_invert_truthvalue (location_t loc, tree arg)
4429 tree type = TREE_TYPE (arg);
4430 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4431 ? BIT_NOT_EXPR
4432 : TRUTH_NOT_EXPR,
4433 type, arg);
4436 /* Return a simplified tree node for the truth-negation of ARG. This
4437 never alters ARG itself. We assume that ARG is an operation that
4438 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4440 tree
4441 invert_truthvalue_loc (location_t loc, tree arg)
4443 if (TREE_CODE (arg) == ERROR_MARK)
4444 return arg;
4446 tree type = TREE_TYPE (arg);
4447 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4448 ? BIT_NOT_EXPR
4449 : TRUTH_NOT_EXPR,
4450 type, arg);
4453 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4454 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4455 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4456 is the original memory reference used to preserve the alias set of
4457 the access. */
4459 static tree
4460 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4461 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4462 int unsignedp, int reversep)
4464 tree result, bftype;
4466 /* Attempt not to lose the access path if possible. */
4467 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4469 tree ninner = TREE_OPERAND (orig_inner, 0);
4470 machine_mode nmode;
4471 poly_int64 nbitsize, nbitpos;
4472 tree noffset;
4473 int nunsignedp, nreversep, nvolatilep = 0;
4474 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4475 &noffset, &nmode, &nunsignedp,
4476 &nreversep, &nvolatilep);
4477 if (base == inner
4478 && noffset == NULL_TREE
4479 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4480 && !reversep
4481 && !nreversep
4482 && !nvolatilep)
4484 inner = ninner;
4485 bitpos -= nbitpos;
4489 alias_set_type iset = get_alias_set (orig_inner);
4490 if (iset == 0 && get_alias_set (inner) != iset)
4491 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4492 build_fold_addr_expr (inner),
4493 build_int_cst (ptr_type_node, 0));
4495 if (known_eq (bitpos, 0) && !reversep)
4497 tree size = TYPE_SIZE (TREE_TYPE (inner));
4498 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4499 || POINTER_TYPE_P (TREE_TYPE (inner)))
4500 && tree_fits_shwi_p (size)
4501 && tree_to_shwi (size) == bitsize)
4502 return fold_convert_loc (loc, type, inner);
4505 bftype = type;
4506 if (TYPE_PRECISION (bftype) != bitsize
4507 || TYPE_UNSIGNED (bftype) == !unsignedp)
4508 bftype = build_nonstandard_integer_type (bitsize, 0);
4510 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4511 bitsize_int (bitsize), bitsize_int (bitpos));
4512 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4514 if (bftype != type)
4515 result = fold_convert_loc (loc, type, result);
4517 return result;
4520 /* Optimize a bit-field compare.
4522 There are two cases: First is a compare against a constant and the
4523 second is a comparison of two items where the fields are at the same
4524 bit position relative to the start of a chunk (byte, halfword, word)
4525 large enough to contain it. In these cases we can avoid the shift
4526 implicit in bitfield extractions.
4528 For constants, we emit a compare of the shifted constant with the
4529 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4530 compared. For two fields at the same position, we do the ANDs with the
4531 similar mask and compare the result of the ANDs.
4533 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4534 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4535 are the left and right operands of the comparison, respectively.
4537 If the optimization described above can be done, we return the resulting
4538 tree. Otherwise we return zero. */
4540 static tree
4541 optimize_bit_field_compare (location_t loc, enum tree_code code,
4542 tree compare_type, tree lhs, tree rhs)
4544 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4545 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4546 tree type = TREE_TYPE (lhs);
4547 tree unsigned_type;
4548 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4549 machine_mode lmode, rmode;
4550 scalar_int_mode nmode;
4551 int lunsignedp, runsignedp;
4552 int lreversep, rreversep;
4553 int lvolatilep = 0, rvolatilep = 0;
4554 tree linner, rinner = NULL_TREE;
4555 tree mask;
4556 tree offset;
4558 /* Get all the information about the extractions being done. If the bit size
4559 is the same as the size of the underlying object, we aren't doing an
4560 extraction at all and so can do nothing. We also don't want to
4561 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4562 then will no longer be able to replace it. */
4563 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4564 &lunsignedp, &lreversep, &lvolatilep);
4565 if (linner == lhs
4566 || !known_size_p (plbitsize)
4567 || !plbitsize.is_constant (&lbitsize)
4568 || !plbitpos.is_constant (&lbitpos)
4569 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4570 || offset != 0
4571 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4572 || lvolatilep)
4573 return 0;
4575 if (const_p)
4576 rreversep = lreversep;
4577 else
4579 /* If this is not a constant, we can only do something if bit positions,
4580 sizes, signedness and storage order are the same. */
4581 rinner
4582 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4583 &runsignedp, &rreversep, &rvolatilep);
4585 if (rinner == rhs
4586 || maybe_ne (lbitpos, rbitpos)
4587 || maybe_ne (lbitsize, rbitsize)
4588 || lunsignedp != runsignedp
4589 || lreversep != rreversep
4590 || offset != 0
4591 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4592 || rvolatilep)
4593 return 0;
4596 /* Honor the C++ memory model and mimic what RTL expansion does. */
4597 poly_uint64 bitstart = 0;
4598 poly_uint64 bitend = 0;
4599 if (TREE_CODE (lhs) == COMPONENT_REF)
4601 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4602 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4603 return 0;
4606 /* See if we can find a mode to refer to this field. We should be able to,
4607 but fail if we can't. */
4608 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4609 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4610 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4611 TYPE_ALIGN (TREE_TYPE (rinner))),
4612 BITS_PER_WORD, false, &nmode))
4613 return 0;
4615 /* Set signed and unsigned types of the precision of this mode for the
4616 shifts below. */
4617 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4619 /* Compute the bit position and size for the new reference and our offset
4620 within it. If the new reference is the same size as the original, we
4621 won't optimize anything, so return zero. */
4622 nbitsize = GET_MODE_BITSIZE (nmode);
4623 nbitpos = lbitpos & ~ (nbitsize - 1);
4624 lbitpos -= nbitpos;
4625 if (nbitsize == lbitsize)
4626 return 0;
4628 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4629 lbitpos = nbitsize - lbitsize - lbitpos;
4631 /* Make the mask to be used against the extracted field. */
4632 mask = build_int_cst_type (unsigned_type, -1);
4633 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4634 mask = const_binop (RSHIFT_EXPR, mask,
4635 size_int (nbitsize - lbitsize - lbitpos));
4637 if (! const_p)
4639 if (nbitpos < 0)
4640 return 0;
4642 /* If not comparing with constant, just rework the comparison
4643 and return. */
4644 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4645 nbitsize, nbitpos, 1, lreversep);
4646 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4647 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4648 nbitsize, nbitpos, 1, rreversep);
4649 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4650 return fold_build2_loc (loc, code, compare_type, t1, t2);
4653 /* Otherwise, we are handling the constant case. See if the constant is too
4654 big for the field. Warn and return a tree for 0 (false) if so. We do
4655 this not only for its own sake, but to avoid having to test for this
4656 error case below. If we didn't, we might generate wrong code.
4658 For unsigned fields, the constant shifted right by the field length should
4659 be all zero. For signed fields, the high-order bits should agree with
4660 the sign bit. */
4662 if (lunsignedp)
4664 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4666 warning (0, "comparison is always %d due to width of bit-field",
4667 code == NE_EXPR);
4668 return constant_boolean_node (code == NE_EXPR, compare_type);
4671 else
4673 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4674 if (tem != 0 && tem != -1)
4676 warning (0, "comparison is always %d due to width of bit-field",
4677 code == NE_EXPR);
4678 return constant_boolean_node (code == NE_EXPR, compare_type);
4682 if (nbitpos < 0)
4683 return 0;
4685 /* Single-bit compares should always be against zero. */
4686 if (lbitsize == 1 && ! integer_zerop (rhs))
4688 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4689 rhs = build_int_cst (type, 0);
4692 /* Make a new bitfield reference, shift the constant over the
4693 appropriate number of bits and mask it with the computed mask
4694 (in case this was a signed field). If we changed it, make a new one. */
4695 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4696 nbitsize, nbitpos, 1, lreversep);
4698 rhs = const_binop (BIT_AND_EXPR,
4699 const_binop (LSHIFT_EXPR,
4700 fold_convert_loc (loc, unsigned_type, rhs),
4701 size_int (lbitpos)),
4702 mask);
4704 lhs = build2_loc (loc, code, compare_type,
4705 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4706 return lhs;
4709 /* Subroutine for fold_truth_andor_1: decode a field reference.
4711 If EXP is a comparison reference, we return the innermost reference.
4713 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4714 set to the starting bit number.
4716 If the innermost field can be completely contained in a mode-sized
4717 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4719 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4720 otherwise it is not changed.
4722 *PUNSIGNEDP is set to the signedness of the field.
4724 *PREVERSEP is set to the storage order of the field.
4726 *PMASK is set to the mask used. This is either contained in a
4727 BIT_AND_EXPR or derived from the width of the field.
4729 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4731 Return 0 if this is not a component reference or is one that we can't
4732 do anything with. */
4734 static tree
4735 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4736 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4737 int *punsignedp, int *preversep, int *pvolatilep,
4738 tree *pmask, tree *pand_mask)
4740 tree exp = *exp_;
4741 tree outer_type = 0;
4742 tree and_mask = 0;
4743 tree mask, inner, offset;
4744 tree unsigned_type;
4745 unsigned int precision;
4747 /* All the optimizations using this function assume integer fields.
4748 There are problems with FP fields since the type_for_size call
4749 below can fail for, e.g., XFmode. */
4750 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4751 return NULL_TREE;
4753 /* We are interested in the bare arrangement of bits, so strip everything
4754 that doesn't affect the machine mode. However, record the type of the
4755 outermost expression if it may matter below. */
4756 if (CONVERT_EXPR_P (exp)
4757 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4758 outer_type = TREE_TYPE (exp);
4759 STRIP_NOPS (exp);
4761 if (TREE_CODE (exp) == BIT_AND_EXPR)
4763 and_mask = TREE_OPERAND (exp, 1);
4764 exp = TREE_OPERAND (exp, 0);
4765 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4766 if (TREE_CODE (and_mask) != INTEGER_CST)
4767 return NULL_TREE;
4770 poly_int64 poly_bitsize, poly_bitpos;
4771 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4772 pmode, punsignedp, preversep, pvolatilep);
4773 if ((inner == exp && and_mask == 0)
4774 || !poly_bitsize.is_constant (pbitsize)
4775 || !poly_bitpos.is_constant (pbitpos)
4776 || *pbitsize < 0
4777 || offset != 0
4778 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4779 /* Reject out-of-bound accesses (PR79731). */
4780 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4781 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4782 *pbitpos + *pbitsize) < 0))
4783 return NULL_TREE;
4785 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4786 if (unsigned_type == NULL_TREE)
4787 return NULL_TREE;
4789 *exp_ = exp;
4791 /* If the number of bits in the reference is the same as the bitsize of
4792 the outer type, then the outer type gives the signedness. Otherwise
4793 (in case of a small bitfield) the signedness is unchanged. */
4794 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4795 *punsignedp = TYPE_UNSIGNED (outer_type);
4797 /* Compute the mask to access the bitfield. */
4798 precision = TYPE_PRECISION (unsigned_type);
4800 mask = build_int_cst_type (unsigned_type, -1);
4802 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4803 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4805 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4806 if (and_mask != 0)
4807 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4808 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4810 *pmask = mask;
4811 *pand_mask = and_mask;
4812 return inner;
4815 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4816 bit positions and MASK is SIGNED. */
4818 static bool
4819 all_ones_mask_p (const_tree mask, unsigned int size)
4821 tree type = TREE_TYPE (mask);
4822 unsigned int precision = TYPE_PRECISION (type);
4824 /* If this function returns true when the type of the mask is
4825 UNSIGNED, then there will be errors. In particular see
4826 gcc.c-torture/execute/990326-1.c. There does not appear to be
4827 any documentation paper trail as to why this is so. But the pre
4828 wide-int worked with that restriction and it has been preserved
4829 here. */
4830 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4831 return false;
4833 return wi::mask (size, false, precision) == wi::to_wide (mask);
4836 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4837 represents the sign bit of EXP's type. If EXP represents a sign
4838 or zero extension, also test VAL against the unextended type.
4839 The return value is the (sub)expression whose sign bit is VAL,
4840 or NULL_TREE otherwise. */
4842 tree
4843 sign_bit_p (tree exp, const_tree val)
4845 int width;
4846 tree t;
4848 /* Tree EXP must have an integral type. */
4849 t = TREE_TYPE (exp);
4850 if (! INTEGRAL_TYPE_P (t))
4851 return NULL_TREE;
4853 /* Tree VAL must be an integer constant. */
4854 if (TREE_CODE (val) != INTEGER_CST
4855 || TREE_OVERFLOW (val))
4856 return NULL_TREE;
4858 width = TYPE_PRECISION (t);
4859 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4860 return exp;
4862 /* Handle extension from a narrower type. */
4863 if (TREE_CODE (exp) == NOP_EXPR
4864 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4865 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4867 return NULL_TREE;
4870 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4871 to be evaluated unconditionally. */
4873 static bool
4874 simple_operand_p (const_tree exp)
4876 /* Strip any conversions that don't change the machine mode. */
4877 STRIP_NOPS (exp);
4879 return (CONSTANT_CLASS_P (exp)
4880 || TREE_CODE (exp) == SSA_NAME
4881 || (DECL_P (exp)
4882 && ! TREE_ADDRESSABLE (exp)
4883 && ! TREE_THIS_VOLATILE (exp)
4884 && ! DECL_NONLOCAL (exp)
4885 /* Don't regard global variables as simple. They may be
4886 allocated in ways unknown to the compiler (shared memory,
4887 #pragma weak, etc). */
4888 && ! TREE_PUBLIC (exp)
4889 && ! DECL_EXTERNAL (exp)
4890 /* Weakrefs are not safe to be read, since they can be NULL.
4891 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4892 have DECL_WEAK flag set. */
4893 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4894 /* Loading a static variable is unduly expensive, but global
4895 registers aren't expensive. */
4896 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4899 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4900 to be evaluated unconditionally.
4901 I addition to simple_operand_p, we assume that comparisons, conversions,
4902 and logic-not operations are simple, if their operands are simple, too. */
4904 static bool
4905 simple_operand_p_2 (tree exp)
4907 enum tree_code code;
4909 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4910 return false;
4912 while (CONVERT_EXPR_P (exp))
4913 exp = TREE_OPERAND (exp, 0);
4915 code = TREE_CODE (exp);
4917 if (TREE_CODE_CLASS (code) == tcc_comparison)
4918 return (simple_operand_p (TREE_OPERAND (exp, 0))
4919 && simple_operand_p (TREE_OPERAND (exp, 1)));
4921 if (code == TRUTH_NOT_EXPR)
4922 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4924 return simple_operand_p (exp);
4928 /* The following functions are subroutines to fold_range_test and allow it to
4929 try to change a logical combination of comparisons into a range test.
4931 For example, both
4932 X == 2 || X == 3 || X == 4 || X == 5
4934 X >= 2 && X <= 5
4935 are converted to
4936 (unsigned) (X - 2) <= 3
4938 We describe each set of comparisons as being either inside or outside
4939 a range, using a variable named like IN_P, and then describe the
4940 range with a lower and upper bound. If one of the bounds is omitted,
4941 it represents either the highest or lowest value of the type.
4943 In the comments below, we represent a range by two numbers in brackets
4944 preceded by a "+" to designate being inside that range, or a "-" to
4945 designate being outside that range, so the condition can be inverted by
4946 flipping the prefix. An omitted bound is represented by a "-". For
4947 example, "- [-, 10]" means being outside the range starting at the lowest
4948 possible value and ending at 10, in other words, being greater than 10.
4949 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4950 always false.
4952 We set up things so that the missing bounds are handled in a consistent
4953 manner so neither a missing bound nor "true" and "false" need to be
4954 handled using a special case. */
4956 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4957 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4958 and UPPER1_P are nonzero if the respective argument is an upper bound
4959 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4960 must be specified for a comparison. ARG1 will be converted to ARG0's
4961 type if both are specified. */
4963 static tree
4964 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4965 tree arg1, int upper1_p)
4967 tree tem;
4968 int result;
4969 int sgn0, sgn1;
4971 /* If neither arg represents infinity, do the normal operation.
4972 Else, if not a comparison, return infinity. Else handle the special
4973 comparison rules. Note that most of the cases below won't occur, but
4974 are handled for consistency. */
4976 if (arg0 != 0 && arg1 != 0)
4978 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4979 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4980 STRIP_NOPS (tem);
4981 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4984 if (TREE_CODE_CLASS (code) != tcc_comparison)
4985 return 0;
4987 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4988 for neither. In real maths, we cannot assume open ended ranges are
4989 the same. But, this is computer arithmetic, where numbers are finite.
4990 We can therefore make the transformation of any unbounded range with
4991 the value Z, Z being greater than any representable number. This permits
4992 us to treat unbounded ranges as equal. */
4993 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4994 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4995 switch (code)
4997 case EQ_EXPR:
4998 result = sgn0 == sgn1;
4999 break;
5000 case NE_EXPR:
5001 result = sgn0 != sgn1;
5002 break;
5003 case LT_EXPR:
5004 result = sgn0 < sgn1;
5005 break;
5006 case LE_EXPR:
5007 result = sgn0 <= sgn1;
5008 break;
5009 case GT_EXPR:
5010 result = sgn0 > sgn1;
5011 break;
5012 case GE_EXPR:
5013 result = sgn0 >= sgn1;
5014 break;
5015 default:
5016 gcc_unreachable ();
5019 return constant_boolean_node (result, type);
5022 /* Helper routine for make_range. Perform one step for it, return
5023 new expression if the loop should continue or NULL_TREE if it should
5024 stop. */
5026 tree
5027 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5028 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5029 bool *strict_overflow_p)
5031 tree arg0_type = TREE_TYPE (arg0);
5032 tree n_low, n_high, low = *p_low, high = *p_high;
5033 int in_p = *p_in_p, n_in_p;
5035 switch (code)
5037 case TRUTH_NOT_EXPR:
5038 /* We can only do something if the range is testing for zero. */
5039 if (low == NULL_TREE || high == NULL_TREE
5040 || ! integer_zerop (low) || ! integer_zerop (high))
5041 return NULL_TREE;
5042 *p_in_p = ! in_p;
5043 return arg0;
5045 case EQ_EXPR: case NE_EXPR:
5046 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5047 /* We can only do something if the range is testing for zero
5048 and if the second operand is an integer constant. Note that
5049 saying something is "in" the range we make is done by
5050 complementing IN_P since it will set in the initial case of
5051 being not equal to zero; "out" is leaving it alone. */
5052 if (low == NULL_TREE || high == NULL_TREE
5053 || ! integer_zerop (low) || ! integer_zerop (high)
5054 || TREE_CODE (arg1) != INTEGER_CST)
5055 return NULL_TREE;
5057 switch (code)
5059 case NE_EXPR: /* - [c, c] */
5060 low = high = arg1;
5061 break;
5062 case EQ_EXPR: /* + [c, c] */
5063 in_p = ! in_p, low = high = arg1;
5064 break;
5065 case GT_EXPR: /* - [-, c] */
5066 low = 0, high = arg1;
5067 break;
5068 case GE_EXPR: /* + [c, -] */
5069 in_p = ! in_p, low = arg1, high = 0;
5070 break;
5071 case LT_EXPR: /* - [c, -] */
5072 low = arg1, high = 0;
5073 break;
5074 case LE_EXPR: /* + [-, c] */
5075 in_p = ! in_p, low = 0, high = arg1;
5076 break;
5077 default:
5078 gcc_unreachable ();
5081 /* If this is an unsigned comparison, we also know that EXP is
5082 greater than or equal to zero. We base the range tests we make
5083 on that fact, so we record it here so we can parse existing
5084 range tests. We test arg0_type since often the return type
5085 of, e.g. EQ_EXPR, is boolean. */
5086 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5088 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5089 in_p, low, high, 1,
5090 build_int_cst (arg0_type, 0),
5091 NULL_TREE))
5092 return NULL_TREE;
5094 in_p = n_in_p, low = n_low, high = n_high;
5096 /* If the high bound is missing, but we have a nonzero low
5097 bound, reverse the range so it goes from zero to the low bound
5098 minus 1. */
5099 if (high == 0 && low && ! integer_zerop (low))
5101 in_p = ! in_p;
5102 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5103 build_int_cst (TREE_TYPE (low), 1), 0);
5104 low = build_int_cst (arg0_type, 0);
5108 *p_low = low;
5109 *p_high = high;
5110 *p_in_p = in_p;
5111 return arg0;
5113 case NEGATE_EXPR:
5114 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5115 low and high are non-NULL, then normalize will DTRT. */
5116 if (!TYPE_UNSIGNED (arg0_type)
5117 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5119 if (low == NULL_TREE)
5120 low = TYPE_MIN_VALUE (arg0_type);
5121 if (high == NULL_TREE)
5122 high = TYPE_MAX_VALUE (arg0_type);
5125 /* (-x) IN [a,b] -> x in [-b, -a] */
5126 n_low = range_binop (MINUS_EXPR, exp_type,
5127 build_int_cst (exp_type, 0),
5128 0, high, 1);
5129 n_high = range_binop (MINUS_EXPR, exp_type,
5130 build_int_cst (exp_type, 0),
5131 0, low, 0);
5132 if (n_high != 0 && TREE_OVERFLOW (n_high))
5133 return NULL_TREE;
5134 goto normalize;
5136 case BIT_NOT_EXPR:
5137 /* ~ X -> -X - 1 */
5138 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5139 build_int_cst (exp_type, 1));
5141 case PLUS_EXPR:
5142 case MINUS_EXPR:
5143 if (TREE_CODE (arg1) != INTEGER_CST)
5144 return NULL_TREE;
5146 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5147 move a constant to the other side. */
5148 if (!TYPE_UNSIGNED (arg0_type)
5149 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5150 return NULL_TREE;
5152 /* If EXP is signed, any overflow in the computation is undefined,
5153 so we don't worry about it so long as our computations on
5154 the bounds don't overflow. For unsigned, overflow is defined
5155 and this is exactly the right thing. */
5156 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5157 arg0_type, low, 0, arg1, 0);
5158 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5159 arg0_type, high, 1, arg1, 0);
5160 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5161 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5162 return NULL_TREE;
5164 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5165 *strict_overflow_p = true;
5167 normalize:
5168 /* Check for an unsigned range which has wrapped around the maximum
5169 value thus making n_high < n_low, and normalize it. */
5170 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5172 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5173 build_int_cst (TREE_TYPE (n_high), 1), 0);
5174 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5175 build_int_cst (TREE_TYPE (n_low), 1), 0);
5177 /* If the range is of the form +/- [ x+1, x ], we won't
5178 be able to normalize it. But then, it represents the
5179 whole range or the empty set, so make it
5180 +/- [ -, - ]. */
5181 if (tree_int_cst_equal (n_low, low)
5182 && tree_int_cst_equal (n_high, high))
5183 low = high = 0;
5184 else
5185 in_p = ! in_p;
5187 else
5188 low = n_low, high = n_high;
5190 *p_low = low;
5191 *p_high = high;
5192 *p_in_p = in_p;
5193 return arg0;
5195 CASE_CONVERT:
5196 case NON_LVALUE_EXPR:
5197 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5198 return NULL_TREE;
5200 if (! INTEGRAL_TYPE_P (arg0_type)
5201 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5202 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5203 return NULL_TREE;
5205 n_low = low, n_high = high;
5207 if (n_low != 0)
5208 n_low = fold_convert_loc (loc, arg0_type, n_low);
5210 if (n_high != 0)
5211 n_high = fold_convert_loc (loc, arg0_type, n_high);
5213 /* If we're converting arg0 from an unsigned type, to exp,
5214 a signed type, we will be doing the comparison as unsigned.
5215 The tests above have already verified that LOW and HIGH
5216 are both positive.
5218 So we have to ensure that we will handle large unsigned
5219 values the same way that the current signed bounds treat
5220 negative values. */
5222 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5224 tree high_positive;
5225 tree equiv_type;
5226 /* For fixed-point modes, we need to pass the saturating flag
5227 as the 2nd parameter. */
5228 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5229 equiv_type
5230 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5231 TYPE_SATURATING (arg0_type));
5232 else
5233 equiv_type
5234 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5236 /* A range without an upper bound is, naturally, unbounded.
5237 Since convert would have cropped a very large value, use
5238 the max value for the destination type. */
5239 high_positive
5240 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5241 : TYPE_MAX_VALUE (arg0_type);
5243 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5244 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5245 fold_convert_loc (loc, arg0_type,
5246 high_positive),
5247 build_int_cst (arg0_type, 1));
5249 /* If the low bound is specified, "and" the range with the
5250 range for which the original unsigned value will be
5251 positive. */
5252 if (low != 0)
5254 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5255 1, fold_convert_loc (loc, arg0_type,
5256 integer_zero_node),
5257 high_positive))
5258 return NULL_TREE;
5260 in_p = (n_in_p == in_p);
5262 else
5264 /* Otherwise, "or" the range with the range of the input
5265 that will be interpreted as negative. */
5266 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5267 1, fold_convert_loc (loc, arg0_type,
5268 integer_zero_node),
5269 high_positive))
5270 return NULL_TREE;
5272 in_p = (in_p != n_in_p);
5276 *p_low = n_low;
5277 *p_high = n_high;
5278 *p_in_p = in_p;
5279 return arg0;
5281 default:
5282 return NULL_TREE;
5286 /* Given EXP, a logical expression, set the range it is testing into
5287 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5288 actually being tested. *PLOW and *PHIGH will be made of the same
5289 type as the returned expression. If EXP is not a comparison, we
5290 will most likely not be returning a useful value and range. Set
5291 *STRICT_OVERFLOW_P to true if the return value is only valid
5292 because signed overflow is undefined; otherwise, do not change
5293 *STRICT_OVERFLOW_P. */
5295 tree
5296 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5297 bool *strict_overflow_p)
5299 enum tree_code code;
5300 tree arg0, arg1 = NULL_TREE;
5301 tree exp_type, nexp;
5302 int in_p;
5303 tree low, high;
5304 location_t loc = EXPR_LOCATION (exp);
5306 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5307 and see if we can refine the range. Some of the cases below may not
5308 happen, but it doesn't seem worth worrying about this. We "continue"
5309 the outer loop when we've changed something; otherwise we "break"
5310 the switch, which will "break" the while. */
5312 in_p = 0;
5313 low = high = build_int_cst (TREE_TYPE (exp), 0);
5315 while (1)
5317 code = TREE_CODE (exp);
5318 exp_type = TREE_TYPE (exp);
5319 arg0 = NULL_TREE;
5321 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5323 if (TREE_OPERAND_LENGTH (exp) > 0)
5324 arg0 = TREE_OPERAND (exp, 0);
5325 if (TREE_CODE_CLASS (code) == tcc_binary
5326 || TREE_CODE_CLASS (code) == tcc_comparison
5327 || (TREE_CODE_CLASS (code) == tcc_expression
5328 && TREE_OPERAND_LENGTH (exp) > 1))
5329 arg1 = TREE_OPERAND (exp, 1);
5331 if (arg0 == NULL_TREE)
5332 break;
5334 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5335 &high, &in_p, strict_overflow_p);
5336 if (nexp == NULL_TREE)
5337 break;
5338 exp = nexp;
5341 /* If EXP is a constant, we can evaluate whether this is true or false. */
5342 if (TREE_CODE (exp) == INTEGER_CST)
5344 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5345 exp, 0, low, 0))
5346 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5347 exp, 1, high, 1)));
5348 low = high = 0;
5349 exp = 0;
5352 *pin_p = in_p, *plow = low, *phigh = high;
5353 return exp;
5356 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5357 a bitwise check i.e. when
5358 LOW == 0xXX...X00...0
5359 HIGH == 0xXX...X11...1
5360 Return corresponding mask in MASK and stem in VALUE. */
5362 static bool
5363 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5364 tree *value)
5366 if (TREE_CODE (low) != INTEGER_CST
5367 || TREE_CODE (high) != INTEGER_CST)
5368 return false;
5370 unsigned prec = TYPE_PRECISION (type);
5371 wide_int lo = wi::to_wide (low, prec);
5372 wide_int hi = wi::to_wide (high, prec);
5374 wide_int end_mask = lo ^ hi;
5375 if ((end_mask & (end_mask + 1)) != 0
5376 || (lo & end_mask) != 0)
5377 return false;
5379 wide_int stem_mask = ~end_mask;
5380 wide_int stem = lo & stem_mask;
5381 if (stem != (hi & stem_mask))
5382 return false;
5384 *mask = wide_int_to_tree (type, stem_mask);
5385 *value = wide_int_to_tree (type, stem);
5387 return true;
5390 /* Helper routine for build_range_check and match.pd. Return the type to
5391 perform the check or NULL if it shouldn't be optimized. */
5393 tree
5394 range_check_type (tree etype)
5396 /* First make sure that arithmetics in this type is valid, then make sure
5397 that it wraps around. */
5398 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5399 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5401 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5403 tree utype, minv, maxv;
5405 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5406 for the type in question, as we rely on this here. */
5407 utype = unsigned_type_for (etype);
5408 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5409 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5410 build_int_cst (TREE_TYPE (maxv), 1), 1);
5411 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5413 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5414 minv, 1, maxv, 1)))
5415 etype = utype;
5416 else
5417 return NULL_TREE;
5419 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5420 etype = unsigned_type_for (etype);
5421 return etype;
5424 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5425 type, TYPE, return an expression to test if EXP is in (or out of, depending
5426 on IN_P) the range. Return 0 if the test couldn't be created. */
5428 tree
5429 build_range_check (location_t loc, tree type, tree exp, int in_p,
5430 tree low, tree high)
5432 tree etype = TREE_TYPE (exp), mask, value;
5434 /* Disable this optimization for function pointer expressions
5435 on targets that require function pointer canonicalization. */
5436 if (targetm.have_canonicalize_funcptr_for_compare ()
5437 && POINTER_TYPE_P (etype)
5438 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5439 return NULL_TREE;
5441 if (! in_p)
5443 value = build_range_check (loc, type, exp, 1, low, high);
5444 if (value != 0)
5445 return invert_truthvalue_loc (loc, value);
5447 return 0;
5450 if (low == 0 && high == 0)
5451 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5453 if (low == 0)
5454 return fold_build2_loc (loc, LE_EXPR, type, exp,
5455 fold_convert_loc (loc, etype, high));
5457 if (high == 0)
5458 return fold_build2_loc (loc, GE_EXPR, type, exp,
5459 fold_convert_loc (loc, etype, low));
5461 if (operand_equal_p (low, high, 0))
5462 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5463 fold_convert_loc (loc, etype, low));
5465 if (TREE_CODE (exp) == BIT_AND_EXPR
5466 && maskable_range_p (low, high, etype, &mask, &value))
5467 return fold_build2_loc (loc, EQ_EXPR, type,
5468 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5469 exp, mask),
5470 value);
5472 if (integer_zerop (low))
5474 if (! TYPE_UNSIGNED (etype))
5476 etype = unsigned_type_for (etype);
5477 high = fold_convert_loc (loc, etype, high);
5478 exp = fold_convert_loc (loc, etype, exp);
5480 return build_range_check (loc, type, exp, 1, 0, high);
5483 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5484 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5486 int prec = TYPE_PRECISION (etype);
5488 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5490 if (TYPE_UNSIGNED (etype))
5492 tree signed_etype = signed_type_for (etype);
5493 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5494 etype
5495 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5496 else
5497 etype = signed_etype;
5498 exp = fold_convert_loc (loc, etype, exp);
5500 return fold_build2_loc (loc, GT_EXPR, type, exp,
5501 build_int_cst (etype, 0));
5505 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5506 This requires wrap-around arithmetics for the type of the expression. */
5507 etype = range_check_type (etype);
5508 if (etype == NULL_TREE)
5509 return NULL_TREE;
5511 high = fold_convert_loc (loc, etype, high);
5512 low = fold_convert_loc (loc, etype, low);
5513 exp = fold_convert_loc (loc, etype, exp);
5515 value = const_binop (MINUS_EXPR, high, low);
5517 if (value != 0 && !TREE_OVERFLOW (value))
5518 return build_range_check (loc, type,
5519 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5520 1, build_int_cst (etype, 0), value);
5522 return 0;
5525 /* Return the predecessor of VAL in its type, handling the infinite case. */
5527 static tree
5528 range_predecessor (tree val)
5530 tree type = TREE_TYPE (val);
5532 if (INTEGRAL_TYPE_P (type)
5533 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5534 return 0;
5535 else
5536 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5537 build_int_cst (TREE_TYPE (val), 1), 0);
5540 /* Return the successor of VAL in its type, handling the infinite case. */
5542 static tree
5543 range_successor (tree val)
5545 tree type = TREE_TYPE (val);
5547 if (INTEGRAL_TYPE_P (type)
5548 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5549 return 0;
5550 else
5551 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5552 build_int_cst (TREE_TYPE (val), 1), 0);
5555 /* Given two ranges, see if we can merge them into one. Return 1 if we
5556 can, 0 if we can't. Set the output range into the specified parameters. */
5558 bool
5559 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5560 tree high0, int in1_p, tree low1, tree high1)
5562 int no_overlap;
5563 int subset;
5564 int temp;
5565 tree tem;
5566 int in_p;
5567 tree low, high;
5568 int lowequal = ((low0 == 0 && low1 == 0)
5569 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5570 low0, 0, low1, 0)));
5571 int highequal = ((high0 == 0 && high1 == 0)
5572 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5573 high0, 1, high1, 1)));
5575 /* Make range 0 be the range that starts first, or ends last if they
5576 start at the same value. Swap them if it isn't. */
5577 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5578 low0, 0, low1, 0))
5579 || (lowequal
5580 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5581 high1, 1, high0, 1))))
5583 temp = in0_p, in0_p = in1_p, in1_p = temp;
5584 tem = low0, low0 = low1, low1 = tem;
5585 tem = high0, high0 = high1, high1 = tem;
5588 /* If the second range is != high1 where high1 is the type maximum of
5589 the type, try first merging with < high1 range. */
5590 if (low1
5591 && high1
5592 && TREE_CODE (low1) == INTEGER_CST
5593 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5594 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5595 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5596 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5597 && operand_equal_p (low1, high1, 0))
5599 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5600 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5601 !in1_p, NULL_TREE, range_predecessor (low1)))
5602 return true;
5603 /* Similarly for the second range != low1 where low1 is the type minimum
5604 of the type, try first merging with > low1 range. */
5605 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5606 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5607 !in1_p, range_successor (low1), NULL_TREE))
5608 return true;
5611 /* Now flag two cases, whether the ranges are disjoint or whether the
5612 second range is totally subsumed in the first. Note that the tests
5613 below are simplified by the ones above. */
5614 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5615 high0, 1, low1, 0));
5616 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5617 high1, 1, high0, 1));
5619 /* We now have four cases, depending on whether we are including or
5620 excluding the two ranges. */
5621 if (in0_p && in1_p)
5623 /* If they don't overlap, the result is false. If the second range
5624 is a subset it is the result. Otherwise, the range is from the start
5625 of the second to the end of the first. */
5626 if (no_overlap)
5627 in_p = 0, low = high = 0;
5628 else if (subset)
5629 in_p = 1, low = low1, high = high1;
5630 else
5631 in_p = 1, low = low1, high = high0;
5634 else if (in0_p && ! in1_p)
5636 /* If they don't overlap, the result is the first range. If they are
5637 equal, the result is false. If the second range is a subset of the
5638 first, and the ranges begin at the same place, we go from just after
5639 the end of the second range to the end of the first. If the second
5640 range is not a subset of the first, or if it is a subset and both
5641 ranges end at the same place, the range starts at the start of the
5642 first range and ends just before the second range.
5643 Otherwise, we can't describe this as a single range. */
5644 if (no_overlap)
5645 in_p = 1, low = low0, high = high0;
5646 else if (lowequal && highequal)
5647 in_p = 0, low = high = 0;
5648 else if (subset && lowequal)
5650 low = range_successor (high1);
5651 high = high0;
5652 in_p = 1;
5653 if (low == 0)
5655 /* We are in the weird situation where high0 > high1 but
5656 high1 has no successor. Punt. */
5657 return 0;
5660 else if (! subset || highequal)
5662 low = low0;
5663 high = range_predecessor (low1);
5664 in_p = 1;
5665 if (high == 0)
5667 /* low0 < low1 but low1 has no predecessor. Punt. */
5668 return 0;
5671 else
5672 return 0;
5675 else if (! in0_p && in1_p)
5677 /* If they don't overlap, the result is the second range. If the second
5678 is a subset of the first, the result is false. Otherwise,
5679 the range starts just after the first range and ends at the
5680 end of the second. */
5681 if (no_overlap)
5682 in_p = 1, low = low1, high = high1;
5683 else if (subset || highequal)
5684 in_p = 0, low = high = 0;
5685 else
5687 low = range_successor (high0);
5688 high = high1;
5689 in_p = 1;
5690 if (low == 0)
5692 /* high1 > high0 but high0 has no successor. Punt. */
5693 return 0;
5698 else
5700 /* The case where we are excluding both ranges. Here the complex case
5701 is if they don't overlap. In that case, the only time we have a
5702 range is if they are adjacent. If the second is a subset of the
5703 first, the result is the first. Otherwise, the range to exclude
5704 starts at the beginning of the first range and ends at the end of the
5705 second. */
5706 if (no_overlap)
5708 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5709 range_successor (high0),
5710 1, low1, 0)))
5711 in_p = 0, low = low0, high = high1;
5712 else
5714 /* Canonicalize - [min, x] into - [-, x]. */
5715 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5716 switch (TREE_CODE (TREE_TYPE (low0)))
5718 case ENUMERAL_TYPE:
5719 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5720 GET_MODE_BITSIZE
5721 (TYPE_MODE (TREE_TYPE (low0)))))
5722 break;
5723 /* FALLTHROUGH */
5724 case INTEGER_TYPE:
5725 if (tree_int_cst_equal (low0,
5726 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5727 low0 = 0;
5728 break;
5729 case POINTER_TYPE:
5730 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5731 && integer_zerop (low0))
5732 low0 = 0;
5733 break;
5734 default:
5735 break;
5738 /* Canonicalize - [x, max] into - [x, -]. */
5739 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5740 switch (TREE_CODE (TREE_TYPE (high1)))
5742 case ENUMERAL_TYPE:
5743 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5744 GET_MODE_BITSIZE
5745 (TYPE_MODE (TREE_TYPE (high1)))))
5746 break;
5747 /* FALLTHROUGH */
5748 case INTEGER_TYPE:
5749 if (tree_int_cst_equal (high1,
5750 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5751 high1 = 0;
5752 break;
5753 case POINTER_TYPE:
5754 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5755 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5756 high1, 1,
5757 build_int_cst (TREE_TYPE (high1), 1),
5758 1)))
5759 high1 = 0;
5760 break;
5761 default:
5762 break;
5765 /* The ranges might be also adjacent between the maximum and
5766 minimum values of the given type. For
5767 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5768 return + [x + 1, y - 1]. */
5769 if (low0 == 0 && high1 == 0)
5771 low = range_successor (high0);
5772 high = range_predecessor (low1);
5773 if (low == 0 || high == 0)
5774 return 0;
5776 in_p = 1;
5778 else
5779 return 0;
5782 else if (subset)
5783 in_p = 0, low = low0, high = high0;
5784 else
5785 in_p = 0, low = low0, high = high1;
5788 *pin_p = in_p, *plow = low, *phigh = high;
5789 return 1;
5793 /* Subroutine of fold, looking inside expressions of the form
5794 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5795 are the three operands of the COND_EXPR. This function is
5796 being used also to optimize A op B ? C : A, by reversing the
5797 comparison first.
5799 Return a folded expression whose code is not a COND_EXPR
5800 anymore, or NULL_TREE if no folding opportunity is found. */
5802 static tree
5803 fold_cond_expr_with_comparison (location_t loc, tree type,
5804 enum tree_code comp_code,
5805 tree arg00, tree arg01, tree arg1, tree arg2)
5807 tree arg1_type = TREE_TYPE (arg1);
5808 tree tem;
5810 STRIP_NOPS (arg1);
5811 STRIP_NOPS (arg2);
5813 /* If we have A op 0 ? A : -A, consider applying the following
5814 transformations:
5816 A == 0? A : -A same as -A
5817 A != 0? A : -A same as A
5818 A >= 0? A : -A same as abs (A)
5819 A > 0? A : -A same as abs (A)
5820 A <= 0? A : -A same as -abs (A)
5821 A < 0? A : -A same as -abs (A)
5823 None of these transformations work for modes with signed
5824 zeros. If A is +/-0, the first two transformations will
5825 change the sign of the result (from +0 to -0, or vice
5826 versa). The last four will fix the sign of the result,
5827 even though the original expressions could be positive or
5828 negative, depending on the sign of A.
5830 Note that all these transformations are correct if A is
5831 NaN, since the two alternatives (A and -A) are also NaNs. */
5832 if (!HONOR_SIGNED_ZEROS (type)
5833 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5834 ? real_zerop (arg01)
5835 : integer_zerop (arg01))
5836 && ((TREE_CODE (arg2) == NEGATE_EXPR
5837 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5838 /* In the case that A is of the form X-Y, '-A' (arg2) may
5839 have already been folded to Y-X, check for that. */
5840 || (TREE_CODE (arg1) == MINUS_EXPR
5841 && TREE_CODE (arg2) == MINUS_EXPR
5842 && operand_equal_p (TREE_OPERAND (arg1, 0),
5843 TREE_OPERAND (arg2, 1), 0)
5844 && operand_equal_p (TREE_OPERAND (arg1, 1),
5845 TREE_OPERAND (arg2, 0), 0))))
5846 switch (comp_code)
5848 case EQ_EXPR:
5849 case UNEQ_EXPR:
5850 tem = fold_convert_loc (loc, arg1_type, arg1);
5851 return fold_convert_loc (loc, type, negate_expr (tem));
5852 case NE_EXPR:
5853 case LTGT_EXPR:
5854 return fold_convert_loc (loc, type, arg1);
5855 case UNGE_EXPR:
5856 case UNGT_EXPR:
5857 if (flag_trapping_math)
5858 break;
5859 /* Fall through. */
5860 case GE_EXPR:
5861 case GT_EXPR:
5862 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5863 break;
5864 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5865 return fold_convert_loc (loc, type, tem);
5866 case UNLE_EXPR:
5867 case UNLT_EXPR:
5868 if (flag_trapping_math)
5869 break;
5870 /* FALLTHRU */
5871 case LE_EXPR:
5872 case LT_EXPR:
5873 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5874 break;
5875 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5876 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5878 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5879 is not, invokes UB both in abs and in the negation of it.
5880 So, use ABSU_EXPR instead. */
5881 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5882 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5883 tem = negate_expr (tem);
5884 return fold_convert_loc (loc, type, tem);
5886 else
5888 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5889 return negate_expr (fold_convert_loc (loc, type, tem));
5891 default:
5892 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5893 break;
5896 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5897 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5898 both transformations are correct when A is NaN: A != 0
5899 is then true, and A == 0 is false. */
5901 if (!HONOR_SIGNED_ZEROS (type)
5902 && integer_zerop (arg01) && integer_zerop (arg2))
5904 if (comp_code == NE_EXPR)
5905 return fold_convert_loc (loc, type, arg1);
5906 else if (comp_code == EQ_EXPR)
5907 return build_zero_cst (type);
5910 /* Try some transformations of A op B ? A : B.
5912 A == B? A : B same as B
5913 A != B? A : B same as A
5914 A >= B? A : B same as max (A, B)
5915 A > B? A : B same as max (B, A)
5916 A <= B? A : B same as min (A, B)
5917 A < B? A : B same as min (B, A)
5919 As above, these transformations don't work in the presence
5920 of signed zeros. For example, if A and B are zeros of
5921 opposite sign, the first two transformations will change
5922 the sign of the result. In the last four, the original
5923 expressions give different results for (A=+0, B=-0) and
5924 (A=-0, B=+0), but the transformed expressions do not.
5926 The first two transformations are correct if either A or B
5927 is a NaN. In the first transformation, the condition will
5928 be false, and B will indeed be chosen. In the case of the
5929 second transformation, the condition A != B will be true,
5930 and A will be chosen.
5932 The conversions to max() and min() are not correct if B is
5933 a number and A is not. The conditions in the original
5934 expressions will be false, so all four give B. The min()
5935 and max() versions would give a NaN instead. */
5936 if (!HONOR_SIGNED_ZEROS (type)
5937 && operand_equal_for_comparison_p (arg01, arg2)
5938 /* Avoid these transformations if the COND_EXPR may be used
5939 as an lvalue in the C++ front-end. PR c++/19199. */
5940 && (in_gimple_form
5941 || VECTOR_TYPE_P (type)
5942 || (! lang_GNU_CXX ()
5943 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5944 || ! maybe_lvalue_p (arg1)
5945 || ! maybe_lvalue_p (arg2)))
5947 tree comp_op0 = arg00;
5948 tree comp_op1 = arg01;
5949 tree comp_type = TREE_TYPE (comp_op0);
5951 switch (comp_code)
5953 case EQ_EXPR:
5954 return fold_convert_loc (loc, type, arg2);
5955 case NE_EXPR:
5956 return fold_convert_loc (loc, type, arg1);
5957 case LE_EXPR:
5958 case LT_EXPR:
5959 case UNLE_EXPR:
5960 case UNLT_EXPR:
5961 /* In C++ a ?: expression can be an lvalue, so put the
5962 operand which will be used if they are equal first
5963 so that we can convert this back to the
5964 corresponding COND_EXPR. */
5965 if (!HONOR_NANS (arg1))
5967 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5968 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5969 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5970 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5971 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5972 comp_op1, comp_op0);
5973 return fold_convert_loc (loc, type, tem);
5975 break;
5976 case GE_EXPR:
5977 case GT_EXPR:
5978 case UNGE_EXPR:
5979 case UNGT_EXPR:
5980 if (!HONOR_NANS (arg1))
5982 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5983 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5984 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5985 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5986 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5987 comp_op1, comp_op0);
5988 return fold_convert_loc (loc, type, tem);
5990 break;
5991 case UNEQ_EXPR:
5992 if (!HONOR_NANS (arg1))
5993 return fold_convert_loc (loc, type, arg2);
5994 break;
5995 case LTGT_EXPR:
5996 if (!HONOR_NANS (arg1))
5997 return fold_convert_loc (loc, type, arg1);
5998 break;
5999 default:
6000 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6001 break;
6005 return NULL_TREE;
6010 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6011 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6012 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6013 false) >= 2)
6014 #endif
6016 /* EXP is some logical combination of boolean tests. See if we can
6017 merge it into some range test. Return the new tree if so. */
6019 static tree
6020 fold_range_test (location_t loc, enum tree_code code, tree type,
6021 tree op0, tree op1)
6023 int or_op = (code == TRUTH_ORIF_EXPR
6024 || code == TRUTH_OR_EXPR);
6025 int in0_p, in1_p, in_p;
6026 tree low0, low1, low, high0, high1, high;
6027 bool strict_overflow_p = false;
6028 tree tem, lhs, rhs;
6029 const char * const warnmsg = G_("assuming signed overflow does not occur "
6030 "when simplifying range test");
6032 if (!INTEGRAL_TYPE_P (type))
6033 return 0;
6035 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6036 /* If op0 is known true or false and this is a short-circuiting
6037 operation we must not merge with op1 since that makes side-effects
6038 unconditional. So special-case this. */
6039 if (!lhs
6040 && ((code == TRUTH_ORIF_EXPR && in0_p)
6041 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6042 return op0;
6043 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6045 /* If this is an OR operation, invert both sides; we will invert
6046 again at the end. */
6047 if (or_op)
6048 in0_p = ! in0_p, in1_p = ! in1_p;
6050 /* If both expressions are the same, if we can merge the ranges, and we
6051 can build the range test, return it or it inverted. If one of the
6052 ranges is always true or always false, consider it to be the same
6053 expression as the other. */
6054 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6055 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6056 in1_p, low1, high1)
6057 && (tem = (build_range_check (loc, type,
6058 lhs != 0 ? lhs
6059 : rhs != 0 ? rhs : integer_zero_node,
6060 in_p, low, high))) != 0)
6062 if (strict_overflow_p)
6063 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6064 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6067 /* On machines where the branch cost is expensive, if this is a
6068 short-circuited branch and the underlying object on both sides
6069 is the same, make a non-short-circuit operation. */
6070 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6071 if (param_logical_op_non_short_circuit != -1)
6072 logical_op_non_short_circuit
6073 = param_logical_op_non_short_circuit;
6074 if (logical_op_non_short_circuit
6075 && !sanitize_coverage_p ()
6076 && lhs != 0 && rhs != 0
6077 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6078 && operand_equal_p (lhs, rhs, 0))
6080 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6081 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6082 which cases we can't do this. */
6083 if (simple_operand_p (lhs))
6084 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6085 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6086 type, op0, op1);
6088 else if (!lang_hooks.decls.global_bindings_p ()
6089 && !CONTAINS_PLACEHOLDER_P (lhs))
6091 tree common = save_expr (lhs);
6093 if ((lhs = build_range_check (loc, type, common,
6094 or_op ? ! in0_p : in0_p,
6095 low0, high0)) != 0
6096 && (rhs = build_range_check (loc, type, common,
6097 or_op ? ! in1_p : in1_p,
6098 low1, high1)) != 0)
6100 if (strict_overflow_p)
6101 fold_overflow_warning (warnmsg,
6102 WARN_STRICT_OVERFLOW_COMPARISON);
6103 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6104 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6105 type, lhs, rhs);
6110 return 0;
6113 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6114 bit value. Arrange things so the extra bits will be set to zero if and
6115 only if C is signed-extended to its full width. If MASK is nonzero,
6116 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6118 static tree
6119 unextend (tree c, int p, int unsignedp, tree mask)
6121 tree type = TREE_TYPE (c);
6122 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6123 tree temp;
6125 if (p == modesize || unsignedp)
6126 return c;
6128 /* We work by getting just the sign bit into the low-order bit, then
6129 into the high-order bit, then sign-extend. We then XOR that value
6130 with C. */
6131 temp = build_int_cst (TREE_TYPE (c),
6132 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6134 /* We must use a signed type in order to get an arithmetic right shift.
6135 However, we must also avoid introducing accidental overflows, so that
6136 a subsequent call to integer_zerop will work. Hence we must
6137 do the type conversion here. At this point, the constant is either
6138 zero or one, and the conversion to a signed type can never overflow.
6139 We could get an overflow if this conversion is done anywhere else. */
6140 if (TYPE_UNSIGNED (type))
6141 temp = fold_convert (signed_type_for (type), temp);
6143 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6144 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6145 if (mask != 0)
6146 temp = const_binop (BIT_AND_EXPR, temp,
6147 fold_convert (TREE_TYPE (c), mask));
6148 /* If necessary, convert the type back to match the type of C. */
6149 if (TYPE_UNSIGNED (type))
6150 temp = fold_convert (type, temp);
6152 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6155 /* For an expression that has the form
6156 (A && B) || ~B
6158 (A || B) && ~B,
6159 we can drop one of the inner expressions and simplify to
6160 A || ~B
6162 A && ~B
6163 LOC is the location of the resulting expression. OP is the inner
6164 logical operation; the left-hand side in the examples above, while CMPOP
6165 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6166 removing a condition that guards another, as in
6167 (A != NULL && A->...) || A == NULL
6168 which we must not transform. If RHS_ONLY is true, only eliminate the
6169 right-most operand of the inner logical operation. */
6171 static tree
6172 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6173 bool rhs_only)
6175 tree type = TREE_TYPE (cmpop);
6176 enum tree_code code = TREE_CODE (cmpop);
6177 enum tree_code truthop_code = TREE_CODE (op);
6178 tree lhs = TREE_OPERAND (op, 0);
6179 tree rhs = TREE_OPERAND (op, 1);
6180 tree orig_lhs = lhs, orig_rhs = rhs;
6181 enum tree_code rhs_code = TREE_CODE (rhs);
6182 enum tree_code lhs_code = TREE_CODE (lhs);
6183 enum tree_code inv_code;
6185 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6186 return NULL_TREE;
6188 if (TREE_CODE_CLASS (code) != tcc_comparison)
6189 return NULL_TREE;
6191 if (rhs_code == truthop_code)
6193 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6194 if (newrhs != NULL_TREE)
6196 rhs = newrhs;
6197 rhs_code = TREE_CODE (rhs);
6200 if (lhs_code == truthop_code && !rhs_only)
6202 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6203 if (newlhs != NULL_TREE)
6205 lhs = newlhs;
6206 lhs_code = TREE_CODE (lhs);
6210 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6211 if (inv_code == rhs_code
6212 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6213 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6214 return lhs;
6215 if (!rhs_only && inv_code == lhs_code
6216 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6217 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6218 return rhs;
6219 if (rhs != orig_rhs || lhs != orig_lhs)
6220 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6221 lhs, rhs);
6222 return NULL_TREE;
6225 /* Find ways of folding logical expressions of LHS and RHS:
6226 Try to merge two comparisons to the same innermost item.
6227 Look for range tests like "ch >= '0' && ch <= '9'".
6228 Look for combinations of simple terms on machines with expensive branches
6229 and evaluate the RHS unconditionally.
6231 For example, if we have p->a == 2 && p->b == 4 and we can make an
6232 object large enough to span both A and B, we can do this with a comparison
6233 against the object ANDed with the a mask.
6235 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6236 operations to do this with one comparison.
6238 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6239 function and the one above.
6241 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6242 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6244 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6245 two operands.
6247 We return the simplified tree or 0 if no optimization is possible. */
6249 static tree
6250 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6251 tree lhs, tree rhs)
6253 /* If this is the "or" of two comparisons, we can do something if
6254 the comparisons are NE_EXPR. If this is the "and", we can do something
6255 if the comparisons are EQ_EXPR. I.e.,
6256 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6258 WANTED_CODE is this operation code. For single bit fields, we can
6259 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6260 comparison for one-bit fields. */
6262 enum tree_code wanted_code;
6263 enum tree_code lcode, rcode;
6264 tree ll_arg, lr_arg, rl_arg, rr_arg;
6265 tree ll_inner, lr_inner, rl_inner, rr_inner;
6266 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6267 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6268 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6269 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6270 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6271 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6272 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6273 scalar_int_mode lnmode, rnmode;
6274 tree ll_mask, lr_mask, rl_mask, rr_mask;
6275 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6276 tree l_const, r_const;
6277 tree lntype, rntype, result;
6278 HOST_WIDE_INT first_bit, end_bit;
6279 int volatilep;
6281 /* Start by getting the comparison codes. Fail if anything is volatile.
6282 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6283 it were surrounded with a NE_EXPR. */
6285 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6286 return 0;
6288 lcode = TREE_CODE (lhs);
6289 rcode = TREE_CODE (rhs);
6291 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6293 lhs = build2 (NE_EXPR, truth_type, lhs,
6294 build_int_cst (TREE_TYPE (lhs), 0));
6295 lcode = NE_EXPR;
6298 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6300 rhs = build2 (NE_EXPR, truth_type, rhs,
6301 build_int_cst (TREE_TYPE (rhs), 0));
6302 rcode = NE_EXPR;
6305 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6306 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6307 return 0;
6309 ll_arg = TREE_OPERAND (lhs, 0);
6310 lr_arg = TREE_OPERAND (lhs, 1);
6311 rl_arg = TREE_OPERAND (rhs, 0);
6312 rr_arg = TREE_OPERAND (rhs, 1);
6314 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6315 if (simple_operand_p (ll_arg)
6316 && simple_operand_p (lr_arg))
6318 if (operand_equal_p (ll_arg, rl_arg, 0)
6319 && operand_equal_p (lr_arg, rr_arg, 0))
6321 result = combine_comparisons (loc, code, lcode, rcode,
6322 truth_type, ll_arg, lr_arg);
6323 if (result)
6324 return result;
6326 else if (operand_equal_p (ll_arg, rr_arg, 0)
6327 && operand_equal_p (lr_arg, rl_arg, 0))
6329 result = combine_comparisons (loc, code, lcode,
6330 swap_tree_comparison (rcode),
6331 truth_type, ll_arg, lr_arg);
6332 if (result)
6333 return result;
6337 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6338 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6340 /* If the RHS can be evaluated unconditionally and its operands are
6341 simple, it wins to evaluate the RHS unconditionally on machines
6342 with expensive branches. In this case, this isn't a comparison
6343 that can be merged. */
6345 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6346 false) >= 2
6347 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6348 && simple_operand_p (rl_arg)
6349 && simple_operand_p (rr_arg))
6351 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6352 if (code == TRUTH_OR_EXPR
6353 && lcode == NE_EXPR && integer_zerop (lr_arg)
6354 && rcode == NE_EXPR && integer_zerop (rr_arg)
6355 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6356 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6357 return build2_loc (loc, NE_EXPR, truth_type,
6358 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6359 ll_arg, rl_arg),
6360 build_int_cst (TREE_TYPE (ll_arg), 0));
6362 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6363 if (code == TRUTH_AND_EXPR
6364 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6365 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6366 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6367 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6368 return build2_loc (loc, EQ_EXPR, truth_type,
6369 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6370 ll_arg, rl_arg),
6371 build_int_cst (TREE_TYPE (ll_arg), 0));
6374 /* See if the comparisons can be merged. Then get all the parameters for
6375 each side. */
6377 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6378 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6379 return 0;
6381 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6382 volatilep = 0;
6383 ll_inner = decode_field_reference (loc, &ll_arg,
6384 &ll_bitsize, &ll_bitpos, &ll_mode,
6385 &ll_unsignedp, &ll_reversep, &volatilep,
6386 &ll_mask, &ll_and_mask);
6387 lr_inner = decode_field_reference (loc, &lr_arg,
6388 &lr_bitsize, &lr_bitpos, &lr_mode,
6389 &lr_unsignedp, &lr_reversep, &volatilep,
6390 &lr_mask, &lr_and_mask);
6391 rl_inner = decode_field_reference (loc, &rl_arg,
6392 &rl_bitsize, &rl_bitpos, &rl_mode,
6393 &rl_unsignedp, &rl_reversep, &volatilep,
6394 &rl_mask, &rl_and_mask);
6395 rr_inner = decode_field_reference (loc, &rr_arg,
6396 &rr_bitsize, &rr_bitpos, &rr_mode,
6397 &rr_unsignedp, &rr_reversep, &volatilep,
6398 &rr_mask, &rr_and_mask);
6400 /* It must be true that the inner operation on the lhs of each
6401 comparison must be the same if we are to be able to do anything.
6402 Then see if we have constants. If not, the same must be true for
6403 the rhs's. */
6404 if (volatilep
6405 || ll_reversep != rl_reversep
6406 || ll_inner == 0 || rl_inner == 0
6407 || ! operand_equal_p (ll_inner, rl_inner, 0))
6408 return 0;
6410 if (TREE_CODE (lr_arg) == INTEGER_CST
6411 && TREE_CODE (rr_arg) == INTEGER_CST)
6413 l_const = lr_arg, r_const = rr_arg;
6414 lr_reversep = ll_reversep;
6416 else if (lr_reversep != rr_reversep
6417 || lr_inner == 0 || rr_inner == 0
6418 || ! operand_equal_p (lr_inner, rr_inner, 0))
6419 return 0;
6420 else
6421 l_const = r_const = 0;
6423 /* If either comparison code is not correct for our logical operation,
6424 fail. However, we can convert a one-bit comparison against zero into
6425 the opposite comparison against that bit being set in the field. */
6427 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6428 if (lcode != wanted_code)
6430 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6432 /* Make the left operand unsigned, since we are only interested
6433 in the value of one bit. Otherwise we are doing the wrong
6434 thing below. */
6435 ll_unsignedp = 1;
6436 l_const = ll_mask;
6438 else
6439 return 0;
6442 /* This is analogous to the code for l_const above. */
6443 if (rcode != wanted_code)
6445 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6447 rl_unsignedp = 1;
6448 r_const = rl_mask;
6450 else
6451 return 0;
6454 /* See if we can find a mode that contains both fields being compared on
6455 the left. If we can't, fail. Otherwise, update all constants and masks
6456 to be relative to a field of that size. */
6457 first_bit = MIN (ll_bitpos, rl_bitpos);
6458 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6459 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6460 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6461 volatilep, &lnmode))
6462 return 0;
6464 lnbitsize = GET_MODE_BITSIZE (lnmode);
6465 lnbitpos = first_bit & ~ (lnbitsize - 1);
6466 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6467 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6469 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6471 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6472 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6475 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6476 size_int (xll_bitpos));
6477 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6478 size_int (xrl_bitpos));
6479 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6480 return 0;
6482 if (l_const)
6484 l_const = fold_convert_loc (loc, lntype, l_const);
6485 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6486 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6487 if (l_const == NULL_TREE)
6488 return 0;
6489 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6490 fold_build1_loc (loc, BIT_NOT_EXPR,
6491 lntype, ll_mask))))
6493 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6495 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6498 if (r_const)
6500 r_const = fold_convert_loc (loc, lntype, r_const);
6501 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6502 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6503 if (r_const == NULL_TREE)
6504 return 0;
6505 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6506 fold_build1_loc (loc, BIT_NOT_EXPR,
6507 lntype, rl_mask))))
6509 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6511 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6515 /* If the right sides are not constant, do the same for it. Also,
6516 disallow this optimization if a size, signedness or storage order
6517 mismatch occurs between the left and right sides. */
6518 if (l_const == 0)
6520 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6521 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6522 || ll_reversep != lr_reversep
6523 /* Make sure the two fields on the right
6524 correspond to the left without being swapped. */
6525 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6526 return 0;
6528 first_bit = MIN (lr_bitpos, rr_bitpos);
6529 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6530 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6531 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6532 volatilep, &rnmode))
6533 return 0;
6535 rnbitsize = GET_MODE_BITSIZE (rnmode);
6536 rnbitpos = first_bit & ~ (rnbitsize - 1);
6537 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6538 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6540 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6542 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6543 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6546 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6547 rntype, lr_mask),
6548 size_int (xlr_bitpos));
6549 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6550 rntype, rr_mask),
6551 size_int (xrr_bitpos));
6552 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6553 return 0;
6555 /* Make a mask that corresponds to both fields being compared.
6556 Do this for both items being compared. If the operands are the
6557 same size and the bits being compared are in the same position
6558 then we can do this by masking both and comparing the masked
6559 results. */
6560 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6561 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6562 if (lnbitsize == rnbitsize
6563 && xll_bitpos == xlr_bitpos
6564 && lnbitpos >= 0
6565 && rnbitpos >= 0)
6567 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6568 lntype, lnbitsize, lnbitpos,
6569 ll_unsignedp || rl_unsignedp, ll_reversep);
6570 if (! all_ones_mask_p (ll_mask, lnbitsize))
6571 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6573 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6574 rntype, rnbitsize, rnbitpos,
6575 lr_unsignedp || rr_unsignedp, lr_reversep);
6576 if (! all_ones_mask_p (lr_mask, rnbitsize))
6577 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6579 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6582 /* There is still another way we can do something: If both pairs of
6583 fields being compared are adjacent, we may be able to make a wider
6584 field containing them both.
6586 Note that we still must mask the lhs/rhs expressions. Furthermore,
6587 the mask must be shifted to account for the shift done by
6588 make_bit_field_ref. */
6589 if (((ll_bitsize + ll_bitpos == rl_bitpos
6590 && lr_bitsize + lr_bitpos == rr_bitpos)
6591 || (ll_bitpos == rl_bitpos + rl_bitsize
6592 && lr_bitpos == rr_bitpos + rr_bitsize))
6593 && ll_bitpos >= 0
6594 && rl_bitpos >= 0
6595 && lr_bitpos >= 0
6596 && rr_bitpos >= 0)
6598 tree type;
6600 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6601 ll_bitsize + rl_bitsize,
6602 MIN (ll_bitpos, rl_bitpos),
6603 ll_unsignedp, ll_reversep);
6604 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6605 lr_bitsize + rr_bitsize,
6606 MIN (lr_bitpos, rr_bitpos),
6607 lr_unsignedp, lr_reversep);
6609 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6610 size_int (MIN (xll_bitpos, xrl_bitpos)));
6611 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6612 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6613 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6614 return 0;
6616 /* Convert to the smaller type before masking out unwanted bits. */
6617 type = lntype;
6618 if (lntype != rntype)
6620 if (lnbitsize > rnbitsize)
6622 lhs = fold_convert_loc (loc, rntype, lhs);
6623 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6624 type = rntype;
6626 else if (lnbitsize < rnbitsize)
6628 rhs = fold_convert_loc (loc, lntype, rhs);
6629 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6630 type = lntype;
6634 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6635 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6637 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6638 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6640 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6643 return 0;
6646 /* Handle the case of comparisons with constants. If there is something in
6647 common between the masks, those bits of the constants must be the same.
6648 If not, the condition is always false. Test for this to avoid generating
6649 incorrect code below. */
6650 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6651 if (! integer_zerop (result)
6652 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6653 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6655 if (wanted_code == NE_EXPR)
6657 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6658 return constant_boolean_node (true, truth_type);
6660 else
6662 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6663 return constant_boolean_node (false, truth_type);
6667 if (lnbitpos < 0)
6668 return 0;
6670 /* Construct the expression we will return. First get the component
6671 reference we will make. Unless the mask is all ones the width of
6672 that field, perform the mask operation. Then compare with the
6673 merged constant. */
6674 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6675 lntype, lnbitsize, lnbitpos,
6676 ll_unsignedp || rl_unsignedp, ll_reversep);
6678 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6679 if (! all_ones_mask_p (ll_mask, lnbitsize))
6680 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6682 return build2_loc (loc, wanted_code, truth_type, result,
6683 const_binop (BIT_IOR_EXPR, l_const, r_const));
6686 /* T is an integer expression that is being multiplied, divided, or taken a
6687 modulus (CODE says which and what kind of divide or modulus) by a
6688 constant C. See if we can eliminate that operation by folding it with
6689 other operations already in T. WIDE_TYPE, if non-null, is a type that
6690 should be used for the computation if wider than our type.
6692 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6693 (X * 2) + (Y * 4). We must, however, be assured that either the original
6694 expression would not overflow or that overflow is undefined for the type
6695 in the language in question.
6697 If we return a non-null expression, it is an equivalent form of the
6698 original computation, but need not be in the original type.
6700 We set *STRICT_OVERFLOW_P to true if the return values depends on
6701 signed overflow being undefined. Otherwise we do not change
6702 *STRICT_OVERFLOW_P. */
6704 static tree
6705 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6706 bool *strict_overflow_p)
6708 /* To avoid exponential search depth, refuse to allow recursion past
6709 three levels. Beyond that (1) it's highly unlikely that we'll find
6710 something interesting and (2) we've probably processed it before
6711 when we built the inner expression. */
6713 static int depth;
6714 tree ret;
6716 if (depth > 3)
6717 return NULL;
6719 depth++;
6720 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6721 depth--;
6723 return ret;
6726 static tree
6727 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6728 bool *strict_overflow_p)
6730 tree type = TREE_TYPE (t);
6731 enum tree_code tcode = TREE_CODE (t);
6732 tree ctype = (wide_type != 0
6733 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6734 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6735 ? wide_type : type);
6736 tree t1, t2;
6737 int same_p = tcode == code;
6738 tree op0 = NULL_TREE, op1 = NULL_TREE;
6739 bool sub_strict_overflow_p;
6741 /* Don't deal with constants of zero here; they confuse the code below. */
6742 if (integer_zerop (c))
6743 return NULL_TREE;
6745 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6746 op0 = TREE_OPERAND (t, 0);
6748 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6749 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6751 /* Note that we need not handle conditional operations here since fold
6752 already handles those cases. So just do arithmetic here. */
6753 switch (tcode)
6755 case INTEGER_CST:
6756 /* For a constant, we can always simplify if we are a multiply
6757 or (for divide and modulus) if it is a multiple of our constant. */
6758 if (code == MULT_EXPR
6759 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6760 TYPE_SIGN (type)))
6762 tree tem = const_binop (code, fold_convert (ctype, t),
6763 fold_convert (ctype, c));
6764 /* If the multiplication overflowed, we lost information on it.
6765 See PR68142 and PR69845. */
6766 if (TREE_OVERFLOW (tem))
6767 return NULL_TREE;
6768 return tem;
6770 break;
6772 CASE_CONVERT: case NON_LVALUE_EXPR:
6773 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6774 break;
6775 /* If op0 is an expression ... */
6776 if ((COMPARISON_CLASS_P (op0)
6777 || UNARY_CLASS_P (op0)
6778 || BINARY_CLASS_P (op0)
6779 || VL_EXP_CLASS_P (op0)
6780 || EXPRESSION_CLASS_P (op0))
6781 /* ... and has wrapping overflow, and its type is smaller
6782 than ctype, then we cannot pass through as widening. */
6783 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6784 && (TYPE_PRECISION (ctype)
6785 > TYPE_PRECISION (TREE_TYPE (op0))))
6786 /* ... or this is a truncation (t is narrower than op0),
6787 then we cannot pass through this narrowing. */
6788 || (TYPE_PRECISION (type)
6789 < TYPE_PRECISION (TREE_TYPE (op0)))
6790 /* ... or signedness changes for division or modulus,
6791 then we cannot pass through this conversion. */
6792 || (code != MULT_EXPR
6793 && (TYPE_UNSIGNED (ctype)
6794 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6795 /* ... or has undefined overflow while the converted to
6796 type has not, we cannot do the operation in the inner type
6797 as that would introduce undefined overflow. */
6798 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6799 && !TYPE_OVERFLOW_UNDEFINED (type))))
6800 break;
6802 /* Pass the constant down and see if we can make a simplification. If
6803 we can, replace this expression with the inner simplification for
6804 possible later conversion to our or some other type. */
6805 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6806 && TREE_CODE (t2) == INTEGER_CST
6807 && !TREE_OVERFLOW (t2)
6808 && (t1 = extract_muldiv (op0, t2, code,
6809 code == MULT_EXPR ? ctype : NULL_TREE,
6810 strict_overflow_p)) != 0)
6811 return t1;
6812 break;
6814 case ABS_EXPR:
6815 /* If widening the type changes it from signed to unsigned, then we
6816 must avoid building ABS_EXPR itself as unsigned. */
6817 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6819 tree cstype = (*signed_type_for) (ctype);
6820 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6821 != 0)
6823 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6824 return fold_convert (ctype, t1);
6826 break;
6828 /* If the constant is negative, we cannot simplify this. */
6829 if (tree_int_cst_sgn (c) == -1)
6830 break;
6831 /* FALLTHROUGH */
6832 case NEGATE_EXPR:
6833 /* For division and modulus, type can't be unsigned, as e.g.
6834 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6835 For signed types, even with wrapping overflow, this is fine. */
6836 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6837 break;
6838 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6839 != 0)
6840 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6841 break;
6843 case MIN_EXPR: case MAX_EXPR:
6844 /* If widening the type changes the signedness, then we can't perform
6845 this optimization as that changes the result. */
6846 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6847 break;
6849 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6850 sub_strict_overflow_p = false;
6851 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6852 &sub_strict_overflow_p)) != 0
6853 && (t2 = extract_muldiv (op1, c, code, wide_type,
6854 &sub_strict_overflow_p)) != 0)
6856 if (tree_int_cst_sgn (c) < 0)
6857 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6858 if (sub_strict_overflow_p)
6859 *strict_overflow_p = true;
6860 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6861 fold_convert (ctype, t2));
6863 break;
6865 case LSHIFT_EXPR: case RSHIFT_EXPR:
6866 /* If the second operand is constant, this is a multiplication
6867 or floor division, by a power of two, so we can treat it that
6868 way unless the multiplier or divisor overflows. Signed
6869 left-shift overflow is implementation-defined rather than
6870 undefined in C90, so do not convert signed left shift into
6871 multiplication. */
6872 if (TREE_CODE (op1) == INTEGER_CST
6873 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6874 /* const_binop may not detect overflow correctly,
6875 so check for it explicitly here. */
6876 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6877 wi::to_wide (op1))
6878 && (t1 = fold_convert (ctype,
6879 const_binop (LSHIFT_EXPR, size_one_node,
6880 op1))) != 0
6881 && !TREE_OVERFLOW (t1))
6882 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6883 ? MULT_EXPR : FLOOR_DIV_EXPR,
6884 ctype,
6885 fold_convert (ctype, op0),
6886 t1),
6887 c, code, wide_type, strict_overflow_p);
6888 break;
6890 case PLUS_EXPR: case MINUS_EXPR:
6891 /* See if we can eliminate the operation on both sides. If we can, we
6892 can return a new PLUS or MINUS. If we can't, the only remaining
6893 cases where we can do anything are if the second operand is a
6894 constant. */
6895 sub_strict_overflow_p = false;
6896 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6897 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6898 if (t1 != 0 && t2 != 0
6899 && TYPE_OVERFLOW_WRAPS (ctype)
6900 && (code == MULT_EXPR
6901 /* If not multiplication, we can only do this if both operands
6902 are divisible by c. */
6903 || (multiple_of_p (ctype, op0, c)
6904 && multiple_of_p (ctype, op1, c))))
6906 if (sub_strict_overflow_p)
6907 *strict_overflow_p = true;
6908 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6909 fold_convert (ctype, t2));
6912 /* If this was a subtraction, negate OP1 and set it to be an addition.
6913 This simplifies the logic below. */
6914 if (tcode == MINUS_EXPR)
6916 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6917 /* If OP1 was not easily negatable, the constant may be OP0. */
6918 if (TREE_CODE (op0) == INTEGER_CST)
6920 std::swap (op0, op1);
6921 std::swap (t1, t2);
6925 if (TREE_CODE (op1) != INTEGER_CST)
6926 break;
6928 /* If either OP1 or C are negative, this optimization is not safe for
6929 some of the division and remainder types while for others we need
6930 to change the code. */
6931 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6933 if (code == CEIL_DIV_EXPR)
6934 code = FLOOR_DIV_EXPR;
6935 else if (code == FLOOR_DIV_EXPR)
6936 code = CEIL_DIV_EXPR;
6937 else if (code != MULT_EXPR
6938 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6939 break;
6942 /* If it's a multiply or a division/modulus operation of a multiple
6943 of our constant, do the operation and verify it doesn't overflow. */
6944 if (code == MULT_EXPR
6945 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6946 TYPE_SIGN (type)))
6948 op1 = const_binop (code, fold_convert (ctype, op1),
6949 fold_convert (ctype, c));
6950 /* We allow the constant to overflow with wrapping semantics. */
6951 if (op1 == 0
6952 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6953 break;
6955 else
6956 break;
6958 /* If we have an unsigned type, we cannot widen the operation since it
6959 will change the result if the original computation overflowed. */
6960 if (TYPE_UNSIGNED (ctype) && ctype != type)
6961 break;
6963 /* The last case is if we are a multiply. In that case, we can
6964 apply the distributive law to commute the multiply and addition
6965 if the multiplication of the constants doesn't overflow
6966 and overflow is defined. With undefined overflow
6967 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6968 But fold_plusminus_mult_expr would factor back any power-of-two
6969 value so do not distribute in the first place in this case. */
6970 if (code == MULT_EXPR
6971 && TYPE_OVERFLOW_WRAPS (ctype)
6972 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6973 return fold_build2 (tcode, ctype,
6974 fold_build2 (code, ctype,
6975 fold_convert (ctype, op0),
6976 fold_convert (ctype, c)),
6977 op1);
6979 break;
6981 case MULT_EXPR:
6982 /* We have a special case here if we are doing something like
6983 (C * 8) % 4 since we know that's zero. */
6984 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6985 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6986 /* If the multiplication can overflow we cannot optimize this. */
6987 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6988 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6989 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6990 TYPE_SIGN (type)))
6992 *strict_overflow_p = true;
6993 return omit_one_operand (type, integer_zero_node, op0);
6996 /* ... fall through ... */
6998 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6999 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7000 /* If we can extract our operation from the LHS, do so and return a
7001 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7002 do something only if the second operand is a constant. */
7003 if (same_p
7004 && TYPE_OVERFLOW_WRAPS (ctype)
7005 && (t1 = extract_muldiv (op0, c, code, wide_type,
7006 strict_overflow_p)) != 0)
7007 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7008 fold_convert (ctype, op1));
7009 else if (tcode == MULT_EXPR && code == MULT_EXPR
7010 && TYPE_OVERFLOW_WRAPS (ctype)
7011 && (t1 = extract_muldiv (op1, c, code, wide_type,
7012 strict_overflow_p)) != 0)
7013 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7014 fold_convert (ctype, t1));
7015 else if (TREE_CODE (op1) != INTEGER_CST)
7016 return 0;
7018 /* If these are the same operation types, we can associate them
7019 assuming no overflow. */
7020 if (tcode == code)
7022 bool overflow_p = false;
7023 wi::overflow_type overflow_mul;
7024 signop sign = TYPE_SIGN (ctype);
7025 unsigned prec = TYPE_PRECISION (ctype);
7026 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7027 wi::to_wide (c, prec),
7028 sign, &overflow_mul);
7029 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7030 if (overflow_mul
7031 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7032 overflow_p = true;
7033 if (!overflow_p)
7034 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7035 wide_int_to_tree (ctype, mul));
7038 /* If these operations "cancel" each other, we have the main
7039 optimizations of this pass, which occur when either constant is a
7040 multiple of the other, in which case we replace this with either an
7041 operation or CODE or TCODE.
7043 If we have an unsigned type, we cannot do this since it will change
7044 the result if the original computation overflowed. */
7045 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7046 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7047 || (tcode == MULT_EXPR
7048 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7049 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7050 && code != MULT_EXPR)))
7052 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7053 TYPE_SIGN (type)))
7055 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7056 *strict_overflow_p = true;
7057 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7058 fold_convert (ctype,
7059 const_binop (TRUNC_DIV_EXPR,
7060 op1, c)));
7062 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7063 TYPE_SIGN (type)))
7065 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7066 *strict_overflow_p = true;
7067 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7068 fold_convert (ctype,
7069 const_binop (TRUNC_DIV_EXPR,
7070 c, op1)));
7073 break;
7075 default:
7076 break;
7079 return 0;
7082 /* Return a node which has the indicated constant VALUE (either 0 or
7083 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7084 and is of the indicated TYPE. */
7086 tree
7087 constant_boolean_node (bool value, tree type)
7089 if (type == integer_type_node)
7090 return value ? integer_one_node : integer_zero_node;
7091 else if (type == boolean_type_node)
7092 return value ? boolean_true_node : boolean_false_node;
7093 else if (TREE_CODE (type) == VECTOR_TYPE)
7094 return build_vector_from_val (type,
7095 build_int_cst (TREE_TYPE (type),
7096 value ? -1 : 0));
7097 else
7098 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7102 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7103 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7104 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7105 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7106 COND is the first argument to CODE; otherwise (as in the example
7107 given here), it is the second argument. TYPE is the type of the
7108 original expression. Return NULL_TREE if no simplification is
7109 possible. */
7111 static tree
7112 fold_binary_op_with_conditional_arg (location_t loc,
7113 enum tree_code code,
7114 tree type, tree op0, tree op1,
7115 tree cond, tree arg, int cond_first_p)
7117 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7118 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7119 tree test, true_value, false_value;
7120 tree lhs = NULL_TREE;
7121 tree rhs = NULL_TREE;
7122 enum tree_code cond_code = COND_EXPR;
7124 /* Do not move possibly trapping operations into the conditional as this
7125 pessimizes code and causes gimplification issues when applied late. */
7126 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7127 ANY_INTEGRAL_TYPE_P (type)
7128 && TYPE_OVERFLOW_TRAPS (type), op1))
7129 return NULL_TREE;
7131 if (TREE_CODE (cond) == COND_EXPR
7132 || TREE_CODE (cond) == VEC_COND_EXPR)
7134 test = TREE_OPERAND (cond, 0);
7135 true_value = TREE_OPERAND (cond, 1);
7136 false_value = TREE_OPERAND (cond, 2);
7137 /* If this operand throws an expression, then it does not make
7138 sense to try to perform a logical or arithmetic operation
7139 involving it. */
7140 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7141 lhs = true_value;
7142 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7143 rhs = false_value;
7145 else if (!(TREE_CODE (type) != VECTOR_TYPE
7146 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7148 tree testtype = TREE_TYPE (cond);
7149 test = cond;
7150 true_value = constant_boolean_node (true, testtype);
7151 false_value = constant_boolean_node (false, testtype);
7153 else
7154 /* Detect the case of mixing vector and scalar types - bail out. */
7155 return NULL_TREE;
7157 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7158 cond_code = VEC_COND_EXPR;
7160 /* This transformation is only worthwhile if we don't have to wrap ARG
7161 in a SAVE_EXPR and the operation can be simplified without recursing
7162 on at least one of the branches once its pushed inside the COND_EXPR. */
7163 if (!TREE_CONSTANT (arg)
7164 && (TREE_SIDE_EFFECTS (arg)
7165 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7166 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7167 return NULL_TREE;
7169 arg = fold_convert_loc (loc, arg_type, arg);
7170 if (lhs == 0)
7172 true_value = fold_convert_loc (loc, cond_type, true_value);
7173 if (cond_first_p)
7174 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7175 else
7176 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7178 if (rhs == 0)
7180 false_value = fold_convert_loc (loc, cond_type, false_value);
7181 if (cond_first_p)
7182 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7183 else
7184 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7187 /* Check that we have simplified at least one of the branches. */
7188 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7189 return NULL_TREE;
7191 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7195 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7197 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7198 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7199 if ARG - ZERO_ARG is the same as X.
7201 If ARG is NULL, check for any value of type TYPE.
7203 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7204 and finite. The problematic cases are when X is zero, and its mode
7205 has signed zeros. In the case of rounding towards -infinity,
7206 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7207 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7209 bool
7210 fold_real_zero_addition_p (const_tree type, const_tree arg,
7211 const_tree zero_arg, int negate)
7213 if (!real_zerop (zero_arg))
7214 return false;
7216 /* Don't allow the fold with -fsignaling-nans. */
7217 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7218 return false;
7220 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7221 if (!HONOR_SIGNED_ZEROS (type))
7222 return true;
7224 /* There is no case that is safe for all rounding modes. */
7225 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7226 return false;
7228 /* In a vector or complex, we would need to check the sign of all zeros. */
7229 if (TREE_CODE (zero_arg) == VECTOR_CST)
7230 zero_arg = uniform_vector_p (zero_arg);
7231 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7232 return false;
7234 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7235 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7236 negate = !negate;
7238 /* The mode has signed zeros, and we have to honor their sign.
7239 In this situation, there are only two cases we can return true for.
7240 (i) X - 0 is the same as X with default rounding.
7241 (ii) X + 0 is X when X can't possibly be -0.0. */
7242 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7245 /* Subroutine of match.pd that optimizes comparisons of a division by
7246 a nonzero integer constant against an integer constant, i.e.
7247 X/C1 op C2.
7249 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7250 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7252 enum tree_code
7253 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7254 tree *hi, bool *neg_overflow)
7256 tree prod, tmp, type = TREE_TYPE (c1);
7257 signop sign = TYPE_SIGN (type);
7258 wi::overflow_type overflow;
7260 /* We have to do this the hard way to detect unsigned overflow.
7261 prod = int_const_binop (MULT_EXPR, c1, c2); */
7262 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7263 prod = force_fit_type (type, val, -1, overflow);
7264 *neg_overflow = false;
7266 if (sign == UNSIGNED)
7268 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7269 *lo = prod;
7271 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7272 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7273 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7275 else if (tree_int_cst_sgn (c1) >= 0)
7277 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7278 switch (tree_int_cst_sgn (c2))
7280 case -1:
7281 *neg_overflow = true;
7282 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7283 *hi = prod;
7284 break;
7286 case 0:
7287 *lo = fold_negate_const (tmp, type);
7288 *hi = tmp;
7289 break;
7291 case 1:
7292 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7293 *lo = prod;
7294 break;
7296 default:
7297 gcc_unreachable ();
7300 else
7302 /* A negative divisor reverses the relational operators. */
7303 code = swap_tree_comparison (code);
7305 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7306 switch (tree_int_cst_sgn (c2))
7308 case -1:
7309 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7310 *lo = prod;
7311 break;
7313 case 0:
7314 *hi = fold_negate_const (tmp, type);
7315 *lo = tmp;
7316 break;
7318 case 1:
7319 *neg_overflow = true;
7320 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7321 *hi = prod;
7322 break;
7324 default:
7325 gcc_unreachable ();
7329 if (code != EQ_EXPR && code != NE_EXPR)
7330 return code;
7332 if (TREE_OVERFLOW (*lo)
7333 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7334 *lo = NULL_TREE;
7335 if (TREE_OVERFLOW (*hi)
7336 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7337 *hi = NULL_TREE;
7339 return code;
7343 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7344 equality/inequality test, then return a simplified form of the test
7345 using a sign testing. Otherwise return NULL. TYPE is the desired
7346 result type. */
7348 static tree
7349 fold_single_bit_test_into_sign_test (location_t loc,
7350 enum tree_code code, tree arg0, tree arg1,
7351 tree result_type)
7353 /* If this is testing a single bit, we can optimize the test. */
7354 if ((code == NE_EXPR || code == EQ_EXPR)
7355 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7356 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7358 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7359 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7360 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7362 if (arg00 != NULL_TREE
7363 /* This is only a win if casting to a signed type is cheap,
7364 i.e. when arg00's type is not a partial mode. */
7365 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7367 tree stype = signed_type_for (TREE_TYPE (arg00));
7368 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7369 result_type,
7370 fold_convert_loc (loc, stype, arg00),
7371 build_int_cst (stype, 0));
7375 return NULL_TREE;
7378 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7379 equality/inequality test, then return a simplified form of
7380 the test using shifts and logical operations. Otherwise return
7381 NULL. TYPE is the desired result type. */
7383 tree
7384 fold_single_bit_test (location_t loc, enum tree_code code,
7385 tree arg0, tree arg1, tree result_type)
7387 /* If this is testing a single bit, we can optimize the test. */
7388 if ((code == NE_EXPR || code == EQ_EXPR)
7389 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7390 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7392 tree inner = TREE_OPERAND (arg0, 0);
7393 tree type = TREE_TYPE (arg0);
7394 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7395 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7396 int ops_unsigned;
7397 tree signed_type, unsigned_type, intermediate_type;
7398 tree tem, one;
7400 /* First, see if we can fold the single bit test into a sign-bit
7401 test. */
7402 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7403 result_type);
7404 if (tem)
7405 return tem;
7407 /* Otherwise we have (A & C) != 0 where C is a single bit,
7408 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7409 Similarly for (A & C) == 0. */
7411 /* If INNER is a right shift of a constant and it plus BITNUM does
7412 not overflow, adjust BITNUM and INNER. */
7413 if (TREE_CODE (inner) == RSHIFT_EXPR
7414 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7415 && bitnum < TYPE_PRECISION (type)
7416 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7417 TYPE_PRECISION (type) - bitnum))
7419 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7420 inner = TREE_OPERAND (inner, 0);
7423 /* If we are going to be able to omit the AND below, we must do our
7424 operations as unsigned. If we must use the AND, we have a choice.
7425 Normally unsigned is faster, but for some machines signed is. */
7426 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7427 && !flag_syntax_only) ? 0 : 1;
7429 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7430 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7431 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7432 inner = fold_convert_loc (loc, intermediate_type, inner);
7434 if (bitnum != 0)
7435 inner = build2 (RSHIFT_EXPR, intermediate_type,
7436 inner, size_int (bitnum));
7438 one = build_int_cst (intermediate_type, 1);
7440 if (code == EQ_EXPR)
7441 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7443 /* Put the AND last so it can combine with more things. */
7444 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7446 /* Make sure to return the proper type. */
7447 inner = fold_convert_loc (loc, result_type, inner);
7449 return inner;
7451 return NULL_TREE;
7454 /* Test whether it is preferable to swap two operands, ARG0 and
7455 ARG1, for example because ARG0 is an integer constant and ARG1
7456 isn't. */
7458 bool
7459 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7461 if (CONSTANT_CLASS_P (arg1))
7462 return 0;
7463 if (CONSTANT_CLASS_P (arg0))
7464 return 1;
7466 STRIP_NOPS (arg0);
7467 STRIP_NOPS (arg1);
7469 if (TREE_CONSTANT (arg1))
7470 return 0;
7471 if (TREE_CONSTANT (arg0))
7472 return 1;
7474 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7475 for commutative and comparison operators. Ensuring a canonical
7476 form allows the optimizers to find additional redundancies without
7477 having to explicitly check for both orderings. */
7478 if (TREE_CODE (arg0) == SSA_NAME
7479 && TREE_CODE (arg1) == SSA_NAME
7480 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7481 return 1;
7483 /* Put SSA_NAMEs last. */
7484 if (TREE_CODE (arg1) == SSA_NAME)
7485 return 0;
7486 if (TREE_CODE (arg0) == SSA_NAME)
7487 return 1;
7489 /* Put variables last. */
7490 if (DECL_P (arg1))
7491 return 0;
7492 if (DECL_P (arg0))
7493 return 1;
7495 return 0;
7499 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7500 means A >= Y && A != MAX, but in this case we know that
7501 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7503 static tree
7504 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7506 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7508 if (TREE_CODE (bound) == LT_EXPR)
7509 a = TREE_OPERAND (bound, 0);
7510 else if (TREE_CODE (bound) == GT_EXPR)
7511 a = TREE_OPERAND (bound, 1);
7512 else
7513 return NULL_TREE;
7515 typea = TREE_TYPE (a);
7516 if (!INTEGRAL_TYPE_P (typea)
7517 && !POINTER_TYPE_P (typea))
7518 return NULL_TREE;
7520 if (TREE_CODE (ineq) == LT_EXPR)
7522 a1 = TREE_OPERAND (ineq, 1);
7523 y = TREE_OPERAND (ineq, 0);
7525 else if (TREE_CODE (ineq) == GT_EXPR)
7527 a1 = TREE_OPERAND (ineq, 0);
7528 y = TREE_OPERAND (ineq, 1);
7530 else
7531 return NULL_TREE;
7533 if (TREE_TYPE (a1) != typea)
7534 return NULL_TREE;
7536 if (POINTER_TYPE_P (typea))
7538 /* Convert the pointer types into integer before taking the difference. */
7539 tree ta = fold_convert_loc (loc, ssizetype, a);
7540 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7541 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7543 else
7544 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7546 if (!diff || !integer_onep (diff))
7547 return NULL_TREE;
7549 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7552 /* Fold a sum or difference of at least one multiplication.
7553 Returns the folded tree or NULL if no simplification could be made. */
7555 static tree
7556 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7557 tree arg0, tree arg1)
7559 tree arg00, arg01, arg10, arg11;
7560 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7562 /* (A * C) +- (B * C) -> (A+-B) * C.
7563 (A * C) +- A -> A * (C+-1).
7564 We are most concerned about the case where C is a constant,
7565 but other combinations show up during loop reduction. Since
7566 it is not difficult, try all four possibilities. */
7568 if (TREE_CODE (arg0) == MULT_EXPR)
7570 arg00 = TREE_OPERAND (arg0, 0);
7571 arg01 = TREE_OPERAND (arg0, 1);
7573 else if (TREE_CODE (arg0) == INTEGER_CST)
7575 arg00 = build_one_cst (type);
7576 arg01 = arg0;
7578 else
7580 /* We cannot generate constant 1 for fract. */
7581 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7582 return NULL_TREE;
7583 arg00 = arg0;
7584 arg01 = build_one_cst (type);
7586 if (TREE_CODE (arg1) == MULT_EXPR)
7588 arg10 = TREE_OPERAND (arg1, 0);
7589 arg11 = TREE_OPERAND (arg1, 1);
7591 else if (TREE_CODE (arg1) == INTEGER_CST)
7593 arg10 = build_one_cst (type);
7594 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7595 the purpose of this canonicalization. */
7596 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7597 && negate_expr_p (arg1)
7598 && code == PLUS_EXPR)
7600 arg11 = negate_expr (arg1);
7601 code = MINUS_EXPR;
7603 else
7604 arg11 = arg1;
7606 else
7608 /* We cannot generate constant 1 for fract. */
7609 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7610 return NULL_TREE;
7611 arg10 = arg1;
7612 arg11 = build_one_cst (type);
7614 same = NULL_TREE;
7616 /* Prefer factoring a common non-constant. */
7617 if (operand_equal_p (arg00, arg10, 0))
7618 same = arg00, alt0 = arg01, alt1 = arg11;
7619 else if (operand_equal_p (arg01, arg11, 0))
7620 same = arg01, alt0 = arg00, alt1 = arg10;
7621 else if (operand_equal_p (arg00, arg11, 0))
7622 same = arg00, alt0 = arg01, alt1 = arg10;
7623 else if (operand_equal_p (arg01, arg10, 0))
7624 same = arg01, alt0 = arg00, alt1 = arg11;
7626 /* No identical multiplicands; see if we can find a common
7627 power-of-two factor in non-power-of-two multiplies. This
7628 can help in multi-dimensional array access. */
7629 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7631 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7632 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7633 HOST_WIDE_INT tmp;
7634 bool swap = false;
7635 tree maybe_same;
7637 /* Move min of absolute values to int11. */
7638 if (absu_hwi (int01) < absu_hwi (int11))
7640 tmp = int01, int01 = int11, int11 = tmp;
7641 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7642 maybe_same = arg01;
7643 swap = true;
7645 else
7646 maybe_same = arg11;
7648 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7649 if (factor > 1
7650 && pow2p_hwi (factor)
7651 && (int01 & (factor - 1)) == 0
7652 /* The remainder should not be a constant, otherwise we
7653 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7654 increased the number of multiplications necessary. */
7655 && TREE_CODE (arg10) != INTEGER_CST)
7657 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7658 build_int_cst (TREE_TYPE (arg00),
7659 int01 / int11));
7660 alt1 = arg10;
7661 same = maybe_same;
7662 if (swap)
7663 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7667 if (!same)
7668 return NULL_TREE;
7670 if (! ANY_INTEGRAL_TYPE_P (type)
7671 || TYPE_OVERFLOW_WRAPS (type)
7672 /* We are neither factoring zero nor minus one. */
7673 || TREE_CODE (same) == INTEGER_CST)
7674 return fold_build2_loc (loc, MULT_EXPR, type,
7675 fold_build2_loc (loc, code, type,
7676 fold_convert_loc (loc, type, alt0),
7677 fold_convert_loc (loc, type, alt1)),
7678 fold_convert_loc (loc, type, same));
7680 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7681 same may be minus one and thus the multiplication may overflow. Perform
7682 the sum operation in an unsigned type. */
7683 tree utype = unsigned_type_for (type);
7684 tree tem = fold_build2_loc (loc, code, utype,
7685 fold_convert_loc (loc, utype, alt0),
7686 fold_convert_loc (loc, utype, alt1));
7687 /* If the sum evaluated to a constant that is not -INF the multiplication
7688 cannot overflow. */
7689 if (TREE_CODE (tem) == INTEGER_CST
7690 && (wi::to_wide (tem)
7691 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7692 return fold_build2_loc (loc, MULT_EXPR, type,
7693 fold_convert (type, tem), same);
7695 /* Do not resort to unsigned multiplication because
7696 we lose the no-overflow property of the expression. */
7697 return NULL_TREE;
7700 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7701 specified by EXPR into the buffer PTR of length LEN bytes.
7702 Return the number of bytes placed in the buffer, or zero
7703 upon failure. */
7705 static int
7706 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7708 tree type = TREE_TYPE (expr);
7709 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7710 int byte, offset, word, words;
7711 unsigned char value;
7713 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7714 return 0;
7715 if (off == -1)
7716 off = 0;
7718 if (ptr == NULL)
7719 /* Dry run. */
7720 return MIN (len, total_bytes - off);
7722 words = total_bytes / UNITS_PER_WORD;
7724 for (byte = 0; byte < total_bytes; byte++)
7726 int bitpos = byte * BITS_PER_UNIT;
7727 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7728 number of bytes. */
7729 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7731 if (total_bytes > UNITS_PER_WORD)
7733 word = byte / UNITS_PER_WORD;
7734 if (WORDS_BIG_ENDIAN)
7735 word = (words - 1) - word;
7736 offset = word * UNITS_PER_WORD;
7737 if (BYTES_BIG_ENDIAN)
7738 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7739 else
7740 offset += byte % UNITS_PER_WORD;
7742 else
7743 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7744 if (offset >= off && offset - off < len)
7745 ptr[offset - off] = value;
7747 return MIN (len, total_bytes - off);
7751 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7752 specified by EXPR into the buffer PTR of length LEN bytes.
7753 Return the number of bytes placed in the buffer, or zero
7754 upon failure. */
7756 static int
7757 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7759 tree type = TREE_TYPE (expr);
7760 scalar_mode mode = SCALAR_TYPE_MODE (type);
7761 int total_bytes = GET_MODE_SIZE (mode);
7762 FIXED_VALUE_TYPE value;
7763 tree i_value, i_type;
7765 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7766 return 0;
7768 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7770 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7771 return 0;
7773 value = TREE_FIXED_CST (expr);
7774 i_value = double_int_to_tree (i_type, value.data);
7776 return native_encode_int (i_value, ptr, len, off);
7780 /* Subroutine of native_encode_expr. Encode the REAL_CST
7781 specified by EXPR into the buffer PTR of length LEN bytes.
7782 Return the number of bytes placed in the buffer, or zero
7783 upon failure. */
7785 static int
7786 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7788 tree type = TREE_TYPE (expr);
7789 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7790 int byte, offset, word, words, bitpos;
7791 unsigned char value;
7793 /* There are always 32 bits in each long, no matter the size of
7794 the hosts long. We handle floating point representations with
7795 up to 192 bits. */
7796 long tmp[6];
7798 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7799 return 0;
7800 if (off == -1)
7801 off = 0;
7803 if (ptr == NULL)
7804 /* Dry run. */
7805 return MIN (len, total_bytes - off);
7807 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7809 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7811 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7812 bitpos += BITS_PER_UNIT)
7814 byte = (bitpos / BITS_PER_UNIT) & 3;
7815 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7817 if (UNITS_PER_WORD < 4)
7819 word = byte / UNITS_PER_WORD;
7820 if (WORDS_BIG_ENDIAN)
7821 word = (words - 1) - word;
7822 offset = word * UNITS_PER_WORD;
7823 if (BYTES_BIG_ENDIAN)
7824 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7825 else
7826 offset += byte % UNITS_PER_WORD;
7828 else
7830 offset = byte;
7831 if (BYTES_BIG_ENDIAN)
7833 /* Reverse bytes within each long, or within the entire float
7834 if it's smaller than a long (for HFmode). */
7835 offset = MIN (3, total_bytes - 1) - offset;
7836 gcc_assert (offset >= 0);
7839 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7840 if (offset >= off
7841 && offset - off < len)
7842 ptr[offset - off] = value;
7844 return MIN (len, total_bytes - off);
7847 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7848 specified by EXPR into the buffer PTR of length LEN bytes.
7849 Return the number of bytes placed in the buffer, or zero
7850 upon failure. */
7852 static int
7853 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7855 int rsize, isize;
7856 tree part;
7858 part = TREE_REALPART (expr);
7859 rsize = native_encode_expr (part, ptr, len, off);
7860 if (off == -1 && rsize == 0)
7861 return 0;
7862 part = TREE_IMAGPART (expr);
7863 if (off != -1)
7864 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7865 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7866 len - rsize, off);
7867 if (off == -1 && isize != rsize)
7868 return 0;
7869 return rsize + isize;
7872 /* Like native_encode_vector, but only encode the first COUNT elements.
7873 The other arguments are as for native_encode_vector. */
7875 static int
7876 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7877 int off, unsigned HOST_WIDE_INT count)
7879 tree itype = TREE_TYPE (TREE_TYPE (expr));
7880 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7881 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7883 /* This is the only case in which elements can be smaller than a byte.
7884 Element 0 is always in the lsb of the containing byte. */
7885 unsigned int elt_bits = TYPE_PRECISION (itype);
7886 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7887 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7888 return 0;
7890 if (off == -1)
7891 off = 0;
7893 /* Zero the buffer and then set bits later where necessary. */
7894 int extract_bytes = MIN (len, total_bytes - off);
7895 if (ptr)
7896 memset (ptr, 0, extract_bytes);
7898 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7899 unsigned int first_elt = off * elts_per_byte;
7900 unsigned int extract_elts = extract_bytes * elts_per_byte;
7901 for (unsigned int i = 0; i < extract_elts; ++i)
7903 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7904 if (TREE_CODE (elt) != INTEGER_CST)
7905 return 0;
7907 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7909 unsigned int bit = i * elt_bits;
7910 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7913 return extract_bytes;
7916 int offset = 0;
7917 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7918 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7920 if (off >= size)
7922 off -= size;
7923 continue;
7925 tree elem = VECTOR_CST_ELT (expr, i);
7926 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7927 len - offset, off);
7928 if ((off == -1 && res != size) || res == 0)
7929 return 0;
7930 offset += res;
7931 if (offset >= len)
7932 return (off == -1 && i < count - 1) ? 0 : offset;
7933 if (off != -1)
7934 off = 0;
7936 return offset;
7939 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7940 specified by EXPR into the buffer PTR of length LEN bytes.
7941 Return the number of bytes placed in the buffer, or zero
7942 upon failure. */
7944 static int
7945 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7947 unsigned HOST_WIDE_INT count;
7948 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7949 return 0;
7950 return native_encode_vector_part (expr, ptr, len, off, count);
7954 /* Subroutine of native_encode_expr. Encode the STRING_CST
7955 specified by EXPR into the buffer PTR of length LEN bytes.
7956 Return the number of bytes placed in the buffer, or zero
7957 upon failure. */
7959 static int
7960 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7962 tree type = TREE_TYPE (expr);
7964 /* Wide-char strings are encoded in target byte-order so native
7965 encoding them is trivial. */
7966 if (BITS_PER_UNIT != CHAR_BIT
7967 || TREE_CODE (type) != ARRAY_TYPE
7968 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7969 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7970 return 0;
7972 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7973 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7974 return 0;
7975 if (off == -1)
7976 off = 0;
7977 len = MIN (total_bytes - off, len);
7978 if (ptr == NULL)
7979 /* Dry run. */;
7980 else
7982 int written = 0;
7983 if (off < TREE_STRING_LENGTH (expr))
7985 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7986 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7988 memset (ptr + written, 0, len - written);
7990 return len;
7994 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7995 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7996 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7997 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7998 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7999 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8000 Return the number of bytes placed in the buffer, or zero upon failure. */
8003 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8005 /* We don't support starting at negative offset and -1 is special. */
8006 if (off < -1)
8007 return 0;
8009 switch (TREE_CODE (expr))
8011 case INTEGER_CST:
8012 return native_encode_int (expr, ptr, len, off);
8014 case REAL_CST:
8015 return native_encode_real (expr, ptr, len, off);
8017 case FIXED_CST:
8018 return native_encode_fixed (expr, ptr, len, off);
8020 case COMPLEX_CST:
8021 return native_encode_complex (expr, ptr, len, off);
8023 case VECTOR_CST:
8024 return native_encode_vector (expr, ptr, len, off);
8026 case STRING_CST:
8027 return native_encode_string (expr, ptr, len, off);
8029 default:
8030 return 0;
8034 /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
8035 or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
8036 of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
8037 machine modes, we can't just use build_nonstandard_integer_type. */
8039 tree
8040 find_bitfield_repr_type (int fieldsize, int len)
8042 machine_mode mode;
8043 for (int pass = 0; pass < 2; pass++)
8045 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8046 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8047 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8048 && known_eq (GET_MODE_PRECISION (mode),
8049 GET_MODE_BITSIZE (mode))
8050 && known_le (GET_MODE_SIZE (mode), len))
8052 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8053 if (ret && TYPE_MODE (ret) == mode)
8054 return ret;
8058 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8059 if (int_n_enabled_p[i]
8060 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8061 && int_n_trees[i].unsigned_type)
8063 tree ret = int_n_trees[i].unsigned_type;
8064 mode = TYPE_MODE (ret);
8065 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8066 && known_eq (GET_MODE_PRECISION (mode),
8067 GET_MODE_BITSIZE (mode))
8068 && known_le (GET_MODE_SIZE (mode), len))
8069 return ret;
8072 return NULL_TREE;
8075 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8076 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8077 to be non-NULL and OFF zero), then in addition to filling the
8078 bytes pointed by PTR with the value also clear any bits pointed
8079 by MASK that are known to be initialized, keep them as is for
8080 e.g. uninitialized padding bits or uninitialized fields. */
8083 native_encode_initializer (tree init, unsigned char *ptr, int len,
8084 int off, unsigned char *mask)
8086 int r;
8088 /* We don't support starting at negative offset and -1 is special. */
8089 if (off < -1 || init == NULL_TREE)
8090 return 0;
8092 gcc_assert (mask == NULL || (off == 0 && ptr));
8094 STRIP_NOPS (init);
8095 switch (TREE_CODE (init))
8097 case VIEW_CONVERT_EXPR:
8098 case NON_LVALUE_EXPR:
8099 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8100 mask);
8101 default:
8102 r = native_encode_expr (init, ptr, len, off);
8103 if (mask)
8104 memset (mask, 0, r);
8105 return r;
8106 case CONSTRUCTOR:
8107 tree type = TREE_TYPE (init);
8108 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8109 if (total_bytes < 0)
8110 return 0;
8111 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8112 return 0;
8113 int o = off == -1 ? 0 : off;
8114 if (TREE_CODE (type) == ARRAY_TYPE)
8116 tree min_index;
8117 unsigned HOST_WIDE_INT cnt;
8118 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8119 constructor_elt *ce;
8121 if (!TYPE_DOMAIN (type)
8122 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8123 return 0;
8125 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8126 if (fieldsize <= 0)
8127 return 0;
8129 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8130 if (ptr)
8131 memset (ptr, '\0', MIN (total_bytes - off, len));
8133 for (cnt = 0; ; cnt++)
8135 tree val = NULL_TREE, index = NULL_TREE;
8136 HOST_WIDE_INT pos = curpos, count = 0;
8137 bool full = false;
8138 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8140 val = ce->value;
8141 index = ce->index;
8143 else if (mask == NULL
8144 || CONSTRUCTOR_NO_CLEARING (init)
8145 || curpos >= total_bytes)
8146 break;
8147 else
8148 pos = total_bytes;
8150 if (index && TREE_CODE (index) == RANGE_EXPR)
8152 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8153 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8154 return 0;
8155 offset_int wpos
8156 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8157 - wi::to_offset (min_index),
8158 TYPE_PRECISION (sizetype));
8159 wpos *= fieldsize;
8160 if (!wi::fits_shwi_p (pos))
8161 return 0;
8162 pos = wpos.to_shwi ();
8163 offset_int wcount
8164 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8165 - wi::to_offset (TREE_OPERAND (index, 0)),
8166 TYPE_PRECISION (sizetype));
8167 if (!wi::fits_shwi_p (wcount))
8168 return 0;
8169 count = wcount.to_shwi ();
8171 else if (index)
8173 if (TREE_CODE (index) != INTEGER_CST)
8174 return 0;
8175 offset_int wpos
8176 = wi::sext (wi::to_offset (index)
8177 - wi::to_offset (min_index),
8178 TYPE_PRECISION (sizetype));
8179 wpos *= fieldsize;
8180 if (!wi::fits_shwi_p (wpos))
8181 return 0;
8182 pos = wpos.to_shwi ();
8185 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8187 if (valueinit == -1)
8189 tree zero = build_zero_cst (TREE_TYPE (type));
8190 r = native_encode_initializer (zero, ptr + curpos,
8191 fieldsize, 0,
8192 mask + curpos);
8193 if (TREE_CODE (zero) == CONSTRUCTOR)
8194 ggc_free (zero);
8195 if (!r)
8196 return 0;
8197 valueinit = curpos;
8198 curpos += fieldsize;
8200 while (curpos != pos)
8202 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8203 memcpy (mask + curpos, mask + valueinit, fieldsize);
8204 curpos += fieldsize;
8208 curpos = pos;
8209 if (val)
8212 if (off == -1
8213 || (curpos >= off
8214 && (curpos + fieldsize
8215 <= (HOST_WIDE_INT) off + len)))
8217 if (full)
8219 if (ptr)
8220 memcpy (ptr + (curpos - o), ptr + (pos - o),
8221 fieldsize);
8222 if (mask)
8223 memcpy (mask + curpos, mask + pos, fieldsize);
8225 else if (!native_encode_initializer (val,
8227 ? ptr + curpos - o
8228 : NULL,
8229 fieldsize,
8230 off == -1 ? -1
8231 : 0,
8232 mask
8233 ? mask + curpos
8234 : NULL))
8235 return 0;
8236 else
8238 full = true;
8239 pos = curpos;
8242 else if (curpos + fieldsize > off
8243 && curpos < (HOST_WIDE_INT) off + len)
8245 /* Partial overlap. */
8246 unsigned char *p = NULL;
8247 int no = 0;
8248 int l;
8249 gcc_assert (mask == NULL);
8250 if (curpos >= off)
8252 if (ptr)
8253 p = ptr + curpos - off;
8254 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8255 fieldsize);
8257 else
8259 p = ptr;
8260 no = off - curpos;
8261 l = len;
8263 if (!native_encode_initializer (val, p, l, no, NULL))
8264 return 0;
8266 curpos += fieldsize;
8268 while (count-- != 0);
8270 return MIN (total_bytes - off, len);
8272 else if (TREE_CODE (type) == RECORD_TYPE
8273 || TREE_CODE (type) == UNION_TYPE)
8275 unsigned HOST_WIDE_INT cnt;
8276 constructor_elt *ce;
8277 tree fld_base = TYPE_FIELDS (type);
8278 tree to_free = NULL_TREE;
8280 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8281 if (ptr != NULL)
8282 memset (ptr, '\0', MIN (total_bytes - o, len));
8283 for (cnt = 0; ; cnt++)
8285 tree val = NULL_TREE, field = NULL_TREE;
8286 HOST_WIDE_INT pos = 0, fieldsize;
8287 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8289 if (to_free)
8291 ggc_free (to_free);
8292 to_free = NULL_TREE;
8295 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8297 val = ce->value;
8298 field = ce->index;
8299 if (field == NULL_TREE)
8300 return 0;
8302 pos = int_byte_position (field);
8303 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8304 continue;
8306 else if (mask == NULL
8307 || CONSTRUCTOR_NO_CLEARING (init))
8308 break;
8309 else
8310 pos = total_bytes;
8312 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8314 tree fld;
8315 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8317 if (TREE_CODE (fld) != FIELD_DECL)
8318 continue;
8319 if (fld == field)
8320 break;
8321 if (DECL_PADDING_P (fld))
8322 continue;
8323 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8324 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8325 return 0;
8326 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8327 continue;
8328 break;
8330 if (fld == NULL_TREE)
8332 if (ce == NULL)
8333 break;
8334 return 0;
8336 fld_base = DECL_CHAIN (fld);
8337 if (fld != field)
8339 cnt--;
8340 field = fld;
8341 pos = int_byte_position (field);
8342 val = build_zero_cst (TREE_TYPE (fld));
8343 if (TREE_CODE (val) == CONSTRUCTOR)
8344 to_free = val;
8348 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8349 && TYPE_DOMAIN (TREE_TYPE (field))
8350 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8352 if (mask || off != -1)
8353 return 0;
8354 if (val == NULL_TREE)
8355 continue;
8356 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8357 return 0;
8358 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8359 if (fieldsize < 0
8360 || (int) fieldsize != fieldsize
8361 || (pos + fieldsize) > INT_MAX)
8362 return 0;
8363 if (pos + fieldsize > total_bytes)
8365 if (ptr != NULL && total_bytes < len)
8366 memset (ptr + total_bytes, '\0',
8367 MIN (pos + fieldsize, len) - total_bytes);
8368 total_bytes = pos + fieldsize;
8371 else
8373 if (DECL_SIZE_UNIT (field) == NULL_TREE
8374 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8375 return 0;
8376 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8378 if (fieldsize == 0)
8379 continue;
8381 if (DECL_BIT_FIELD (field))
8383 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8384 return 0;
8385 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8386 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8387 if (bpos % BITS_PER_UNIT)
8388 bpos %= BITS_PER_UNIT;
8389 else
8390 bpos = 0;
8391 fieldsize += bpos;
8392 epos = fieldsize % BITS_PER_UNIT;
8393 fieldsize += BITS_PER_UNIT - 1;
8394 fieldsize /= BITS_PER_UNIT;
8397 if (off != -1 && pos + fieldsize <= off)
8398 continue;
8400 if (val == NULL_TREE)
8401 continue;
8403 if (DECL_BIT_FIELD (field))
8405 /* FIXME: Handle PDP endian. */
8406 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8407 return 0;
8409 if (TREE_CODE (val) != INTEGER_CST)
8410 return 0;
8412 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8413 tree repr_type = NULL_TREE;
8414 HOST_WIDE_INT rpos = 0;
8415 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8417 rpos = int_byte_position (repr);
8418 repr_type = TREE_TYPE (repr);
8420 else
8422 repr_type = find_bitfield_repr_type (fieldsize, len);
8423 if (repr_type == NULL_TREE)
8424 return 0;
8425 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8426 gcc_assert (repr_size > 0 && repr_size <= len);
8427 if (pos + repr_size <= o + len)
8428 rpos = pos;
8429 else
8431 rpos = o + len - repr_size;
8432 gcc_assert (rpos <= pos);
8436 if (rpos > pos)
8437 return 0;
8438 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8439 int diff = (TYPE_PRECISION (repr_type)
8440 - TYPE_PRECISION (TREE_TYPE (field)));
8441 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8442 if (!BYTES_BIG_ENDIAN)
8443 w = wi::lshift (w, bitoff);
8444 else
8445 w = wi::lshift (w, diff - bitoff);
8446 val = wide_int_to_tree (repr_type, w);
8448 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8449 / BITS_PER_UNIT + 1];
8450 int l = native_encode_int (val, buf, sizeof buf, 0);
8451 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8452 return 0;
8454 if (ptr == NULL)
8455 continue;
8457 /* If the bitfield does not start at byte boundary, handle
8458 the partial byte at the start. */
8459 if (bpos
8460 && (off == -1 || (pos >= off && len >= 1)))
8462 if (!BYTES_BIG_ENDIAN)
8464 int msk = (1 << bpos) - 1;
8465 buf[pos - rpos] &= ~msk;
8466 buf[pos - rpos] |= ptr[pos - o] & msk;
8467 if (mask)
8469 if (fieldsize > 1 || epos == 0)
8470 mask[pos] &= msk;
8471 else
8472 mask[pos] &= (msk | ~((1 << epos) - 1));
8475 else
8477 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8478 buf[pos - rpos] &= msk;
8479 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8480 if (mask)
8482 if (fieldsize > 1 || epos == 0)
8483 mask[pos] &= ~msk;
8484 else
8485 mask[pos] &= (~msk
8486 | ((1 << (BITS_PER_UNIT - epos))
8487 - 1));
8491 /* If the bitfield does not end at byte boundary, handle
8492 the partial byte at the end. */
8493 if (epos
8494 && (off == -1
8495 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8497 if (!BYTES_BIG_ENDIAN)
8499 int msk = (1 << epos) - 1;
8500 buf[pos - rpos + fieldsize - 1] &= msk;
8501 buf[pos - rpos + fieldsize - 1]
8502 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8503 if (mask && (fieldsize > 1 || bpos == 0))
8504 mask[pos + fieldsize - 1] &= ~msk;
8506 else
8508 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8509 buf[pos - rpos + fieldsize - 1] &= ~msk;
8510 buf[pos - rpos + fieldsize - 1]
8511 |= ptr[pos + fieldsize - 1 - o] & msk;
8512 if (mask && (fieldsize > 1 || bpos == 0))
8513 mask[pos + fieldsize - 1] &= msk;
8516 if (off == -1
8517 || (pos >= off
8518 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8520 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8521 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8522 memset (mask + pos + (bpos != 0), 0,
8523 fieldsize - (bpos != 0) - (epos != 0));
8525 else
8527 /* Partial overlap. */
8528 HOST_WIDE_INT fsz = fieldsize;
8529 gcc_assert (mask == NULL);
8530 if (pos < off)
8532 fsz -= (off - pos);
8533 pos = off;
8535 if (pos + fsz > (HOST_WIDE_INT) off + len)
8536 fsz = (HOST_WIDE_INT) off + len - pos;
8537 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8539 continue;
8542 if (off == -1
8543 || (pos >= off
8544 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8546 int fldsize = fieldsize;
8547 if (off == -1)
8549 tree fld = DECL_CHAIN (field);
8550 while (fld)
8552 if (TREE_CODE (fld) == FIELD_DECL)
8553 break;
8554 fld = DECL_CHAIN (fld);
8556 if (fld == NULL_TREE)
8557 fldsize = len - pos;
8559 r = native_encode_initializer (val, ptr ? ptr + pos - o
8560 : NULL,
8561 fldsize,
8562 off == -1 ? -1 : 0,
8563 mask ? mask + pos : NULL);
8564 if (!r)
8565 return 0;
8566 if (off == -1
8567 && fldsize != fieldsize
8568 && r > fieldsize
8569 && pos + r > total_bytes)
8570 total_bytes = pos + r;
8572 else
8574 /* Partial overlap. */
8575 unsigned char *p = NULL;
8576 int no = 0;
8577 int l;
8578 gcc_assert (mask == NULL);
8579 if (pos >= off)
8581 if (ptr)
8582 p = ptr + pos - off;
8583 l = MIN ((HOST_WIDE_INT) off + len - pos,
8584 fieldsize);
8586 else
8588 p = ptr;
8589 no = off - pos;
8590 l = len;
8592 if (!native_encode_initializer (val, p, l, no, NULL))
8593 return 0;
8596 return MIN (total_bytes - off, len);
8598 return 0;
8603 /* Subroutine of native_interpret_expr. Interpret the contents of
8604 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8605 If the buffer cannot be interpreted, return NULL_TREE. */
8607 static tree
8608 native_interpret_int (tree type, const unsigned char *ptr, int len)
8610 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8612 if (total_bytes > len
8613 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8614 return NULL_TREE;
8616 wide_int result = wi::from_buffer (ptr, total_bytes);
8618 return wide_int_to_tree (type, result);
8622 /* Subroutine of native_interpret_expr. Interpret the contents of
8623 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8624 If the buffer cannot be interpreted, return NULL_TREE. */
8626 static tree
8627 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8629 scalar_mode mode = SCALAR_TYPE_MODE (type);
8630 int total_bytes = GET_MODE_SIZE (mode);
8631 double_int result;
8632 FIXED_VALUE_TYPE fixed_value;
8634 if (total_bytes > len
8635 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8636 return NULL_TREE;
8638 result = double_int::from_buffer (ptr, total_bytes);
8639 fixed_value = fixed_from_double_int (result, mode);
8641 return build_fixed (type, fixed_value);
8645 /* Subroutine of native_interpret_expr. Interpret the contents of
8646 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8647 If the buffer cannot be interpreted, return NULL_TREE. */
8649 tree
8650 native_interpret_real (tree type, const unsigned char *ptr, int len)
8652 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8653 int total_bytes = GET_MODE_SIZE (mode);
8654 unsigned char value;
8655 /* There are always 32 bits in each long, no matter the size of
8656 the hosts long. We handle floating point representations with
8657 up to 192 bits. */
8658 REAL_VALUE_TYPE r;
8659 long tmp[6];
8661 if (total_bytes > len || total_bytes > 24)
8662 return NULL_TREE;
8663 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8665 memset (tmp, 0, sizeof (tmp));
8666 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8667 bitpos += BITS_PER_UNIT)
8669 /* Both OFFSET and BYTE index within a long;
8670 bitpos indexes the whole float. */
8671 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8672 if (UNITS_PER_WORD < 4)
8674 int word = byte / UNITS_PER_WORD;
8675 if (WORDS_BIG_ENDIAN)
8676 word = (words - 1) - word;
8677 offset = word * UNITS_PER_WORD;
8678 if (BYTES_BIG_ENDIAN)
8679 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8680 else
8681 offset += byte % UNITS_PER_WORD;
8683 else
8685 offset = byte;
8686 if (BYTES_BIG_ENDIAN)
8688 /* Reverse bytes within each long, or within the entire float
8689 if it's smaller than a long (for HFmode). */
8690 offset = MIN (3, total_bytes - 1) - offset;
8691 gcc_assert (offset >= 0);
8694 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8696 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8699 real_from_target (&r, tmp, mode);
8700 return build_real (type, r);
8704 /* Subroutine of native_interpret_expr. Interpret the contents of
8705 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8706 If the buffer cannot be interpreted, return NULL_TREE. */
8708 static tree
8709 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8711 tree etype, rpart, ipart;
8712 int size;
8714 etype = TREE_TYPE (type);
8715 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8716 if (size * 2 > len)
8717 return NULL_TREE;
8718 rpart = native_interpret_expr (etype, ptr, size);
8719 if (!rpart)
8720 return NULL_TREE;
8721 ipart = native_interpret_expr (etype, ptr+size, size);
8722 if (!ipart)
8723 return NULL_TREE;
8724 return build_complex (type, rpart, ipart);
8727 /* Read a vector of type TYPE from the target memory image given by BYTES,
8728 which contains LEN bytes. The vector is known to be encodable using
8729 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8731 Return the vector on success, otherwise return null. */
8733 static tree
8734 native_interpret_vector_part (tree type, const unsigned char *bytes,
8735 unsigned int len, unsigned int npatterns,
8736 unsigned int nelts_per_pattern)
8738 tree elt_type = TREE_TYPE (type);
8739 if (VECTOR_BOOLEAN_TYPE_P (type)
8740 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8742 /* This is the only case in which elements can be smaller than a byte.
8743 Element 0 is always in the lsb of the containing byte. */
8744 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8745 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8746 return NULL_TREE;
8748 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8749 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8751 unsigned int bit_index = i * elt_bits;
8752 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8753 unsigned int lsb = bit_index % BITS_PER_UNIT;
8754 builder.quick_push (bytes[byte_index] & (1 << lsb)
8755 ? build_all_ones_cst (elt_type)
8756 : build_zero_cst (elt_type));
8758 return builder.build ();
8761 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8762 if (elt_bytes * npatterns * nelts_per_pattern > len)
8763 return NULL_TREE;
8765 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8766 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8768 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8769 if (!elt)
8770 return NULL_TREE;
8771 builder.quick_push (elt);
8772 bytes += elt_bytes;
8774 return builder.build ();
8777 /* Subroutine of native_interpret_expr. Interpret the contents of
8778 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8779 If the buffer cannot be interpreted, return NULL_TREE. */
8781 static tree
8782 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8784 tree etype;
8785 unsigned int size;
8786 unsigned HOST_WIDE_INT count;
8788 etype = TREE_TYPE (type);
8789 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8790 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8791 || size * count > len)
8792 return NULL_TREE;
8794 return native_interpret_vector_part (type, ptr, len, count, 1);
8798 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8799 the buffer PTR of length LEN as a constant of type TYPE. For
8800 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8801 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8802 return NULL_TREE. */
8804 tree
8805 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8807 switch (TREE_CODE (type))
8809 case INTEGER_TYPE:
8810 case ENUMERAL_TYPE:
8811 case BOOLEAN_TYPE:
8812 case POINTER_TYPE:
8813 case REFERENCE_TYPE:
8814 case OFFSET_TYPE:
8815 return native_interpret_int (type, ptr, len);
8817 case REAL_TYPE:
8818 if (tree ret = native_interpret_real (type, ptr, len))
8820 /* For floating point values in composite modes, punt if this
8821 folding doesn't preserve bit representation. As the mode doesn't
8822 have fixed precision while GCC pretends it does, there could be
8823 valid values that GCC can't really represent accurately.
8824 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8825 bit combinationations which GCC doesn't preserve. */
8826 unsigned char buf[24];
8827 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8828 int total_bytes = GET_MODE_SIZE (mode);
8829 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8830 || memcmp (ptr, buf, total_bytes) != 0)
8831 return NULL_TREE;
8832 return ret;
8834 return NULL_TREE;
8836 case FIXED_POINT_TYPE:
8837 return native_interpret_fixed (type, ptr, len);
8839 case COMPLEX_TYPE:
8840 return native_interpret_complex (type, ptr, len);
8842 case VECTOR_TYPE:
8843 return native_interpret_vector (type, ptr, len);
8845 default:
8846 return NULL_TREE;
8850 /* Returns true if we can interpret the contents of a native encoding
8851 as TYPE. */
8853 bool
8854 can_native_interpret_type_p (tree type)
8856 switch (TREE_CODE (type))
8858 case INTEGER_TYPE:
8859 case ENUMERAL_TYPE:
8860 case BOOLEAN_TYPE:
8861 case POINTER_TYPE:
8862 case REFERENCE_TYPE:
8863 case FIXED_POINT_TYPE:
8864 case REAL_TYPE:
8865 case COMPLEX_TYPE:
8866 case VECTOR_TYPE:
8867 case OFFSET_TYPE:
8868 return true;
8869 default:
8870 return false;
8874 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8875 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8877 tree
8878 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8879 int len)
8881 vec<constructor_elt, va_gc> *elts = NULL;
8882 if (TREE_CODE (type) == ARRAY_TYPE)
8884 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8885 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8886 return NULL_TREE;
8888 HOST_WIDE_INT cnt = 0;
8889 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8891 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8892 return NULL_TREE;
8893 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8895 if (eltsz == 0)
8896 cnt = 0;
8897 HOST_WIDE_INT pos = 0;
8898 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8900 tree v = NULL_TREE;
8901 if (pos >= len || pos + eltsz > len)
8902 return NULL_TREE;
8903 if (can_native_interpret_type_p (TREE_TYPE (type)))
8905 v = native_interpret_expr (TREE_TYPE (type),
8906 ptr + off + pos, eltsz);
8907 if (v == NULL_TREE)
8908 return NULL_TREE;
8910 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8911 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8912 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8913 eltsz);
8914 if (v == NULL_TREE)
8915 return NULL_TREE;
8916 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8918 return build_constructor (type, elts);
8920 if (TREE_CODE (type) != RECORD_TYPE)
8921 return NULL_TREE;
8922 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8924 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8925 continue;
8926 tree fld = field;
8927 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8928 int diff = 0;
8929 tree v = NULL_TREE;
8930 if (DECL_BIT_FIELD (field))
8932 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8933 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8935 poly_int64 bitoffset;
8936 poly_uint64 field_offset, fld_offset;
8937 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8938 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8939 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8940 else
8941 bitoffset = 0;
8942 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8943 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8944 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8945 - TYPE_PRECISION (TREE_TYPE (field)));
8946 if (!bitoffset.is_constant (&bitoff)
8947 || bitoff < 0
8948 || bitoff > diff)
8949 return NULL_TREE;
8951 else
8953 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8954 return NULL_TREE;
8955 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8956 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8957 bpos %= BITS_PER_UNIT;
8958 fieldsize += bpos;
8959 fieldsize += BITS_PER_UNIT - 1;
8960 fieldsize /= BITS_PER_UNIT;
8961 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8962 if (repr_type == NULL_TREE)
8963 return NULL_TREE;
8964 sz = int_size_in_bytes (repr_type);
8965 if (sz < 0 || sz > len)
8966 return NULL_TREE;
8967 pos = int_byte_position (field);
8968 if (pos < 0 || pos > len || pos + fieldsize > len)
8969 return NULL_TREE;
8970 HOST_WIDE_INT rpos;
8971 if (pos + sz <= len)
8972 rpos = pos;
8973 else
8975 rpos = len - sz;
8976 gcc_assert (rpos <= pos);
8978 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8979 pos = rpos;
8980 diff = (TYPE_PRECISION (repr_type)
8981 - TYPE_PRECISION (TREE_TYPE (field)));
8982 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8983 if (v == NULL_TREE)
8984 return NULL_TREE;
8985 fld = NULL_TREE;
8989 if (fld)
8991 sz = int_size_in_bytes (TREE_TYPE (fld));
8992 if (sz < 0 || sz > len)
8993 return NULL_TREE;
8994 tree byte_pos = byte_position (fld);
8995 if (!tree_fits_shwi_p (byte_pos))
8996 return NULL_TREE;
8997 pos = tree_to_shwi (byte_pos);
8998 if (pos < 0 || pos > len || pos + sz > len)
8999 return NULL_TREE;
9001 if (fld == NULL_TREE)
9002 /* Already handled above. */;
9003 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9005 v = native_interpret_expr (TREE_TYPE (fld),
9006 ptr + off + pos, sz);
9007 if (v == NULL_TREE)
9008 return NULL_TREE;
9010 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9011 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9012 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9013 if (v == NULL_TREE)
9014 return NULL_TREE;
9015 if (fld != field)
9017 if (TREE_CODE (v) != INTEGER_CST)
9018 return NULL_TREE;
9020 /* FIXME: Figure out how to handle PDP endian bitfields. */
9021 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9022 return NULL_TREE;
9023 if (!BYTES_BIG_ENDIAN)
9024 v = wide_int_to_tree (TREE_TYPE (field),
9025 wi::lrshift (wi::to_wide (v), bitoff));
9026 else
9027 v = wide_int_to_tree (TREE_TYPE (field),
9028 wi::lrshift (wi::to_wide (v),
9029 diff - bitoff));
9031 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9033 return build_constructor (type, elts);
9036 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9037 or extracted constant positions and/or sizes aren't byte aligned. */
9039 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9040 bits between adjacent elements. AMNT should be within
9041 [0, BITS_PER_UNIT).
9042 Example, AMNT = 2:
9043 00011111|11100000 << 2 = 01111111|10000000
9044 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9046 void
9047 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9048 unsigned int amnt)
9050 if (amnt == 0)
9051 return;
9053 unsigned char carry_over = 0U;
9054 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9055 unsigned char clear_mask = (~0U) << amnt;
9057 for (unsigned int i = 0; i < sz; i++)
9059 unsigned prev_carry_over = carry_over;
9060 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9062 ptr[i] <<= amnt;
9063 if (i != 0)
9065 ptr[i] &= clear_mask;
9066 ptr[i] |= prev_carry_over;
9071 /* Like shift_bytes_in_array_left but for big-endian.
9072 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9073 bits between adjacent elements. AMNT should be within
9074 [0, BITS_PER_UNIT).
9075 Example, AMNT = 2:
9076 00011111|11100000 >> 2 = 00000111|11111000
9077 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9079 void
9080 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9081 unsigned int amnt)
9083 if (amnt == 0)
9084 return;
9086 unsigned char carry_over = 0U;
9087 unsigned char carry_mask = ~(~0U << amnt);
9089 for (unsigned int i = 0; i < sz; i++)
9091 unsigned prev_carry_over = carry_over;
9092 carry_over = ptr[i] & carry_mask;
9094 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9095 ptr[i] >>= amnt;
9096 ptr[i] |= prev_carry_over;
9100 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9101 directly on the VECTOR_CST encoding, in a way that works for variable-
9102 length vectors. Return the resulting VECTOR_CST on success or null
9103 on failure. */
9105 static tree
9106 fold_view_convert_vector_encoding (tree type, tree expr)
9108 tree expr_type = TREE_TYPE (expr);
9109 poly_uint64 type_bits, expr_bits;
9110 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9111 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9112 return NULL_TREE;
9114 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9115 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9116 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9117 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9119 /* We can only preserve the semantics of a stepped pattern if the new
9120 vector element is an integer of the same size. */
9121 if (VECTOR_CST_STEPPED_P (expr)
9122 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9123 return NULL_TREE;
9125 /* The number of bits needed to encode one element from every pattern
9126 of the original vector. */
9127 unsigned int expr_sequence_bits
9128 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9130 /* The number of bits needed to encode one element from every pattern
9131 of the result. */
9132 unsigned int type_sequence_bits
9133 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9135 /* Don't try to read more bytes than are available, which can happen
9136 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9137 The general VIEW_CONVERT handling can cope with that case, so there's
9138 no point complicating things here. */
9139 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9140 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9141 BITS_PER_UNIT);
9142 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9143 if (known_gt (buffer_bits, expr_bits))
9144 return NULL_TREE;
9146 /* Get enough bytes of EXPR to form the new encoding. */
9147 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9148 buffer.quick_grow (buffer_bytes);
9149 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9150 buffer_bits / expr_elt_bits)
9151 != (int) buffer_bytes)
9152 return NULL_TREE;
9154 /* Reencode the bytes as TYPE. */
9155 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9156 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9157 type_npatterns, nelts_per_pattern);
9160 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9161 TYPE at compile-time. If we're unable to perform the conversion
9162 return NULL_TREE. */
9164 static tree
9165 fold_view_convert_expr (tree type, tree expr)
9167 /* We support up to 512-bit values (for V8DFmode). */
9168 unsigned char buffer[64];
9169 int len;
9171 /* Check that the host and target are sane. */
9172 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9173 return NULL_TREE;
9175 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9176 if (tree res = fold_view_convert_vector_encoding (type, expr))
9177 return res;
9179 len = native_encode_expr (expr, buffer, sizeof (buffer));
9180 if (len == 0)
9181 return NULL_TREE;
9183 return native_interpret_expr (type, buffer, len);
9186 /* Build an expression for the address of T. Folds away INDIRECT_REF
9187 to avoid confusing the gimplify process. */
9189 tree
9190 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9192 /* The size of the object is not relevant when talking about its address. */
9193 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9194 t = TREE_OPERAND (t, 0);
9196 if (TREE_CODE (t) == INDIRECT_REF)
9198 t = TREE_OPERAND (t, 0);
9200 if (TREE_TYPE (t) != ptrtype)
9201 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9203 else if (TREE_CODE (t) == MEM_REF
9204 && integer_zerop (TREE_OPERAND (t, 1)))
9206 t = TREE_OPERAND (t, 0);
9208 if (TREE_TYPE (t) != ptrtype)
9209 t = fold_convert_loc (loc, ptrtype, t);
9211 else if (TREE_CODE (t) == MEM_REF
9212 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9213 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9214 TREE_OPERAND (t, 0),
9215 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9216 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9218 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9220 if (TREE_TYPE (t) != ptrtype)
9221 t = fold_convert_loc (loc, ptrtype, t);
9223 else
9224 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9226 return t;
9229 /* Build an expression for the address of T. */
9231 tree
9232 build_fold_addr_expr_loc (location_t loc, tree t)
9234 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9236 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9239 /* Fold a unary expression of code CODE and type TYPE with operand
9240 OP0. Return the folded expression if folding is successful.
9241 Otherwise, return NULL_TREE. */
9243 tree
9244 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9246 tree tem;
9247 tree arg0;
9248 enum tree_code_class kind = TREE_CODE_CLASS (code);
9250 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9251 && TREE_CODE_LENGTH (code) == 1);
9253 arg0 = op0;
9254 if (arg0)
9256 if (CONVERT_EXPR_CODE_P (code)
9257 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9259 /* Don't use STRIP_NOPS, because signedness of argument type
9260 matters. */
9261 STRIP_SIGN_NOPS (arg0);
9263 else
9265 /* Strip any conversions that don't change the mode. This
9266 is safe for every expression, except for a comparison
9267 expression because its signedness is derived from its
9268 operands.
9270 Note that this is done as an internal manipulation within
9271 the constant folder, in order to find the simplest
9272 representation of the arguments so that their form can be
9273 studied. In any cases, the appropriate type conversions
9274 should be put back in the tree that will get out of the
9275 constant folder. */
9276 STRIP_NOPS (arg0);
9279 if (CONSTANT_CLASS_P (arg0))
9281 tree tem = const_unop (code, type, arg0);
9282 if (tem)
9284 if (TREE_TYPE (tem) != type)
9285 tem = fold_convert_loc (loc, type, tem);
9286 return tem;
9291 tem = generic_simplify (loc, code, type, op0);
9292 if (tem)
9293 return tem;
9295 if (TREE_CODE_CLASS (code) == tcc_unary)
9297 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9298 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9299 fold_build1_loc (loc, code, type,
9300 fold_convert_loc (loc, TREE_TYPE (op0),
9301 TREE_OPERAND (arg0, 1))));
9302 else if (TREE_CODE (arg0) == COND_EXPR)
9304 tree arg01 = TREE_OPERAND (arg0, 1);
9305 tree arg02 = TREE_OPERAND (arg0, 2);
9306 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9307 arg01 = fold_build1_loc (loc, code, type,
9308 fold_convert_loc (loc,
9309 TREE_TYPE (op0), arg01));
9310 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9311 arg02 = fold_build1_loc (loc, code, type,
9312 fold_convert_loc (loc,
9313 TREE_TYPE (op0), arg02));
9314 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9315 arg01, arg02);
9317 /* If this was a conversion, and all we did was to move into
9318 inside the COND_EXPR, bring it back out. But leave it if
9319 it is a conversion from integer to integer and the
9320 result precision is no wider than a word since such a
9321 conversion is cheap and may be optimized away by combine,
9322 while it couldn't if it were outside the COND_EXPR. Then return
9323 so we don't get into an infinite recursion loop taking the
9324 conversion out and then back in. */
9326 if ((CONVERT_EXPR_CODE_P (code)
9327 || code == NON_LVALUE_EXPR)
9328 && TREE_CODE (tem) == COND_EXPR
9329 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9330 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9331 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9332 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9333 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9334 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9335 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9336 && (INTEGRAL_TYPE_P
9337 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9338 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9339 || flag_syntax_only))
9340 tem = build1_loc (loc, code, type,
9341 build3 (COND_EXPR,
9342 TREE_TYPE (TREE_OPERAND
9343 (TREE_OPERAND (tem, 1), 0)),
9344 TREE_OPERAND (tem, 0),
9345 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9346 TREE_OPERAND (TREE_OPERAND (tem, 2),
9347 0)));
9348 return tem;
9352 switch (code)
9354 case NON_LVALUE_EXPR:
9355 if (!maybe_lvalue_p (op0))
9356 return fold_convert_loc (loc, type, op0);
9357 return NULL_TREE;
9359 CASE_CONVERT:
9360 case FLOAT_EXPR:
9361 case FIX_TRUNC_EXPR:
9362 if (COMPARISON_CLASS_P (op0))
9364 /* If we have (type) (a CMP b) and type is an integral type, return
9365 new expression involving the new type. Canonicalize
9366 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9367 non-integral type.
9368 Do not fold the result as that would not simplify further, also
9369 folding again results in recursions. */
9370 if (TREE_CODE (type) == BOOLEAN_TYPE)
9371 return build2_loc (loc, TREE_CODE (op0), type,
9372 TREE_OPERAND (op0, 0),
9373 TREE_OPERAND (op0, 1));
9374 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9375 && TREE_CODE (type) != VECTOR_TYPE)
9376 return build3_loc (loc, COND_EXPR, type, op0,
9377 constant_boolean_node (true, type),
9378 constant_boolean_node (false, type));
9381 /* Handle (T *)&A.B.C for A being of type T and B and C
9382 living at offset zero. This occurs frequently in
9383 C++ upcasting and then accessing the base. */
9384 if (TREE_CODE (op0) == ADDR_EXPR
9385 && POINTER_TYPE_P (type)
9386 && handled_component_p (TREE_OPERAND (op0, 0)))
9388 poly_int64 bitsize, bitpos;
9389 tree offset;
9390 machine_mode mode;
9391 int unsignedp, reversep, volatilep;
9392 tree base
9393 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9394 &offset, &mode, &unsignedp, &reversep,
9395 &volatilep);
9396 /* If the reference was to a (constant) zero offset, we can use
9397 the address of the base if it has the same base type
9398 as the result type and the pointer type is unqualified. */
9399 if (!offset
9400 && known_eq (bitpos, 0)
9401 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9402 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9403 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9404 return fold_convert_loc (loc, type,
9405 build_fold_addr_expr_loc (loc, base));
9408 if (TREE_CODE (op0) == MODIFY_EXPR
9409 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9410 /* Detect assigning a bitfield. */
9411 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9412 && DECL_BIT_FIELD
9413 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9415 /* Don't leave an assignment inside a conversion
9416 unless assigning a bitfield. */
9417 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9418 /* First do the assignment, then return converted constant. */
9419 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9420 suppress_warning (tem /* What warning? */);
9421 TREE_USED (tem) = 1;
9422 return tem;
9425 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9426 constants (if x has signed type, the sign bit cannot be set
9427 in c). This folds extension into the BIT_AND_EXPR.
9428 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9429 very likely don't have maximal range for their precision and this
9430 transformation effectively doesn't preserve non-maximal ranges. */
9431 if (TREE_CODE (type) == INTEGER_TYPE
9432 && TREE_CODE (op0) == BIT_AND_EXPR
9433 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9435 tree and_expr = op0;
9436 tree and0 = TREE_OPERAND (and_expr, 0);
9437 tree and1 = TREE_OPERAND (and_expr, 1);
9438 int change = 0;
9440 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9441 || (TYPE_PRECISION (type)
9442 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9443 change = 1;
9444 else if (TYPE_PRECISION (TREE_TYPE (and1))
9445 <= HOST_BITS_PER_WIDE_INT
9446 && tree_fits_uhwi_p (and1))
9448 unsigned HOST_WIDE_INT cst;
9450 cst = tree_to_uhwi (and1);
9451 cst &= HOST_WIDE_INT_M1U
9452 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9453 change = (cst == 0);
9454 if (change
9455 && !flag_syntax_only
9456 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9457 == ZERO_EXTEND))
9459 tree uns = unsigned_type_for (TREE_TYPE (and0));
9460 and0 = fold_convert_loc (loc, uns, and0);
9461 and1 = fold_convert_loc (loc, uns, and1);
9464 if (change)
9466 tem = force_fit_type (type, wi::to_widest (and1), 0,
9467 TREE_OVERFLOW (and1));
9468 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9469 fold_convert_loc (loc, type, and0), tem);
9473 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9474 cast (T1)X will fold away. We assume that this happens when X itself
9475 is a cast. */
9476 if (POINTER_TYPE_P (type)
9477 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9478 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9480 tree arg00 = TREE_OPERAND (arg0, 0);
9481 tree arg01 = TREE_OPERAND (arg0, 1);
9483 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9484 when the pointed type needs higher alignment than
9485 the p+ first operand's pointed type. */
9486 if (!in_gimple_form
9487 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9488 && (min_align_of_type (TREE_TYPE (type))
9489 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9490 return NULL_TREE;
9492 arg00 = fold_convert_loc (loc, type, arg00);
9493 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9496 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9497 of the same precision, and X is an integer type not narrower than
9498 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9499 if (INTEGRAL_TYPE_P (type)
9500 && TREE_CODE (op0) == BIT_NOT_EXPR
9501 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9502 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9503 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9505 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9506 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9507 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9508 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9509 fold_convert_loc (loc, type, tem));
9512 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9513 type of X and Y (integer types only). */
9514 if (INTEGRAL_TYPE_P (type)
9515 && TREE_CODE (op0) == MULT_EXPR
9516 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9517 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9519 /* Be careful not to introduce new overflows. */
9520 tree mult_type;
9521 if (TYPE_OVERFLOW_WRAPS (type))
9522 mult_type = type;
9523 else
9524 mult_type = unsigned_type_for (type);
9526 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9528 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9529 fold_convert_loc (loc, mult_type,
9530 TREE_OPERAND (op0, 0)),
9531 fold_convert_loc (loc, mult_type,
9532 TREE_OPERAND (op0, 1)));
9533 return fold_convert_loc (loc, type, tem);
9537 return NULL_TREE;
9539 case VIEW_CONVERT_EXPR:
9540 if (TREE_CODE (op0) == MEM_REF)
9542 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9543 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9544 tem = fold_build2_loc (loc, MEM_REF, type,
9545 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9546 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9547 return tem;
9550 return NULL_TREE;
9552 case NEGATE_EXPR:
9553 tem = fold_negate_expr (loc, arg0);
9554 if (tem)
9555 return fold_convert_loc (loc, type, tem);
9556 return NULL_TREE;
9558 case ABS_EXPR:
9559 /* Convert fabs((double)float) into (double)fabsf(float). */
9560 if (TREE_CODE (arg0) == NOP_EXPR
9561 && TREE_CODE (type) == REAL_TYPE)
9563 tree targ0 = strip_float_extensions (arg0);
9564 if (targ0 != arg0)
9565 return fold_convert_loc (loc, type,
9566 fold_build1_loc (loc, ABS_EXPR,
9567 TREE_TYPE (targ0),
9568 targ0));
9570 return NULL_TREE;
9572 case BIT_NOT_EXPR:
9573 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9574 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9575 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9576 fold_convert_loc (loc, type,
9577 TREE_OPERAND (arg0, 0)))))
9578 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9579 fold_convert_loc (loc, type,
9580 TREE_OPERAND (arg0, 1)));
9581 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9582 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9583 fold_convert_loc (loc, type,
9584 TREE_OPERAND (arg0, 1)))))
9585 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9586 fold_convert_loc (loc, type,
9587 TREE_OPERAND (arg0, 0)), tem);
9589 return NULL_TREE;
9591 case TRUTH_NOT_EXPR:
9592 /* Note that the operand of this must be an int
9593 and its values must be 0 or 1.
9594 ("true" is a fixed value perhaps depending on the language,
9595 but we don't handle values other than 1 correctly yet.) */
9596 tem = fold_truth_not_expr (loc, arg0);
9597 if (!tem)
9598 return NULL_TREE;
9599 return fold_convert_loc (loc, type, tem);
9601 case INDIRECT_REF:
9602 /* Fold *&X to X if X is an lvalue. */
9603 if (TREE_CODE (op0) == ADDR_EXPR)
9605 tree op00 = TREE_OPERAND (op0, 0);
9606 if ((VAR_P (op00)
9607 || TREE_CODE (op00) == PARM_DECL
9608 || TREE_CODE (op00) == RESULT_DECL)
9609 && !TREE_READONLY (op00))
9610 return op00;
9612 return NULL_TREE;
9614 default:
9615 return NULL_TREE;
9616 } /* switch (code) */
9620 /* If the operation was a conversion do _not_ mark a resulting constant
9621 with TREE_OVERFLOW if the original constant was not. These conversions
9622 have implementation defined behavior and retaining the TREE_OVERFLOW
9623 flag here would confuse later passes such as VRP. */
9624 tree
9625 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9626 tree type, tree op0)
9628 tree res = fold_unary_loc (loc, code, type, op0);
9629 if (res
9630 && TREE_CODE (res) == INTEGER_CST
9631 && TREE_CODE (op0) == INTEGER_CST
9632 && CONVERT_EXPR_CODE_P (code))
9633 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9635 return res;
9638 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9639 operands OP0 and OP1. LOC is the location of the resulting expression.
9640 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9641 Return the folded expression if folding is successful. Otherwise,
9642 return NULL_TREE. */
9643 static tree
9644 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9645 tree arg0, tree arg1, tree op0, tree op1)
9647 tree tem;
9649 /* We only do these simplifications if we are optimizing. */
9650 if (!optimize)
9651 return NULL_TREE;
9653 /* Check for things like (A || B) && (A || C). We can convert this
9654 to A || (B && C). Note that either operator can be any of the four
9655 truth and/or operations and the transformation will still be
9656 valid. Also note that we only care about order for the
9657 ANDIF and ORIF operators. If B contains side effects, this
9658 might change the truth-value of A. */
9659 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9660 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9661 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9662 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9663 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9666 tree a00 = TREE_OPERAND (arg0, 0);
9667 tree a01 = TREE_OPERAND (arg0, 1);
9668 tree a10 = TREE_OPERAND (arg1, 0);
9669 tree a11 = TREE_OPERAND (arg1, 1);
9670 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9671 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9672 && (code == TRUTH_AND_EXPR
9673 || code == TRUTH_OR_EXPR));
9675 if (operand_equal_p (a00, a10, 0))
9676 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9677 fold_build2_loc (loc, code, type, a01, a11));
9678 else if (commutative && operand_equal_p (a00, a11, 0))
9679 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9680 fold_build2_loc (loc, code, type, a01, a10));
9681 else if (commutative && operand_equal_p (a01, a10, 0))
9682 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9683 fold_build2_loc (loc, code, type, a00, a11));
9685 /* This case if tricky because we must either have commutative
9686 operators or else A10 must not have side-effects. */
9688 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9689 && operand_equal_p (a01, a11, 0))
9690 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9691 fold_build2_loc (loc, code, type, a00, a10),
9692 a01);
9695 /* See if we can build a range comparison. */
9696 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9697 return tem;
9699 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9700 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9702 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9703 if (tem)
9704 return fold_build2_loc (loc, code, type, tem, arg1);
9707 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9708 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9710 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9711 if (tem)
9712 return fold_build2_loc (loc, code, type, arg0, tem);
9715 /* Check for the possibility of merging component references. If our
9716 lhs is another similar operation, try to merge its rhs with our
9717 rhs. Then try to merge our lhs and rhs. */
9718 if (TREE_CODE (arg0) == code
9719 && (tem = fold_truth_andor_1 (loc, code, type,
9720 TREE_OPERAND (arg0, 1), arg1)) != 0)
9721 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9723 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9724 return tem;
9726 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9727 if (param_logical_op_non_short_circuit != -1)
9728 logical_op_non_short_circuit
9729 = param_logical_op_non_short_circuit;
9730 if (logical_op_non_short_circuit
9731 && !sanitize_coverage_p ()
9732 && (code == TRUTH_AND_EXPR
9733 || code == TRUTH_ANDIF_EXPR
9734 || code == TRUTH_OR_EXPR
9735 || code == TRUTH_ORIF_EXPR))
9737 enum tree_code ncode, icode;
9739 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9740 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9741 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9743 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9744 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9745 We don't want to pack more than two leafs to a non-IF AND/OR
9746 expression.
9747 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9748 equal to IF-CODE, then we don't want to add right-hand operand.
9749 If the inner right-hand side of left-hand operand has
9750 side-effects, or isn't simple, then we can't add to it,
9751 as otherwise we might destroy if-sequence. */
9752 if (TREE_CODE (arg0) == icode
9753 && simple_operand_p_2 (arg1)
9754 /* Needed for sequence points to handle trappings, and
9755 side-effects. */
9756 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9758 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9759 arg1);
9760 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9761 tem);
9763 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9764 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9765 else if (TREE_CODE (arg1) == icode
9766 && simple_operand_p_2 (arg0)
9767 /* Needed for sequence points to handle trappings, and
9768 side-effects. */
9769 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9771 tem = fold_build2_loc (loc, ncode, type,
9772 arg0, TREE_OPERAND (arg1, 0));
9773 return fold_build2_loc (loc, icode, type, tem,
9774 TREE_OPERAND (arg1, 1));
9776 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9777 into (A OR B).
9778 For sequence point consistancy, we need to check for trapping,
9779 and side-effects. */
9780 else if (code == icode && simple_operand_p_2 (arg0)
9781 && simple_operand_p_2 (arg1))
9782 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9785 return NULL_TREE;
9788 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9789 by changing CODE to reduce the magnitude of constants involved in
9790 ARG0 of the comparison.
9791 Returns a canonicalized comparison tree if a simplification was
9792 possible, otherwise returns NULL_TREE.
9793 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9794 valid if signed overflow is undefined. */
9796 static tree
9797 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9798 tree arg0, tree arg1,
9799 bool *strict_overflow_p)
9801 enum tree_code code0 = TREE_CODE (arg0);
9802 tree t, cst0 = NULL_TREE;
9803 int sgn0;
9805 /* Match A +- CST code arg1. We can change this only if overflow
9806 is undefined. */
9807 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9808 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9809 /* In principle pointers also have undefined overflow behavior,
9810 but that causes problems elsewhere. */
9811 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9812 && (code0 == MINUS_EXPR
9813 || code0 == PLUS_EXPR)
9814 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9815 return NULL_TREE;
9817 /* Identify the constant in arg0 and its sign. */
9818 cst0 = TREE_OPERAND (arg0, 1);
9819 sgn0 = tree_int_cst_sgn (cst0);
9821 /* Overflowed constants and zero will cause problems. */
9822 if (integer_zerop (cst0)
9823 || TREE_OVERFLOW (cst0))
9824 return NULL_TREE;
9826 /* See if we can reduce the magnitude of the constant in
9827 arg0 by changing the comparison code. */
9828 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9829 if (code == LT_EXPR
9830 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9831 code = LE_EXPR;
9832 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9833 else if (code == GT_EXPR
9834 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9835 code = GE_EXPR;
9836 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9837 else if (code == LE_EXPR
9838 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9839 code = LT_EXPR;
9840 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9841 else if (code == GE_EXPR
9842 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9843 code = GT_EXPR;
9844 else
9845 return NULL_TREE;
9846 *strict_overflow_p = true;
9848 /* Now build the constant reduced in magnitude. But not if that
9849 would produce one outside of its types range. */
9850 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9851 && ((sgn0 == 1
9852 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9853 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9854 || (sgn0 == -1
9855 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9856 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9857 return NULL_TREE;
9859 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9860 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9861 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9862 t = fold_convert (TREE_TYPE (arg1), t);
9864 return fold_build2_loc (loc, code, type, t, arg1);
9867 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9868 overflow further. Try to decrease the magnitude of constants involved
9869 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9870 and put sole constants at the second argument position.
9871 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9873 static tree
9874 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9875 tree arg0, tree arg1)
9877 tree t;
9878 bool strict_overflow_p;
9879 const char * const warnmsg = G_("assuming signed overflow does not occur "
9880 "when reducing constant in comparison");
9882 /* Try canonicalization by simplifying arg0. */
9883 strict_overflow_p = false;
9884 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9885 &strict_overflow_p);
9886 if (t)
9888 if (strict_overflow_p)
9889 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9890 return t;
9893 /* Try canonicalization by simplifying arg1 using the swapped
9894 comparison. */
9895 code = swap_tree_comparison (code);
9896 strict_overflow_p = false;
9897 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9898 &strict_overflow_p);
9899 if (t && strict_overflow_p)
9900 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9901 return t;
9904 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9905 space. This is used to avoid issuing overflow warnings for
9906 expressions like &p->x which cannot wrap. */
9908 static bool
9909 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9911 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9912 return true;
9914 if (maybe_lt (bitpos, 0))
9915 return true;
9917 poly_wide_int wi_offset;
9918 int precision = TYPE_PRECISION (TREE_TYPE (base));
9919 if (offset == NULL_TREE)
9920 wi_offset = wi::zero (precision);
9921 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9922 return true;
9923 else
9924 wi_offset = wi::to_poly_wide (offset);
9926 wi::overflow_type overflow;
9927 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9928 precision);
9929 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9930 if (overflow)
9931 return true;
9933 poly_uint64 total_hwi, size;
9934 if (!total.to_uhwi (&total_hwi)
9935 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9936 &size)
9937 || known_eq (size, 0U))
9938 return true;
9940 if (known_le (total_hwi, size))
9941 return false;
9943 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9944 array. */
9945 if (TREE_CODE (base) == ADDR_EXPR
9946 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9947 &size)
9948 && maybe_ne (size, 0U)
9949 && known_le (total_hwi, size))
9950 return false;
9952 return true;
9955 /* Return a positive integer when the symbol DECL is known to have
9956 a nonzero address, zero when it's known not to (e.g., it's a weak
9957 symbol), and a negative integer when the symbol is not yet in the
9958 symbol table and so whether or not its address is zero is unknown.
9959 For function local objects always return positive integer. */
9960 static int
9961 maybe_nonzero_address (tree decl)
9963 /* Normally, don't do anything for variables and functions before symtab is
9964 built; it is quite possible that DECL will be declared weak later.
9965 But if folding_initializer, we need a constant answer now, so create
9966 the symtab entry and prevent later weak declaration. */
9967 if (DECL_P (decl) && decl_in_symtab_p (decl))
9968 if (struct symtab_node *symbol
9969 = (folding_initializer
9970 ? symtab_node::get_create (decl)
9971 : symtab_node::get (decl)))
9972 return symbol->nonzero_address ();
9974 /* Function local objects are never NULL. */
9975 if (DECL_P (decl)
9976 && (DECL_CONTEXT (decl)
9977 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9978 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9979 return 1;
9981 return -1;
9984 /* Subroutine of fold_binary. This routine performs all of the
9985 transformations that are common to the equality/inequality
9986 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9987 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9988 fold_binary should call fold_binary. Fold a comparison with
9989 tree code CODE and type TYPE with operands OP0 and OP1. Return
9990 the folded comparison or NULL_TREE. */
9992 static tree
9993 fold_comparison (location_t loc, enum tree_code code, tree type,
9994 tree op0, tree op1)
9996 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9997 tree arg0, arg1, tem;
9999 arg0 = op0;
10000 arg1 = op1;
10002 STRIP_SIGN_NOPS (arg0);
10003 STRIP_SIGN_NOPS (arg1);
10005 /* For comparisons of pointers we can decompose it to a compile time
10006 comparison of the base objects and the offsets into the object.
10007 This requires at least one operand being an ADDR_EXPR or a
10008 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10009 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10010 && (TREE_CODE (arg0) == ADDR_EXPR
10011 || TREE_CODE (arg1) == ADDR_EXPR
10012 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10013 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10015 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10016 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10017 machine_mode mode;
10018 int volatilep, reversep, unsignedp;
10019 bool indirect_base0 = false, indirect_base1 = false;
10021 /* Get base and offset for the access. Strip ADDR_EXPR for
10022 get_inner_reference, but put it back by stripping INDIRECT_REF
10023 off the base object if possible. indirect_baseN will be true
10024 if baseN is not an address but refers to the object itself. */
10025 base0 = arg0;
10026 if (TREE_CODE (arg0) == ADDR_EXPR)
10028 base0
10029 = get_inner_reference (TREE_OPERAND (arg0, 0),
10030 &bitsize, &bitpos0, &offset0, &mode,
10031 &unsignedp, &reversep, &volatilep);
10032 if (TREE_CODE (base0) == INDIRECT_REF)
10033 base0 = TREE_OPERAND (base0, 0);
10034 else
10035 indirect_base0 = true;
10037 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10039 base0 = TREE_OPERAND (arg0, 0);
10040 STRIP_SIGN_NOPS (base0);
10041 if (TREE_CODE (base0) == ADDR_EXPR)
10043 base0
10044 = get_inner_reference (TREE_OPERAND (base0, 0),
10045 &bitsize, &bitpos0, &offset0, &mode,
10046 &unsignedp, &reversep, &volatilep);
10047 if (TREE_CODE (base0) == INDIRECT_REF)
10048 base0 = TREE_OPERAND (base0, 0);
10049 else
10050 indirect_base0 = true;
10052 if (offset0 == NULL_TREE || integer_zerop (offset0))
10053 offset0 = TREE_OPERAND (arg0, 1);
10054 else
10055 offset0 = size_binop (PLUS_EXPR, offset0,
10056 TREE_OPERAND (arg0, 1));
10057 if (poly_int_tree_p (offset0))
10059 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10060 TYPE_PRECISION (sizetype));
10061 tem <<= LOG2_BITS_PER_UNIT;
10062 tem += bitpos0;
10063 if (tem.to_shwi (&bitpos0))
10064 offset0 = NULL_TREE;
10068 base1 = arg1;
10069 if (TREE_CODE (arg1) == ADDR_EXPR)
10071 base1
10072 = get_inner_reference (TREE_OPERAND (arg1, 0),
10073 &bitsize, &bitpos1, &offset1, &mode,
10074 &unsignedp, &reversep, &volatilep);
10075 if (TREE_CODE (base1) == INDIRECT_REF)
10076 base1 = TREE_OPERAND (base1, 0);
10077 else
10078 indirect_base1 = true;
10080 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10082 base1 = TREE_OPERAND (arg1, 0);
10083 STRIP_SIGN_NOPS (base1);
10084 if (TREE_CODE (base1) == ADDR_EXPR)
10086 base1
10087 = get_inner_reference (TREE_OPERAND (base1, 0),
10088 &bitsize, &bitpos1, &offset1, &mode,
10089 &unsignedp, &reversep, &volatilep);
10090 if (TREE_CODE (base1) == INDIRECT_REF)
10091 base1 = TREE_OPERAND (base1, 0);
10092 else
10093 indirect_base1 = true;
10095 if (offset1 == NULL_TREE || integer_zerop (offset1))
10096 offset1 = TREE_OPERAND (arg1, 1);
10097 else
10098 offset1 = size_binop (PLUS_EXPR, offset1,
10099 TREE_OPERAND (arg1, 1));
10100 if (poly_int_tree_p (offset1))
10102 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10103 TYPE_PRECISION (sizetype));
10104 tem <<= LOG2_BITS_PER_UNIT;
10105 tem += bitpos1;
10106 if (tem.to_shwi (&bitpos1))
10107 offset1 = NULL_TREE;
10111 /* If we have equivalent bases we might be able to simplify. */
10112 if (indirect_base0 == indirect_base1
10113 && operand_equal_p (base0, base1,
10114 indirect_base0 ? OEP_ADDRESS_OF : 0))
10116 /* We can fold this expression to a constant if the non-constant
10117 offset parts are equal. */
10118 if ((offset0 == offset1
10119 || (offset0 && offset1
10120 && operand_equal_p (offset0, offset1, 0)))
10121 && (equality_code
10122 || (indirect_base0
10123 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10124 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10126 if (!equality_code
10127 && maybe_ne (bitpos0, bitpos1)
10128 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10129 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10130 fold_overflow_warning (("assuming pointer wraparound does not "
10131 "occur when comparing P +- C1 with "
10132 "P +- C2"),
10133 WARN_STRICT_OVERFLOW_CONDITIONAL);
10135 switch (code)
10137 case EQ_EXPR:
10138 if (known_eq (bitpos0, bitpos1))
10139 return constant_boolean_node (true, type);
10140 if (known_ne (bitpos0, bitpos1))
10141 return constant_boolean_node (false, type);
10142 break;
10143 case NE_EXPR:
10144 if (known_ne (bitpos0, bitpos1))
10145 return constant_boolean_node (true, type);
10146 if (known_eq (bitpos0, bitpos1))
10147 return constant_boolean_node (false, type);
10148 break;
10149 case LT_EXPR:
10150 if (known_lt (bitpos0, bitpos1))
10151 return constant_boolean_node (true, type);
10152 if (known_ge (bitpos0, bitpos1))
10153 return constant_boolean_node (false, type);
10154 break;
10155 case LE_EXPR:
10156 if (known_le (bitpos0, bitpos1))
10157 return constant_boolean_node (true, type);
10158 if (known_gt (bitpos0, bitpos1))
10159 return constant_boolean_node (false, type);
10160 break;
10161 case GE_EXPR:
10162 if (known_ge (bitpos0, bitpos1))
10163 return constant_boolean_node (true, type);
10164 if (known_lt (bitpos0, bitpos1))
10165 return constant_boolean_node (false, type);
10166 break;
10167 case GT_EXPR:
10168 if (known_gt (bitpos0, bitpos1))
10169 return constant_boolean_node (true, type);
10170 if (known_le (bitpos0, bitpos1))
10171 return constant_boolean_node (false, type);
10172 break;
10173 default:;
10176 /* We can simplify the comparison to a comparison of the variable
10177 offset parts if the constant offset parts are equal.
10178 Be careful to use signed sizetype here because otherwise we
10179 mess with array offsets in the wrong way. This is possible
10180 because pointer arithmetic is restricted to retain within an
10181 object and overflow on pointer differences is undefined as of
10182 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10183 else if (known_eq (bitpos0, bitpos1)
10184 && (equality_code
10185 || (indirect_base0
10186 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10187 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10189 /* By converting to signed sizetype we cover middle-end pointer
10190 arithmetic which operates on unsigned pointer types of size
10191 type size and ARRAY_REF offsets which are properly sign or
10192 zero extended from their type in case it is narrower than
10193 sizetype. */
10194 if (offset0 == NULL_TREE)
10195 offset0 = build_int_cst (ssizetype, 0);
10196 else
10197 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10198 if (offset1 == NULL_TREE)
10199 offset1 = build_int_cst (ssizetype, 0);
10200 else
10201 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10203 if (!equality_code
10204 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10205 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10206 fold_overflow_warning (("assuming pointer wraparound does not "
10207 "occur when comparing P +- C1 with "
10208 "P +- C2"),
10209 WARN_STRICT_OVERFLOW_COMPARISON);
10211 return fold_build2_loc (loc, code, type, offset0, offset1);
10214 /* For equal offsets we can simplify to a comparison of the
10215 base addresses. */
10216 else if (known_eq (bitpos0, bitpos1)
10217 && (indirect_base0
10218 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10219 && (indirect_base1
10220 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10221 && ((offset0 == offset1)
10222 || (offset0 && offset1
10223 && operand_equal_p (offset0, offset1, 0))))
10225 if (indirect_base0)
10226 base0 = build_fold_addr_expr_loc (loc, base0);
10227 if (indirect_base1)
10228 base1 = build_fold_addr_expr_loc (loc, base1);
10229 return fold_build2_loc (loc, code, type, base0, base1);
10231 /* Comparison between an ordinary (non-weak) symbol and a null
10232 pointer can be eliminated since such symbols must have a non
10233 null address. In C, relational expressions between pointers
10234 to objects and null pointers are undefined. The results
10235 below follow the C++ rules with the additional property that
10236 every object pointer compares greater than a null pointer.
10238 else if (((DECL_P (base0)
10239 && maybe_nonzero_address (base0) > 0
10240 /* Avoid folding references to struct members at offset 0 to
10241 prevent tests like '&ptr->firstmember == 0' from getting
10242 eliminated. When ptr is null, although the -> expression
10243 is strictly speaking invalid, GCC retains it as a matter
10244 of QoI. See PR c/44555. */
10245 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10246 || CONSTANT_CLASS_P (base0))
10247 && indirect_base0
10248 /* The caller guarantees that when one of the arguments is
10249 constant (i.e., null in this case) it is second. */
10250 && integer_zerop (arg1))
10252 switch (code)
10254 case EQ_EXPR:
10255 case LE_EXPR:
10256 case LT_EXPR:
10257 return constant_boolean_node (false, type);
10258 case GE_EXPR:
10259 case GT_EXPR:
10260 case NE_EXPR:
10261 return constant_boolean_node (true, type);
10262 default:
10263 gcc_unreachable ();
10268 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10269 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10270 the resulting offset is smaller in absolute value than the
10271 original one and has the same sign. */
10272 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10273 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10274 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10275 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10276 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10277 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10278 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10279 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10281 tree const1 = TREE_OPERAND (arg0, 1);
10282 tree const2 = TREE_OPERAND (arg1, 1);
10283 tree variable1 = TREE_OPERAND (arg0, 0);
10284 tree variable2 = TREE_OPERAND (arg1, 0);
10285 tree cst;
10286 const char * const warnmsg = G_("assuming signed overflow does not "
10287 "occur when combining constants around "
10288 "a comparison");
10290 /* Put the constant on the side where it doesn't overflow and is
10291 of lower absolute value and of same sign than before. */
10292 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10293 ? MINUS_EXPR : PLUS_EXPR,
10294 const2, const1);
10295 if (!TREE_OVERFLOW (cst)
10296 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10297 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10299 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10300 return fold_build2_loc (loc, code, type,
10301 variable1,
10302 fold_build2_loc (loc, TREE_CODE (arg1),
10303 TREE_TYPE (arg1),
10304 variable2, cst));
10307 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10308 ? MINUS_EXPR : PLUS_EXPR,
10309 const1, const2);
10310 if (!TREE_OVERFLOW (cst)
10311 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10312 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10314 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10315 return fold_build2_loc (loc, code, type,
10316 fold_build2_loc (loc, TREE_CODE (arg0),
10317 TREE_TYPE (arg0),
10318 variable1, cst),
10319 variable2);
10323 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10324 if (tem)
10325 return tem;
10327 /* If we are comparing an expression that just has comparisons
10328 of two integer values, arithmetic expressions of those comparisons,
10329 and constants, we can simplify it. There are only three cases
10330 to check: the two values can either be equal, the first can be
10331 greater, or the second can be greater. Fold the expression for
10332 those three values. Since each value must be 0 or 1, we have
10333 eight possibilities, each of which corresponds to the constant 0
10334 or 1 or one of the six possible comparisons.
10336 This handles common cases like (a > b) == 0 but also handles
10337 expressions like ((x > y) - (y > x)) > 0, which supposedly
10338 occur in macroized code. */
10340 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10342 tree cval1 = 0, cval2 = 0;
10344 if (twoval_comparison_p (arg0, &cval1, &cval2)
10345 /* Don't handle degenerate cases here; they should already
10346 have been handled anyway. */
10347 && cval1 != 0 && cval2 != 0
10348 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10349 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10350 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10351 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10352 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10353 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10354 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10356 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10357 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10359 /* We can't just pass T to eval_subst in case cval1 or cval2
10360 was the same as ARG1. */
10362 tree high_result
10363 = fold_build2_loc (loc, code, type,
10364 eval_subst (loc, arg0, cval1, maxval,
10365 cval2, minval),
10366 arg1);
10367 tree equal_result
10368 = fold_build2_loc (loc, code, type,
10369 eval_subst (loc, arg0, cval1, maxval,
10370 cval2, maxval),
10371 arg1);
10372 tree low_result
10373 = fold_build2_loc (loc, code, type,
10374 eval_subst (loc, arg0, cval1, minval,
10375 cval2, maxval),
10376 arg1);
10378 /* All three of these results should be 0 or 1. Confirm they are.
10379 Then use those values to select the proper code to use. */
10381 if (TREE_CODE (high_result) == INTEGER_CST
10382 && TREE_CODE (equal_result) == INTEGER_CST
10383 && TREE_CODE (low_result) == INTEGER_CST)
10385 /* Make a 3-bit mask with the high-order bit being the
10386 value for `>', the next for '=', and the low for '<'. */
10387 switch ((integer_onep (high_result) * 4)
10388 + (integer_onep (equal_result) * 2)
10389 + integer_onep (low_result))
10391 case 0:
10392 /* Always false. */
10393 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10394 case 1:
10395 code = LT_EXPR;
10396 break;
10397 case 2:
10398 code = EQ_EXPR;
10399 break;
10400 case 3:
10401 code = LE_EXPR;
10402 break;
10403 case 4:
10404 code = GT_EXPR;
10405 break;
10406 case 5:
10407 code = NE_EXPR;
10408 break;
10409 case 6:
10410 code = GE_EXPR;
10411 break;
10412 case 7:
10413 /* Always true. */
10414 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10417 return fold_build2_loc (loc, code, type, cval1, cval2);
10422 return NULL_TREE;
10426 /* Subroutine of fold_binary. Optimize complex multiplications of the
10427 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10428 argument EXPR represents the expression "z" of type TYPE. */
10430 static tree
10431 fold_mult_zconjz (location_t loc, tree type, tree expr)
10433 tree itype = TREE_TYPE (type);
10434 tree rpart, ipart, tem;
10436 if (TREE_CODE (expr) == COMPLEX_EXPR)
10438 rpart = TREE_OPERAND (expr, 0);
10439 ipart = TREE_OPERAND (expr, 1);
10441 else if (TREE_CODE (expr) == COMPLEX_CST)
10443 rpart = TREE_REALPART (expr);
10444 ipart = TREE_IMAGPART (expr);
10446 else
10448 expr = save_expr (expr);
10449 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10450 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10453 rpart = save_expr (rpart);
10454 ipart = save_expr (ipart);
10455 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10456 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10457 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10458 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10459 build_zero_cst (itype));
10463 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10464 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10465 true if successful. */
10467 static bool
10468 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10470 unsigned HOST_WIDE_INT i, nunits;
10472 if (TREE_CODE (arg) == VECTOR_CST
10473 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10475 for (i = 0; i < nunits; ++i)
10476 elts[i] = VECTOR_CST_ELT (arg, i);
10478 else if (TREE_CODE (arg) == CONSTRUCTOR)
10480 constructor_elt *elt;
10482 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10483 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10484 return false;
10485 else
10486 elts[i] = elt->value;
10488 else
10489 return false;
10490 for (; i < nelts; i++)
10491 elts[i]
10492 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10493 return true;
10496 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10497 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10498 NULL_TREE otherwise. */
10500 tree
10501 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10503 unsigned int i;
10504 unsigned HOST_WIDE_INT nelts;
10505 bool need_ctor = false;
10507 if (!sel.length ().is_constant (&nelts))
10508 return NULL_TREE;
10509 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10510 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10511 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10512 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10513 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10514 return NULL_TREE;
10516 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10517 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10518 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10519 return NULL_TREE;
10521 tree_vector_builder out_elts (type, nelts, 1);
10522 for (i = 0; i < nelts; i++)
10524 HOST_WIDE_INT index;
10525 if (!sel[i].is_constant (&index))
10526 return NULL_TREE;
10527 if (!CONSTANT_CLASS_P (in_elts[index]))
10528 need_ctor = true;
10529 out_elts.quick_push (unshare_expr (in_elts[index]));
10532 if (need_ctor)
10534 vec<constructor_elt, va_gc> *v;
10535 vec_alloc (v, nelts);
10536 for (i = 0; i < nelts; i++)
10537 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10538 return build_constructor (type, v);
10540 else
10541 return out_elts.build ();
10544 /* Try to fold a pointer difference of type TYPE two address expressions of
10545 array references AREF0 and AREF1 using location LOC. Return a
10546 simplified expression for the difference or NULL_TREE. */
10548 static tree
10549 fold_addr_of_array_ref_difference (location_t loc, tree type,
10550 tree aref0, tree aref1,
10551 bool use_pointer_diff)
10553 tree base0 = TREE_OPERAND (aref0, 0);
10554 tree base1 = TREE_OPERAND (aref1, 0);
10555 tree base_offset = build_int_cst (type, 0);
10557 /* If the bases are array references as well, recurse. If the bases
10558 are pointer indirections compute the difference of the pointers.
10559 If the bases are equal, we are set. */
10560 if ((TREE_CODE (base0) == ARRAY_REF
10561 && TREE_CODE (base1) == ARRAY_REF
10562 && (base_offset
10563 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10564 use_pointer_diff)))
10565 || (INDIRECT_REF_P (base0)
10566 && INDIRECT_REF_P (base1)
10567 && (base_offset
10568 = use_pointer_diff
10569 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10570 TREE_OPERAND (base0, 0),
10571 TREE_OPERAND (base1, 0))
10572 : fold_binary_loc (loc, MINUS_EXPR, type,
10573 fold_convert (type,
10574 TREE_OPERAND (base0, 0)),
10575 fold_convert (type,
10576 TREE_OPERAND (base1, 0)))))
10577 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10579 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10580 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10581 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10582 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10583 return fold_build2_loc (loc, PLUS_EXPR, type,
10584 base_offset,
10585 fold_build2_loc (loc, MULT_EXPR, type,
10586 diff, esz));
10588 return NULL_TREE;
10591 /* If the real or vector real constant CST of type TYPE has an exact
10592 inverse, return it, else return NULL. */
10594 tree
10595 exact_inverse (tree type, tree cst)
10597 REAL_VALUE_TYPE r;
10598 tree unit_type;
10599 machine_mode mode;
10601 switch (TREE_CODE (cst))
10603 case REAL_CST:
10604 r = TREE_REAL_CST (cst);
10606 if (exact_real_inverse (TYPE_MODE (type), &r))
10607 return build_real (type, r);
10609 return NULL_TREE;
10611 case VECTOR_CST:
10613 unit_type = TREE_TYPE (type);
10614 mode = TYPE_MODE (unit_type);
10616 tree_vector_builder elts;
10617 if (!elts.new_unary_operation (type, cst, false))
10618 return NULL_TREE;
10619 unsigned int count = elts.encoded_nelts ();
10620 for (unsigned int i = 0; i < count; ++i)
10622 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10623 if (!exact_real_inverse (mode, &r))
10624 return NULL_TREE;
10625 elts.quick_push (build_real (unit_type, r));
10628 return elts.build ();
10631 default:
10632 return NULL_TREE;
10636 /* Mask out the tz least significant bits of X of type TYPE where
10637 tz is the number of trailing zeroes in Y. */
10638 static wide_int
10639 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10641 int tz = wi::ctz (y);
10642 if (tz > 0)
10643 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10644 return x;
10647 /* Return true when T is an address and is known to be nonzero.
10648 For floating point we further ensure that T is not denormal.
10649 Similar logic is present in nonzero_address in rtlanal.h.
10651 If the return value is based on the assumption that signed overflow
10652 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10653 change *STRICT_OVERFLOW_P. */
10655 static bool
10656 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10658 tree type = TREE_TYPE (t);
10659 enum tree_code code;
10661 /* Doing something useful for floating point would need more work. */
10662 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10663 return false;
10665 code = TREE_CODE (t);
10666 switch (TREE_CODE_CLASS (code))
10668 case tcc_unary:
10669 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10670 strict_overflow_p);
10671 case tcc_binary:
10672 case tcc_comparison:
10673 return tree_binary_nonzero_warnv_p (code, type,
10674 TREE_OPERAND (t, 0),
10675 TREE_OPERAND (t, 1),
10676 strict_overflow_p);
10677 case tcc_constant:
10678 case tcc_declaration:
10679 case tcc_reference:
10680 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10682 default:
10683 break;
10686 switch (code)
10688 case TRUTH_NOT_EXPR:
10689 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10690 strict_overflow_p);
10692 case TRUTH_AND_EXPR:
10693 case TRUTH_OR_EXPR:
10694 case TRUTH_XOR_EXPR:
10695 return tree_binary_nonzero_warnv_p (code, type,
10696 TREE_OPERAND (t, 0),
10697 TREE_OPERAND (t, 1),
10698 strict_overflow_p);
10700 case COND_EXPR:
10701 case CONSTRUCTOR:
10702 case OBJ_TYPE_REF:
10703 case ASSERT_EXPR:
10704 case ADDR_EXPR:
10705 case WITH_SIZE_EXPR:
10706 case SSA_NAME:
10707 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10709 case COMPOUND_EXPR:
10710 case MODIFY_EXPR:
10711 case BIND_EXPR:
10712 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10713 strict_overflow_p);
10715 case SAVE_EXPR:
10716 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10717 strict_overflow_p);
10719 case CALL_EXPR:
10721 tree fndecl = get_callee_fndecl (t);
10722 if (!fndecl) return false;
10723 if (flag_delete_null_pointer_checks && !flag_check_new
10724 && DECL_IS_OPERATOR_NEW_P (fndecl)
10725 && !TREE_NOTHROW (fndecl))
10726 return true;
10727 if (flag_delete_null_pointer_checks
10728 && lookup_attribute ("returns_nonnull",
10729 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10730 return true;
10731 return alloca_call_p (t);
10734 default:
10735 break;
10737 return false;
10740 /* Return true when T is an address and is known to be nonzero.
10741 Handle warnings about undefined signed overflow. */
10743 bool
10744 tree_expr_nonzero_p (tree t)
10746 bool ret, strict_overflow_p;
10748 strict_overflow_p = false;
10749 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10750 if (strict_overflow_p)
10751 fold_overflow_warning (("assuming signed overflow does not occur when "
10752 "determining that expression is always "
10753 "non-zero"),
10754 WARN_STRICT_OVERFLOW_MISC);
10755 return ret;
10758 /* Return true if T is known not to be equal to an integer W. */
10760 bool
10761 expr_not_equal_to (tree t, const wide_int &w)
10763 int_range_max vr;
10764 switch (TREE_CODE (t))
10766 case INTEGER_CST:
10767 return wi::to_wide (t) != w;
10769 case SSA_NAME:
10770 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10771 return false;
10773 if (cfun)
10774 get_range_query (cfun)->range_of_expr (vr, t);
10775 else
10776 get_global_range_query ()->range_of_expr (vr, t);
10778 if (!vr.undefined_p ()
10779 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10780 return true;
10781 /* If T has some known zero bits and W has any of those bits set,
10782 then T is known not to be equal to W. */
10783 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10784 TYPE_PRECISION (TREE_TYPE (t))), 0))
10785 return true;
10786 return false;
10788 default:
10789 return false;
10793 /* Fold a binary expression of code CODE and type TYPE with operands
10794 OP0 and OP1. LOC is the location of the resulting expression.
10795 Return the folded expression if folding is successful. Otherwise,
10796 return NULL_TREE. */
10798 tree
10799 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10800 tree op0, tree op1)
10802 enum tree_code_class kind = TREE_CODE_CLASS (code);
10803 tree arg0, arg1, tem;
10804 tree t1 = NULL_TREE;
10805 bool strict_overflow_p;
10806 unsigned int prec;
10808 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10809 && TREE_CODE_LENGTH (code) == 2
10810 && op0 != NULL_TREE
10811 && op1 != NULL_TREE);
10813 arg0 = op0;
10814 arg1 = op1;
10816 /* Strip any conversions that don't change the mode. This is
10817 safe for every expression, except for a comparison expression
10818 because its signedness is derived from its operands. So, in
10819 the latter case, only strip conversions that don't change the
10820 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10821 preserved.
10823 Note that this is done as an internal manipulation within the
10824 constant folder, in order to find the simplest representation
10825 of the arguments so that their form can be studied. In any
10826 cases, the appropriate type conversions should be put back in
10827 the tree that will get out of the constant folder. */
10829 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10831 STRIP_SIGN_NOPS (arg0);
10832 STRIP_SIGN_NOPS (arg1);
10834 else
10836 STRIP_NOPS (arg0);
10837 STRIP_NOPS (arg1);
10840 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10841 constant but we can't do arithmetic on them. */
10842 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10844 tem = const_binop (code, type, arg0, arg1);
10845 if (tem != NULL_TREE)
10847 if (TREE_TYPE (tem) != type)
10848 tem = fold_convert_loc (loc, type, tem);
10849 return tem;
10853 /* If this is a commutative operation, and ARG0 is a constant, move it
10854 to ARG1 to reduce the number of tests below. */
10855 if (commutative_tree_code (code)
10856 && tree_swap_operands_p (arg0, arg1))
10857 return fold_build2_loc (loc, code, type, op1, op0);
10859 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10860 to ARG1 to reduce the number of tests below. */
10861 if (kind == tcc_comparison
10862 && tree_swap_operands_p (arg0, arg1))
10863 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10865 tem = generic_simplify (loc, code, type, op0, op1);
10866 if (tem)
10867 return tem;
10869 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10871 First check for cases where an arithmetic operation is applied to a
10872 compound, conditional, or comparison operation. Push the arithmetic
10873 operation inside the compound or conditional to see if any folding
10874 can then be done. Convert comparison to conditional for this purpose.
10875 The also optimizes non-constant cases that used to be done in
10876 expand_expr.
10878 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10879 one of the operands is a comparison and the other is a comparison, a
10880 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10881 code below would make the expression more complex. Change it to a
10882 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10883 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10885 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10886 || code == EQ_EXPR || code == NE_EXPR)
10887 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10888 && ((truth_value_p (TREE_CODE (arg0))
10889 && (truth_value_p (TREE_CODE (arg1))
10890 || (TREE_CODE (arg1) == BIT_AND_EXPR
10891 && integer_onep (TREE_OPERAND (arg1, 1)))))
10892 || (truth_value_p (TREE_CODE (arg1))
10893 && (truth_value_p (TREE_CODE (arg0))
10894 || (TREE_CODE (arg0) == BIT_AND_EXPR
10895 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10897 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10898 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10899 : TRUTH_XOR_EXPR,
10900 boolean_type_node,
10901 fold_convert_loc (loc, boolean_type_node, arg0),
10902 fold_convert_loc (loc, boolean_type_node, arg1));
10904 if (code == EQ_EXPR)
10905 tem = invert_truthvalue_loc (loc, tem);
10907 return fold_convert_loc (loc, type, tem);
10910 if (TREE_CODE_CLASS (code) == tcc_binary
10911 || TREE_CODE_CLASS (code) == tcc_comparison)
10913 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10915 tem = fold_build2_loc (loc, code, type,
10916 fold_convert_loc (loc, TREE_TYPE (op0),
10917 TREE_OPERAND (arg0, 1)), op1);
10918 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10919 tem);
10921 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10923 tem = fold_build2_loc (loc, code, type, op0,
10924 fold_convert_loc (loc, TREE_TYPE (op1),
10925 TREE_OPERAND (arg1, 1)));
10926 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10927 tem);
10930 if (TREE_CODE (arg0) == COND_EXPR
10931 || TREE_CODE (arg0) == VEC_COND_EXPR
10932 || COMPARISON_CLASS_P (arg0))
10934 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10935 arg0, arg1,
10936 /*cond_first_p=*/1);
10937 if (tem != NULL_TREE)
10938 return tem;
10941 if (TREE_CODE (arg1) == COND_EXPR
10942 || TREE_CODE (arg1) == VEC_COND_EXPR
10943 || COMPARISON_CLASS_P (arg1))
10945 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10946 arg1, arg0,
10947 /*cond_first_p=*/0);
10948 if (tem != NULL_TREE)
10949 return tem;
10953 switch (code)
10955 case MEM_REF:
10956 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10957 if (TREE_CODE (arg0) == ADDR_EXPR
10958 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10960 tree iref = TREE_OPERAND (arg0, 0);
10961 return fold_build2 (MEM_REF, type,
10962 TREE_OPERAND (iref, 0),
10963 int_const_binop (PLUS_EXPR, arg1,
10964 TREE_OPERAND (iref, 1)));
10967 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10968 if (TREE_CODE (arg0) == ADDR_EXPR
10969 && handled_component_p (TREE_OPERAND (arg0, 0)))
10971 tree base;
10972 poly_int64 coffset;
10973 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10974 &coffset);
10975 if (!base)
10976 return NULL_TREE;
10977 return fold_build2 (MEM_REF, type,
10978 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10979 int_const_binop (PLUS_EXPR, arg1,
10980 size_int (coffset)));
10983 return NULL_TREE;
10985 case POINTER_PLUS_EXPR:
10986 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10987 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10988 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10989 return fold_convert_loc (loc, type,
10990 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10991 fold_convert_loc (loc, sizetype,
10992 arg1),
10993 fold_convert_loc (loc, sizetype,
10994 arg0)));
10996 return NULL_TREE;
10998 case PLUS_EXPR:
10999 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11001 /* X + (X / CST) * -CST is X % CST. */
11002 if (TREE_CODE (arg1) == MULT_EXPR
11003 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11004 && operand_equal_p (arg0,
11005 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11007 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11008 tree cst1 = TREE_OPERAND (arg1, 1);
11009 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11010 cst1, cst0);
11011 if (sum && integer_zerop (sum))
11012 return fold_convert_loc (loc, type,
11013 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11014 TREE_TYPE (arg0), arg0,
11015 cst0));
11019 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11020 one. Make sure the type is not saturating and has the signedness of
11021 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11022 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11023 if ((TREE_CODE (arg0) == MULT_EXPR
11024 || TREE_CODE (arg1) == MULT_EXPR)
11025 && !TYPE_SATURATING (type)
11026 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11027 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11028 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11030 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11031 if (tem)
11032 return tem;
11035 if (! FLOAT_TYPE_P (type))
11037 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11038 (plus (plus (mult) (mult)) (foo)) so that we can
11039 take advantage of the factoring cases below. */
11040 if (ANY_INTEGRAL_TYPE_P (type)
11041 && TYPE_OVERFLOW_WRAPS (type)
11042 && (((TREE_CODE (arg0) == PLUS_EXPR
11043 || TREE_CODE (arg0) == MINUS_EXPR)
11044 && TREE_CODE (arg1) == MULT_EXPR)
11045 || ((TREE_CODE (arg1) == PLUS_EXPR
11046 || TREE_CODE (arg1) == MINUS_EXPR)
11047 && TREE_CODE (arg0) == MULT_EXPR)))
11049 tree parg0, parg1, parg, marg;
11050 enum tree_code pcode;
11052 if (TREE_CODE (arg1) == MULT_EXPR)
11053 parg = arg0, marg = arg1;
11054 else
11055 parg = arg1, marg = arg0;
11056 pcode = TREE_CODE (parg);
11057 parg0 = TREE_OPERAND (parg, 0);
11058 parg1 = TREE_OPERAND (parg, 1);
11059 STRIP_NOPS (parg0);
11060 STRIP_NOPS (parg1);
11062 if (TREE_CODE (parg0) == MULT_EXPR
11063 && TREE_CODE (parg1) != MULT_EXPR)
11064 return fold_build2_loc (loc, pcode, type,
11065 fold_build2_loc (loc, PLUS_EXPR, type,
11066 fold_convert_loc (loc, type,
11067 parg0),
11068 fold_convert_loc (loc, type,
11069 marg)),
11070 fold_convert_loc (loc, type, parg1));
11071 if (TREE_CODE (parg0) != MULT_EXPR
11072 && TREE_CODE (parg1) == MULT_EXPR)
11073 return
11074 fold_build2_loc (loc, PLUS_EXPR, type,
11075 fold_convert_loc (loc, type, parg0),
11076 fold_build2_loc (loc, pcode, type,
11077 fold_convert_loc (loc, type, marg),
11078 fold_convert_loc (loc, type,
11079 parg1)));
11082 else
11084 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11085 to __complex__ ( x, y ). This is not the same for SNaNs or
11086 if signed zeros are involved. */
11087 if (!HONOR_SNANS (arg0)
11088 && !HONOR_SIGNED_ZEROS (arg0)
11089 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11091 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11092 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11093 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11094 bool arg0rz = false, arg0iz = false;
11095 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11096 || (arg0i && (arg0iz = real_zerop (arg0i))))
11098 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11099 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11100 if (arg0rz && arg1i && real_zerop (arg1i))
11102 tree rp = arg1r ? arg1r
11103 : build1 (REALPART_EXPR, rtype, arg1);
11104 tree ip = arg0i ? arg0i
11105 : build1 (IMAGPART_EXPR, rtype, arg0);
11106 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11108 else if (arg0iz && arg1r && real_zerop (arg1r))
11110 tree rp = arg0r ? arg0r
11111 : build1 (REALPART_EXPR, rtype, arg0);
11112 tree ip = arg1i ? arg1i
11113 : build1 (IMAGPART_EXPR, rtype, arg1);
11114 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11119 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11120 We associate floats only if the user has specified
11121 -fassociative-math. */
11122 if (flag_associative_math
11123 && TREE_CODE (arg1) == PLUS_EXPR
11124 && TREE_CODE (arg0) != MULT_EXPR)
11126 tree tree10 = TREE_OPERAND (arg1, 0);
11127 tree tree11 = TREE_OPERAND (arg1, 1);
11128 if (TREE_CODE (tree11) == MULT_EXPR
11129 && TREE_CODE (tree10) == MULT_EXPR)
11131 tree tree0;
11132 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11133 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11136 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11137 We associate floats only if the user has specified
11138 -fassociative-math. */
11139 if (flag_associative_math
11140 && TREE_CODE (arg0) == PLUS_EXPR
11141 && TREE_CODE (arg1) != MULT_EXPR)
11143 tree tree00 = TREE_OPERAND (arg0, 0);
11144 tree tree01 = TREE_OPERAND (arg0, 1);
11145 if (TREE_CODE (tree01) == MULT_EXPR
11146 && TREE_CODE (tree00) == MULT_EXPR)
11148 tree tree0;
11149 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11150 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11155 bit_rotate:
11156 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11157 is a rotate of A by C1 bits. */
11158 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11159 is a rotate of A by B bits.
11160 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11161 though in this case CODE must be | and not + or ^, otherwise
11162 it doesn't return A when B is 0. */
11164 enum tree_code code0, code1;
11165 tree rtype;
11166 code0 = TREE_CODE (arg0);
11167 code1 = TREE_CODE (arg1);
11168 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11169 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11170 && operand_equal_p (TREE_OPERAND (arg0, 0),
11171 TREE_OPERAND (arg1, 0), 0)
11172 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11173 TYPE_UNSIGNED (rtype))
11174 /* Only create rotates in complete modes. Other cases are not
11175 expanded properly. */
11176 && (element_precision (rtype)
11177 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11179 tree tree01, tree11;
11180 tree orig_tree01, orig_tree11;
11181 enum tree_code code01, code11;
11183 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11184 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11185 STRIP_NOPS (tree01);
11186 STRIP_NOPS (tree11);
11187 code01 = TREE_CODE (tree01);
11188 code11 = TREE_CODE (tree11);
11189 if (code11 != MINUS_EXPR
11190 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11192 std::swap (code0, code1);
11193 std::swap (code01, code11);
11194 std::swap (tree01, tree11);
11195 std::swap (orig_tree01, orig_tree11);
11197 if (code01 == INTEGER_CST
11198 && code11 == INTEGER_CST
11199 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11200 == element_precision (rtype)))
11202 tem = build2_loc (loc, LROTATE_EXPR,
11203 rtype, TREE_OPERAND (arg0, 0),
11204 code0 == LSHIFT_EXPR
11205 ? orig_tree01 : orig_tree11);
11206 return fold_convert_loc (loc, type, tem);
11208 else if (code11 == MINUS_EXPR)
11210 tree tree110, tree111;
11211 tree110 = TREE_OPERAND (tree11, 0);
11212 tree111 = TREE_OPERAND (tree11, 1);
11213 STRIP_NOPS (tree110);
11214 STRIP_NOPS (tree111);
11215 if (TREE_CODE (tree110) == INTEGER_CST
11216 && compare_tree_int (tree110,
11217 element_precision (rtype)) == 0
11218 && operand_equal_p (tree01, tree111, 0))
11220 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11221 ? LROTATE_EXPR : RROTATE_EXPR),
11222 rtype, TREE_OPERAND (arg0, 0),
11223 orig_tree01);
11224 return fold_convert_loc (loc, type, tem);
11227 else if (code == BIT_IOR_EXPR
11228 && code11 == BIT_AND_EXPR
11229 && pow2p_hwi (element_precision (rtype)))
11231 tree tree110, tree111;
11232 tree110 = TREE_OPERAND (tree11, 0);
11233 tree111 = TREE_OPERAND (tree11, 1);
11234 STRIP_NOPS (tree110);
11235 STRIP_NOPS (tree111);
11236 if (TREE_CODE (tree110) == NEGATE_EXPR
11237 && TREE_CODE (tree111) == INTEGER_CST
11238 && compare_tree_int (tree111,
11239 element_precision (rtype) - 1) == 0
11240 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11242 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11243 ? LROTATE_EXPR : RROTATE_EXPR),
11244 rtype, TREE_OPERAND (arg0, 0),
11245 orig_tree01);
11246 return fold_convert_loc (loc, type, tem);
11252 associate:
11253 /* In most languages, can't associate operations on floats through
11254 parentheses. Rather than remember where the parentheses were, we
11255 don't associate floats at all, unless the user has specified
11256 -fassociative-math.
11257 And, we need to make sure type is not saturating. */
11259 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11260 && !TYPE_SATURATING (type))
11262 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11263 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11264 tree atype = type;
11265 bool ok = true;
11267 /* Split both trees into variables, constants, and literals. Then
11268 associate each group together, the constants with literals,
11269 then the result with variables. This increases the chances of
11270 literals being recombined later and of generating relocatable
11271 expressions for the sum of a constant and literal. */
11272 var0 = split_tree (arg0, type, code,
11273 &minus_var0, &con0, &minus_con0,
11274 &lit0, &minus_lit0, 0);
11275 var1 = split_tree (arg1, type, code,
11276 &minus_var1, &con1, &minus_con1,
11277 &lit1, &minus_lit1, code == MINUS_EXPR);
11279 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11280 if (code == MINUS_EXPR)
11281 code = PLUS_EXPR;
11283 /* With undefined overflow prefer doing association in a type
11284 which wraps on overflow, if that is one of the operand types. */
11285 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11286 && !TYPE_OVERFLOW_WRAPS (type))
11288 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11289 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11290 atype = TREE_TYPE (arg0);
11291 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11292 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11293 atype = TREE_TYPE (arg1);
11294 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11297 /* With undefined overflow we can only associate constants with one
11298 variable, and constants whose association doesn't overflow. */
11299 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11300 && !TYPE_OVERFLOW_WRAPS (atype))
11302 if ((var0 && var1) || (minus_var0 && minus_var1))
11304 /* ??? If split_tree would handle NEGATE_EXPR we could
11305 simply reject these cases and the allowed cases would
11306 be the var0/minus_var1 ones. */
11307 tree tmp0 = var0 ? var0 : minus_var0;
11308 tree tmp1 = var1 ? var1 : minus_var1;
11309 bool one_neg = false;
11311 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11313 tmp0 = TREE_OPERAND (tmp0, 0);
11314 one_neg = !one_neg;
11316 if (CONVERT_EXPR_P (tmp0)
11317 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11318 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11319 <= TYPE_PRECISION (atype)))
11320 tmp0 = TREE_OPERAND (tmp0, 0);
11321 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11323 tmp1 = TREE_OPERAND (tmp1, 0);
11324 one_neg = !one_neg;
11326 if (CONVERT_EXPR_P (tmp1)
11327 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11328 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11329 <= TYPE_PRECISION (atype)))
11330 tmp1 = TREE_OPERAND (tmp1, 0);
11331 /* The only case we can still associate with two variables
11332 is if they cancel out. */
11333 if (!one_neg
11334 || !operand_equal_p (tmp0, tmp1, 0))
11335 ok = false;
11337 else if ((var0 && minus_var1
11338 && ! operand_equal_p (var0, minus_var1, 0))
11339 || (minus_var0 && var1
11340 && ! operand_equal_p (minus_var0, var1, 0)))
11341 ok = false;
11344 /* Only do something if we found more than two objects. Otherwise,
11345 nothing has changed and we risk infinite recursion. */
11346 if (ok
11347 && ((var0 != 0) + (var1 != 0)
11348 + (minus_var0 != 0) + (minus_var1 != 0)
11349 + (con0 != 0) + (con1 != 0)
11350 + (minus_con0 != 0) + (minus_con1 != 0)
11351 + (lit0 != 0) + (lit1 != 0)
11352 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11354 var0 = associate_trees (loc, var0, var1, code, atype);
11355 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11356 code, atype);
11357 con0 = associate_trees (loc, con0, con1, code, atype);
11358 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11359 code, atype);
11360 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11361 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11362 code, atype);
11364 if (minus_var0 && var0)
11366 var0 = associate_trees (loc, var0, minus_var0,
11367 MINUS_EXPR, atype);
11368 minus_var0 = 0;
11370 if (minus_con0 && con0)
11372 con0 = associate_trees (loc, con0, minus_con0,
11373 MINUS_EXPR, atype);
11374 minus_con0 = 0;
11377 /* Preserve the MINUS_EXPR if the negative part of the literal is
11378 greater than the positive part. Otherwise, the multiplicative
11379 folding code (i.e extract_muldiv) may be fooled in case
11380 unsigned constants are subtracted, like in the following
11381 example: ((X*2 + 4) - 8U)/2. */
11382 if (minus_lit0 && lit0)
11384 if (TREE_CODE (lit0) == INTEGER_CST
11385 && TREE_CODE (minus_lit0) == INTEGER_CST
11386 && tree_int_cst_lt (lit0, minus_lit0)
11387 /* But avoid ending up with only negated parts. */
11388 && (var0 || con0))
11390 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11391 MINUS_EXPR, atype);
11392 lit0 = 0;
11394 else
11396 lit0 = associate_trees (loc, lit0, minus_lit0,
11397 MINUS_EXPR, atype);
11398 minus_lit0 = 0;
11402 /* Don't introduce overflows through reassociation. */
11403 if ((lit0 && TREE_OVERFLOW_P (lit0))
11404 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11405 return NULL_TREE;
11407 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11408 con0 = associate_trees (loc, con0, lit0, code, atype);
11409 lit0 = 0;
11410 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11411 code, atype);
11412 minus_lit0 = 0;
11414 /* Eliminate minus_con0. */
11415 if (minus_con0)
11417 if (con0)
11418 con0 = associate_trees (loc, con0, minus_con0,
11419 MINUS_EXPR, atype);
11420 else if (var0)
11421 var0 = associate_trees (loc, var0, minus_con0,
11422 MINUS_EXPR, atype);
11423 else
11424 gcc_unreachable ();
11425 minus_con0 = 0;
11428 /* Eliminate minus_var0. */
11429 if (minus_var0)
11431 if (con0)
11432 con0 = associate_trees (loc, con0, minus_var0,
11433 MINUS_EXPR, atype);
11434 else
11435 gcc_unreachable ();
11436 minus_var0 = 0;
11439 return
11440 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11441 code, atype));
11445 return NULL_TREE;
11447 case POINTER_DIFF_EXPR:
11448 case MINUS_EXPR:
11449 /* Fold &a[i] - &a[j] to i-j. */
11450 if (TREE_CODE (arg0) == ADDR_EXPR
11451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11452 && TREE_CODE (arg1) == ADDR_EXPR
11453 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11455 tree tem = fold_addr_of_array_ref_difference (loc, type,
11456 TREE_OPERAND (arg0, 0),
11457 TREE_OPERAND (arg1, 0),
11458 code
11459 == POINTER_DIFF_EXPR);
11460 if (tem)
11461 return tem;
11464 /* Further transformations are not for pointers. */
11465 if (code == POINTER_DIFF_EXPR)
11466 return NULL_TREE;
11468 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11469 if (TREE_CODE (arg0) == NEGATE_EXPR
11470 && negate_expr_p (op1)
11471 /* If arg0 is e.g. unsigned int and type is int, then this could
11472 introduce UB, because if A is INT_MIN at runtime, the original
11473 expression can be well defined while the latter is not.
11474 See PR83269. */
11475 && !(ANY_INTEGRAL_TYPE_P (type)
11476 && TYPE_OVERFLOW_UNDEFINED (type)
11477 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11478 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11479 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11480 fold_convert_loc (loc, type,
11481 TREE_OPERAND (arg0, 0)));
11483 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11484 __complex__ ( x, -y ). This is not the same for SNaNs or if
11485 signed zeros are involved. */
11486 if (!HONOR_SNANS (arg0)
11487 && !HONOR_SIGNED_ZEROS (arg0)
11488 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11490 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11491 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11492 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11493 bool arg0rz = false, arg0iz = false;
11494 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11495 || (arg0i && (arg0iz = real_zerop (arg0i))))
11497 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11498 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11499 if (arg0rz && arg1i && real_zerop (arg1i))
11501 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11502 arg1r ? arg1r
11503 : build1 (REALPART_EXPR, rtype, arg1));
11504 tree ip = arg0i ? arg0i
11505 : build1 (IMAGPART_EXPR, rtype, arg0);
11506 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11508 else if (arg0iz && arg1r && real_zerop (arg1r))
11510 tree rp = arg0r ? arg0r
11511 : build1 (REALPART_EXPR, rtype, arg0);
11512 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11513 arg1i ? arg1i
11514 : build1 (IMAGPART_EXPR, rtype, arg1));
11515 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11520 /* A - B -> A + (-B) if B is easily negatable. */
11521 if (negate_expr_p (op1)
11522 && ! TYPE_OVERFLOW_SANITIZED (type)
11523 && ((FLOAT_TYPE_P (type)
11524 /* Avoid this transformation if B is a positive REAL_CST. */
11525 && (TREE_CODE (op1) != REAL_CST
11526 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11527 || INTEGRAL_TYPE_P (type)))
11528 return fold_build2_loc (loc, PLUS_EXPR, type,
11529 fold_convert_loc (loc, type, arg0),
11530 negate_expr (op1));
11532 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11533 one. Make sure the type is not saturating and has the signedness of
11534 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11535 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11536 if ((TREE_CODE (arg0) == MULT_EXPR
11537 || TREE_CODE (arg1) == MULT_EXPR)
11538 && !TYPE_SATURATING (type)
11539 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11540 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11541 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11543 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11544 if (tem)
11545 return tem;
11548 goto associate;
11550 case MULT_EXPR:
11551 if (! FLOAT_TYPE_P (type))
11553 /* Transform x * -C into -x * C if x is easily negatable. */
11554 if (TREE_CODE (op1) == INTEGER_CST
11555 && tree_int_cst_sgn (op1) == -1
11556 && negate_expr_p (op0)
11557 && negate_expr_p (op1)
11558 && (tem = negate_expr (op1)) != op1
11559 && ! TREE_OVERFLOW (tem))
11560 return fold_build2_loc (loc, MULT_EXPR, type,
11561 fold_convert_loc (loc, type,
11562 negate_expr (op0)), tem);
11564 strict_overflow_p = false;
11565 if (TREE_CODE (arg1) == INTEGER_CST
11566 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11567 &strict_overflow_p)) != 0)
11569 if (strict_overflow_p)
11570 fold_overflow_warning (("assuming signed overflow does not "
11571 "occur when simplifying "
11572 "multiplication"),
11573 WARN_STRICT_OVERFLOW_MISC);
11574 return fold_convert_loc (loc, type, tem);
11577 /* Optimize z * conj(z) for integer complex numbers. */
11578 if (TREE_CODE (arg0) == CONJ_EXPR
11579 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11580 return fold_mult_zconjz (loc, type, arg1);
11581 if (TREE_CODE (arg1) == CONJ_EXPR
11582 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11583 return fold_mult_zconjz (loc, type, arg0);
11585 else
11587 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11588 This is not the same for NaNs or if signed zeros are
11589 involved. */
11590 if (!HONOR_NANS (arg0)
11591 && !HONOR_SIGNED_ZEROS (arg0)
11592 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11593 && TREE_CODE (arg1) == COMPLEX_CST
11594 && real_zerop (TREE_REALPART (arg1)))
11596 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11597 if (real_onep (TREE_IMAGPART (arg1)))
11598 return
11599 fold_build2_loc (loc, COMPLEX_EXPR, type,
11600 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11601 rtype, arg0)),
11602 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11603 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11604 return
11605 fold_build2_loc (loc, COMPLEX_EXPR, type,
11606 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11607 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11608 rtype, arg0)));
11611 /* Optimize z * conj(z) for floating point complex numbers.
11612 Guarded by flag_unsafe_math_optimizations as non-finite
11613 imaginary components don't produce scalar results. */
11614 if (flag_unsafe_math_optimizations
11615 && TREE_CODE (arg0) == CONJ_EXPR
11616 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11617 return fold_mult_zconjz (loc, type, arg1);
11618 if (flag_unsafe_math_optimizations
11619 && TREE_CODE (arg1) == CONJ_EXPR
11620 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11621 return fold_mult_zconjz (loc, type, arg0);
11623 goto associate;
11625 case BIT_IOR_EXPR:
11626 /* Canonicalize (X & C1) | C2. */
11627 if (TREE_CODE (arg0) == BIT_AND_EXPR
11628 && TREE_CODE (arg1) == INTEGER_CST
11629 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11631 int width = TYPE_PRECISION (type), w;
11632 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11633 wide_int c2 = wi::to_wide (arg1);
11635 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11636 if ((c1 & c2) == c1)
11637 return omit_one_operand_loc (loc, type, arg1,
11638 TREE_OPERAND (arg0, 0));
11640 wide_int msk = wi::mask (width, false,
11641 TYPE_PRECISION (TREE_TYPE (arg1)));
11643 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11644 if (wi::bit_and_not (msk, c1 | c2) == 0)
11646 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11647 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11650 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11651 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11652 mode which allows further optimizations. */
11653 c1 &= msk;
11654 c2 &= msk;
11655 wide_int c3 = wi::bit_and_not (c1, c2);
11656 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11658 wide_int mask = wi::mask (w, false,
11659 TYPE_PRECISION (type));
11660 if (((c1 | c2) & mask) == mask
11661 && wi::bit_and_not (c1, mask) == 0)
11663 c3 = mask;
11664 break;
11668 if (c3 != c1)
11670 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11671 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11672 wide_int_to_tree (type, c3));
11673 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11677 /* See if this can be simplified into a rotate first. If that
11678 is unsuccessful continue in the association code. */
11679 goto bit_rotate;
11681 case BIT_XOR_EXPR:
11682 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11683 if (TREE_CODE (arg0) == BIT_AND_EXPR
11684 && INTEGRAL_TYPE_P (type)
11685 && integer_onep (TREE_OPERAND (arg0, 1))
11686 && integer_onep (arg1))
11687 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11688 build_zero_cst (TREE_TYPE (arg0)));
11690 /* See if this can be simplified into a rotate first. If that
11691 is unsuccessful continue in the association code. */
11692 goto bit_rotate;
11694 case BIT_AND_EXPR:
11695 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11696 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11697 && INTEGRAL_TYPE_P (type)
11698 && integer_onep (TREE_OPERAND (arg0, 1))
11699 && integer_onep (arg1))
11701 tree tem2;
11702 tem = TREE_OPERAND (arg0, 0);
11703 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11704 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11705 tem, tem2);
11706 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11707 build_zero_cst (TREE_TYPE (tem)));
11709 /* Fold ~X & 1 as (X & 1) == 0. */
11710 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11711 && INTEGRAL_TYPE_P (type)
11712 && integer_onep (arg1))
11714 tree tem2;
11715 tem = TREE_OPERAND (arg0, 0);
11716 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11717 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11718 tem, tem2);
11719 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11720 build_zero_cst (TREE_TYPE (tem)));
11722 /* Fold !X & 1 as X == 0. */
11723 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11724 && integer_onep (arg1))
11726 tem = TREE_OPERAND (arg0, 0);
11727 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11728 build_zero_cst (TREE_TYPE (tem)));
11731 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11732 multiple of 1 << CST. */
11733 if (TREE_CODE (arg1) == INTEGER_CST)
11735 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11736 wide_int ncst1 = -cst1;
11737 if ((cst1 & ncst1) == ncst1
11738 && multiple_of_p (type, arg0,
11739 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11740 return fold_convert_loc (loc, type, arg0);
11743 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11744 bits from CST2. */
11745 if (TREE_CODE (arg1) == INTEGER_CST
11746 && TREE_CODE (arg0) == MULT_EXPR
11747 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11749 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11750 wide_int masked
11751 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11753 if (masked == 0)
11754 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11755 arg0, arg1);
11756 else if (masked != warg1)
11758 /* Avoid the transform if arg1 is a mask of some
11759 mode which allows further optimizations. */
11760 int pop = wi::popcount (warg1);
11761 if (!(pop >= BITS_PER_UNIT
11762 && pow2p_hwi (pop)
11763 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11764 return fold_build2_loc (loc, code, type, op0,
11765 wide_int_to_tree (type, masked));
11769 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11770 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11771 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11773 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11775 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11776 if (mask == -1)
11777 return
11778 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11781 goto associate;
11783 case RDIV_EXPR:
11784 /* Don't touch a floating-point divide by zero unless the mode
11785 of the constant can represent infinity. */
11786 if (TREE_CODE (arg1) == REAL_CST
11787 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11788 && real_zerop (arg1))
11789 return NULL_TREE;
11791 /* (-A) / (-B) -> A / B */
11792 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11793 return fold_build2_loc (loc, RDIV_EXPR, type,
11794 TREE_OPERAND (arg0, 0),
11795 negate_expr (arg1));
11796 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11797 return fold_build2_loc (loc, RDIV_EXPR, type,
11798 negate_expr (arg0),
11799 TREE_OPERAND (arg1, 0));
11800 return NULL_TREE;
11802 case TRUNC_DIV_EXPR:
11803 /* Fall through */
11805 case FLOOR_DIV_EXPR:
11806 /* Simplify A / (B << N) where A and B are positive and B is
11807 a power of 2, to A >> (N + log2(B)). */
11808 strict_overflow_p = false;
11809 if (TREE_CODE (arg1) == LSHIFT_EXPR
11810 && (TYPE_UNSIGNED (type)
11811 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11813 tree sval = TREE_OPERAND (arg1, 0);
11814 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11816 tree sh_cnt = TREE_OPERAND (arg1, 1);
11817 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11818 wi::exact_log2 (wi::to_wide (sval)));
11820 if (strict_overflow_p)
11821 fold_overflow_warning (("assuming signed overflow does not "
11822 "occur when simplifying A / (B << N)"),
11823 WARN_STRICT_OVERFLOW_MISC);
11825 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11826 sh_cnt, pow2);
11827 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11828 fold_convert_loc (loc, type, arg0), sh_cnt);
11832 /* Fall through */
11834 case ROUND_DIV_EXPR:
11835 case CEIL_DIV_EXPR:
11836 case EXACT_DIV_EXPR:
11837 if (integer_zerop (arg1))
11838 return NULL_TREE;
11840 /* Convert -A / -B to A / B when the type is signed and overflow is
11841 undefined. */
11842 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11843 && TREE_CODE (op0) == NEGATE_EXPR
11844 && negate_expr_p (op1))
11846 if (ANY_INTEGRAL_TYPE_P (type))
11847 fold_overflow_warning (("assuming signed overflow does not occur "
11848 "when distributing negation across "
11849 "division"),
11850 WARN_STRICT_OVERFLOW_MISC);
11851 return fold_build2_loc (loc, code, type,
11852 fold_convert_loc (loc, type,
11853 TREE_OPERAND (arg0, 0)),
11854 negate_expr (op1));
11856 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11857 && TREE_CODE (arg1) == NEGATE_EXPR
11858 && negate_expr_p (op0))
11860 if (ANY_INTEGRAL_TYPE_P (type))
11861 fold_overflow_warning (("assuming signed overflow does not occur "
11862 "when distributing negation across "
11863 "division"),
11864 WARN_STRICT_OVERFLOW_MISC);
11865 return fold_build2_loc (loc, code, type,
11866 negate_expr (op0),
11867 fold_convert_loc (loc, type,
11868 TREE_OPERAND (arg1, 0)));
11871 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11872 operation, EXACT_DIV_EXPR.
11874 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11875 At one time others generated faster code, it's not clear if they do
11876 after the last round to changes to the DIV code in expmed.cc. */
11877 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11878 && multiple_of_p (type, arg0, arg1))
11879 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11880 fold_convert (type, arg0),
11881 fold_convert (type, arg1));
11883 strict_overflow_p = false;
11884 if (TREE_CODE (arg1) == INTEGER_CST
11885 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11886 &strict_overflow_p)) != 0)
11888 if (strict_overflow_p)
11889 fold_overflow_warning (("assuming signed overflow does not occur "
11890 "when simplifying division"),
11891 WARN_STRICT_OVERFLOW_MISC);
11892 return fold_convert_loc (loc, type, tem);
11895 return NULL_TREE;
11897 case CEIL_MOD_EXPR:
11898 case FLOOR_MOD_EXPR:
11899 case ROUND_MOD_EXPR:
11900 case TRUNC_MOD_EXPR:
11901 strict_overflow_p = false;
11902 if (TREE_CODE (arg1) == INTEGER_CST
11903 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11904 &strict_overflow_p)) != 0)
11906 if (strict_overflow_p)
11907 fold_overflow_warning (("assuming signed overflow does not occur "
11908 "when simplifying modulus"),
11909 WARN_STRICT_OVERFLOW_MISC);
11910 return fold_convert_loc (loc, type, tem);
11913 return NULL_TREE;
11915 case LROTATE_EXPR:
11916 case RROTATE_EXPR:
11917 case RSHIFT_EXPR:
11918 case LSHIFT_EXPR:
11919 /* Since negative shift count is not well-defined,
11920 don't try to compute it in the compiler. */
11921 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11922 return NULL_TREE;
11924 prec = element_precision (type);
11926 /* If we have a rotate of a bit operation with the rotate count and
11927 the second operand of the bit operation both constant,
11928 permute the two operations. */
11929 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11930 && (TREE_CODE (arg0) == BIT_AND_EXPR
11931 || TREE_CODE (arg0) == BIT_IOR_EXPR
11932 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11933 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11935 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11936 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11937 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11938 fold_build2_loc (loc, code, type,
11939 arg00, arg1),
11940 fold_build2_loc (loc, code, type,
11941 arg01, arg1));
11944 /* Two consecutive rotates adding up to the some integer
11945 multiple of the precision of the type can be ignored. */
11946 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11947 && TREE_CODE (arg0) == RROTATE_EXPR
11948 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11949 && wi::umod_trunc (wi::to_wide (arg1)
11950 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11951 prec) == 0)
11952 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11954 return NULL_TREE;
11956 case MIN_EXPR:
11957 case MAX_EXPR:
11958 goto associate;
11960 case TRUTH_ANDIF_EXPR:
11961 /* Note that the operands of this must be ints
11962 and their values must be 0 or 1.
11963 ("true" is a fixed value perhaps depending on the language.) */
11964 /* If first arg is constant zero, return it. */
11965 if (integer_zerop (arg0))
11966 return fold_convert_loc (loc, type, arg0);
11967 /* FALLTHRU */
11968 case TRUTH_AND_EXPR:
11969 /* If either arg is constant true, drop it. */
11970 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11971 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11972 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11973 /* Preserve sequence points. */
11974 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11975 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11976 /* If second arg is constant zero, result is zero, but first arg
11977 must be evaluated. */
11978 if (integer_zerop (arg1))
11979 return omit_one_operand_loc (loc, type, arg1, arg0);
11980 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11981 case will be handled here. */
11982 if (integer_zerop (arg0))
11983 return omit_one_operand_loc (loc, type, arg0, arg1);
11985 /* !X && X is always false. */
11986 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11987 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11988 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11989 /* X && !X is always false. */
11990 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11991 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11992 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11994 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11995 means A >= Y && A != MAX, but in this case we know that
11996 A < X <= MAX. */
11998 if (!TREE_SIDE_EFFECTS (arg0)
11999 && !TREE_SIDE_EFFECTS (arg1))
12001 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12002 if (tem && !operand_equal_p (tem, arg0, 0))
12003 return fold_build2_loc (loc, code, type, tem, arg1);
12005 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12006 if (tem && !operand_equal_p (tem, arg1, 0))
12007 return fold_build2_loc (loc, code, type, arg0, tem);
12010 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12011 != NULL_TREE)
12012 return tem;
12014 return NULL_TREE;
12016 case TRUTH_ORIF_EXPR:
12017 /* Note that the operands of this must be ints
12018 and their values must be 0 or true.
12019 ("true" is a fixed value perhaps depending on the language.) */
12020 /* If first arg is constant true, return it. */
12021 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12022 return fold_convert_loc (loc, type, arg0);
12023 /* FALLTHRU */
12024 case TRUTH_OR_EXPR:
12025 /* If either arg is constant zero, drop it. */
12026 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12027 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12028 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12029 /* Preserve sequence points. */
12030 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12031 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12032 /* If second arg is constant true, result is true, but we must
12033 evaluate first arg. */
12034 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12035 return omit_one_operand_loc (loc, type, arg1, arg0);
12036 /* Likewise for first arg, but note this only occurs here for
12037 TRUTH_OR_EXPR. */
12038 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12039 return omit_one_operand_loc (loc, type, arg0, arg1);
12041 /* !X || X is always true. */
12042 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12043 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12044 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12045 /* X || !X is always true. */
12046 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12047 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12048 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12050 /* (X && !Y) || (!X && Y) is X ^ Y */
12051 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12052 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12054 tree a0, a1, l0, l1, n0, n1;
12056 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12057 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12059 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12060 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12062 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12063 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12065 if ((operand_equal_p (n0, a0, 0)
12066 && operand_equal_p (n1, a1, 0))
12067 || (operand_equal_p (n0, a1, 0)
12068 && operand_equal_p (n1, a0, 0)))
12069 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12072 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12073 != NULL_TREE)
12074 return tem;
12076 return NULL_TREE;
12078 case TRUTH_XOR_EXPR:
12079 /* If the second arg is constant zero, drop it. */
12080 if (integer_zerop (arg1))
12081 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12082 /* If the second arg is constant true, this is a logical inversion. */
12083 if (integer_onep (arg1))
12085 tem = invert_truthvalue_loc (loc, arg0);
12086 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12088 /* Identical arguments cancel to zero. */
12089 if (operand_equal_p (arg0, arg1, 0))
12090 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12092 /* !X ^ X is always true. */
12093 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12094 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12095 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12097 /* X ^ !X is always true. */
12098 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12099 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12100 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12102 return NULL_TREE;
12104 case EQ_EXPR:
12105 case NE_EXPR:
12106 STRIP_NOPS (arg0);
12107 STRIP_NOPS (arg1);
12109 tem = fold_comparison (loc, code, type, op0, op1);
12110 if (tem != NULL_TREE)
12111 return tem;
12113 /* bool_var != 1 becomes !bool_var. */
12114 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12115 && code == NE_EXPR)
12116 return fold_convert_loc (loc, type,
12117 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12118 TREE_TYPE (arg0), arg0));
12120 /* bool_var == 0 becomes !bool_var. */
12121 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12122 && code == EQ_EXPR)
12123 return fold_convert_loc (loc, type,
12124 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12125 TREE_TYPE (arg0), arg0));
12127 /* !exp != 0 becomes !exp */
12128 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12129 && code == NE_EXPR)
12130 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12132 /* If this is an EQ or NE comparison with zero and ARG0 is
12133 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12134 two operations, but the latter can be done in one less insn
12135 on machines that have only two-operand insns or on which a
12136 constant cannot be the first operand. */
12137 if (TREE_CODE (arg0) == BIT_AND_EXPR
12138 && integer_zerop (arg1))
12140 tree arg00 = TREE_OPERAND (arg0, 0);
12141 tree arg01 = TREE_OPERAND (arg0, 1);
12142 if (TREE_CODE (arg00) == LSHIFT_EXPR
12143 && integer_onep (TREE_OPERAND (arg00, 0)))
12145 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12146 arg01, TREE_OPERAND (arg00, 1));
12147 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12148 build_one_cst (TREE_TYPE (arg0)));
12149 return fold_build2_loc (loc, code, type,
12150 fold_convert_loc (loc, TREE_TYPE (arg1),
12151 tem), arg1);
12153 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12154 && integer_onep (TREE_OPERAND (arg01, 0)))
12156 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12157 arg00, TREE_OPERAND (arg01, 1));
12158 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12159 build_one_cst (TREE_TYPE (arg0)));
12160 return fold_build2_loc (loc, code, type,
12161 fold_convert_loc (loc, TREE_TYPE (arg1),
12162 tem), arg1);
12166 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12167 C1 is a valid shift constant, and C2 is a power of two, i.e.
12168 a single bit. */
12169 if (TREE_CODE (arg0) == BIT_AND_EXPR
12170 && integer_pow2p (TREE_OPERAND (arg0, 1))
12171 && integer_zerop (arg1))
12173 tree arg00 = TREE_OPERAND (arg0, 0);
12174 STRIP_NOPS (arg00);
12175 if (TREE_CODE (arg00) == RSHIFT_EXPR
12176 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12178 tree itype = TREE_TYPE (arg00);
12179 tree arg001 = TREE_OPERAND (arg00, 1);
12180 prec = TYPE_PRECISION (itype);
12182 /* Check for a valid shift count. */
12183 if (wi::ltu_p (wi::to_wide (arg001), prec))
12185 tree arg01 = TREE_OPERAND (arg0, 1);
12186 tree arg000 = TREE_OPERAND (arg00, 0);
12187 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12188 /* If (C2 << C1) doesn't overflow, then
12189 ((X >> C1) & C2) != 0 can be rewritten as
12190 (X & (C2 << C1)) != 0. */
12191 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12193 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12194 arg01, arg001);
12195 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12196 arg000, tem);
12197 return fold_build2_loc (loc, code, type, tem,
12198 fold_convert_loc (loc, itype, arg1));
12200 /* Otherwise, for signed (arithmetic) shifts,
12201 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12202 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12203 else if (!TYPE_UNSIGNED (itype))
12204 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12205 : LT_EXPR,
12206 type, arg000,
12207 build_int_cst (itype, 0));
12208 /* Otherwise, of unsigned (logical) shifts,
12209 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12210 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12211 else
12212 return omit_one_operand_loc (loc, type,
12213 code == EQ_EXPR ? integer_one_node
12214 : integer_zero_node,
12215 arg000);
12220 /* If this is a comparison of a field, we may be able to simplify it. */
12221 if ((TREE_CODE (arg0) == COMPONENT_REF
12222 || TREE_CODE (arg0) == BIT_FIELD_REF)
12223 /* Handle the constant case even without -O
12224 to make sure the warnings are given. */
12225 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12227 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12228 if (t1)
12229 return t1;
12232 /* Optimize comparisons of strlen vs zero to a compare of the
12233 first character of the string vs zero. To wit,
12234 strlen(ptr) == 0 => *ptr == 0
12235 strlen(ptr) != 0 => *ptr != 0
12236 Other cases should reduce to one of these two (or a constant)
12237 due to the return value of strlen being unsigned. */
12238 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12240 tree fndecl = get_callee_fndecl (arg0);
12242 if (fndecl
12243 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12244 && call_expr_nargs (arg0) == 1
12245 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12246 == POINTER_TYPE))
12248 tree ptrtype
12249 = build_pointer_type (build_qualified_type (char_type_node,
12250 TYPE_QUAL_CONST));
12251 tree ptr = fold_convert_loc (loc, ptrtype,
12252 CALL_EXPR_ARG (arg0, 0));
12253 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12254 return fold_build2_loc (loc, code, type, iref,
12255 build_int_cst (TREE_TYPE (iref), 0));
12259 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12260 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12261 if (TREE_CODE (arg0) == RSHIFT_EXPR
12262 && integer_zerop (arg1)
12263 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12265 tree arg00 = TREE_OPERAND (arg0, 0);
12266 tree arg01 = TREE_OPERAND (arg0, 1);
12267 tree itype = TREE_TYPE (arg00);
12268 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12270 if (TYPE_UNSIGNED (itype))
12272 itype = signed_type_for (itype);
12273 arg00 = fold_convert_loc (loc, itype, arg00);
12275 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12276 type, arg00, build_zero_cst (itype));
12280 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12281 (X & C) == 0 when C is a single bit. */
12282 if (TREE_CODE (arg0) == BIT_AND_EXPR
12283 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12284 && integer_zerop (arg1)
12285 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12287 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12288 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12289 TREE_OPERAND (arg0, 1));
12290 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12291 type, tem,
12292 fold_convert_loc (loc, TREE_TYPE (arg0),
12293 arg1));
12296 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12297 constant C is a power of two, i.e. a single bit. */
12298 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12299 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12300 && integer_zerop (arg1)
12301 && integer_pow2p (TREE_OPERAND (arg0, 1))
12302 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12303 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12305 tree arg00 = TREE_OPERAND (arg0, 0);
12306 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12307 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12310 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12311 when is C is a power of two, i.e. a single bit. */
12312 if (TREE_CODE (arg0) == BIT_AND_EXPR
12313 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12314 && integer_zerop (arg1)
12315 && integer_pow2p (TREE_OPERAND (arg0, 1))
12316 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12317 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12319 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12320 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12321 arg000, TREE_OPERAND (arg0, 1));
12322 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12323 tem, build_int_cst (TREE_TYPE (tem), 0));
12326 if (integer_zerop (arg1)
12327 && tree_expr_nonzero_p (arg0))
12329 tree res = constant_boolean_node (code==NE_EXPR, type);
12330 return omit_one_operand_loc (loc, type, res, arg0);
12333 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12334 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12336 tree arg00 = TREE_OPERAND (arg0, 0);
12337 tree arg01 = TREE_OPERAND (arg0, 1);
12338 tree arg10 = TREE_OPERAND (arg1, 0);
12339 tree arg11 = TREE_OPERAND (arg1, 1);
12340 tree itype = TREE_TYPE (arg0);
12342 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12343 operand_equal_p guarantees no side-effects so we don't need
12344 to use omit_one_operand on Z. */
12345 if (operand_equal_p (arg01, arg11, 0))
12346 return fold_build2_loc (loc, code, type, arg00,
12347 fold_convert_loc (loc, TREE_TYPE (arg00),
12348 arg10));
12349 if (operand_equal_p (arg01, arg10, 0))
12350 return fold_build2_loc (loc, code, type, arg00,
12351 fold_convert_loc (loc, TREE_TYPE (arg00),
12352 arg11));
12353 if (operand_equal_p (arg00, arg11, 0))
12354 return fold_build2_loc (loc, code, type, arg01,
12355 fold_convert_loc (loc, TREE_TYPE (arg01),
12356 arg10));
12357 if (operand_equal_p (arg00, arg10, 0))
12358 return fold_build2_loc (loc, code, type, arg01,
12359 fold_convert_loc (loc, TREE_TYPE (arg01),
12360 arg11));
12362 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12363 if (TREE_CODE (arg01) == INTEGER_CST
12364 && TREE_CODE (arg11) == INTEGER_CST)
12366 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12367 fold_convert_loc (loc, itype, arg11));
12368 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12369 return fold_build2_loc (loc, code, type, tem,
12370 fold_convert_loc (loc, itype, arg10));
12374 /* Attempt to simplify equality/inequality comparisons of complex
12375 values. Only lower the comparison if the result is known or
12376 can be simplified to a single scalar comparison. */
12377 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12378 || TREE_CODE (arg0) == COMPLEX_CST)
12379 && (TREE_CODE (arg1) == COMPLEX_EXPR
12380 || TREE_CODE (arg1) == COMPLEX_CST))
12382 tree real0, imag0, real1, imag1;
12383 tree rcond, icond;
12385 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12387 real0 = TREE_OPERAND (arg0, 0);
12388 imag0 = TREE_OPERAND (arg0, 1);
12390 else
12392 real0 = TREE_REALPART (arg0);
12393 imag0 = TREE_IMAGPART (arg0);
12396 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12398 real1 = TREE_OPERAND (arg1, 0);
12399 imag1 = TREE_OPERAND (arg1, 1);
12401 else
12403 real1 = TREE_REALPART (arg1);
12404 imag1 = TREE_IMAGPART (arg1);
12407 rcond = fold_binary_loc (loc, code, type, real0, real1);
12408 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12410 if (integer_zerop (rcond))
12412 if (code == EQ_EXPR)
12413 return omit_two_operands_loc (loc, type, boolean_false_node,
12414 imag0, imag1);
12415 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12417 else
12419 if (code == NE_EXPR)
12420 return omit_two_operands_loc (loc, type, boolean_true_node,
12421 imag0, imag1);
12422 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12426 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12427 if (icond && TREE_CODE (icond) == INTEGER_CST)
12429 if (integer_zerop (icond))
12431 if (code == EQ_EXPR)
12432 return omit_two_operands_loc (loc, type, boolean_false_node,
12433 real0, real1);
12434 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12436 else
12438 if (code == NE_EXPR)
12439 return omit_two_operands_loc (loc, type, boolean_true_node,
12440 real0, real1);
12441 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12446 return NULL_TREE;
12448 case LT_EXPR:
12449 case GT_EXPR:
12450 case LE_EXPR:
12451 case GE_EXPR:
12452 tem = fold_comparison (loc, code, type, op0, op1);
12453 if (tem != NULL_TREE)
12454 return tem;
12456 /* Transform comparisons of the form X +- C CMP X. */
12457 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12459 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12460 && !HONOR_SNANS (arg0))
12462 tree arg01 = TREE_OPERAND (arg0, 1);
12463 enum tree_code code0 = TREE_CODE (arg0);
12464 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12466 /* (X - c) > X becomes false. */
12467 if (code == GT_EXPR
12468 && ((code0 == MINUS_EXPR && is_positive >= 0)
12469 || (code0 == PLUS_EXPR && is_positive <= 0)))
12470 return constant_boolean_node (0, type);
12472 /* Likewise (X + c) < X becomes false. */
12473 if (code == LT_EXPR
12474 && ((code0 == PLUS_EXPR && is_positive >= 0)
12475 || (code0 == MINUS_EXPR && is_positive <= 0)))
12476 return constant_boolean_node (0, type);
12478 /* Convert (X - c) <= X to true. */
12479 if (!HONOR_NANS (arg1)
12480 && code == LE_EXPR
12481 && ((code0 == MINUS_EXPR && is_positive >= 0)
12482 || (code0 == PLUS_EXPR && is_positive <= 0)))
12483 return constant_boolean_node (1, type);
12485 /* Convert (X + c) >= X to true. */
12486 if (!HONOR_NANS (arg1)
12487 && code == GE_EXPR
12488 && ((code0 == PLUS_EXPR && is_positive >= 0)
12489 || (code0 == MINUS_EXPR && is_positive <= 0)))
12490 return constant_boolean_node (1, type);
12493 /* If we are comparing an ABS_EXPR with a constant, we can
12494 convert all the cases into explicit comparisons, but they may
12495 well not be faster than doing the ABS and one comparison.
12496 But ABS (X) <= C is a range comparison, which becomes a subtraction
12497 and a comparison, and is probably faster. */
12498 if (code == LE_EXPR
12499 && TREE_CODE (arg1) == INTEGER_CST
12500 && TREE_CODE (arg0) == ABS_EXPR
12501 && ! TREE_SIDE_EFFECTS (arg0)
12502 && (tem = negate_expr (arg1)) != 0
12503 && TREE_CODE (tem) == INTEGER_CST
12504 && !TREE_OVERFLOW (tem))
12505 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12506 build2 (GE_EXPR, type,
12507 TREE_OPERAND (arg0, 0), tem),
12508 build2 (LE_EXPR, type,
12509 TREE_OPERAND (arg0, 0), arg1));
12511 /* Convert ABS_EXPR<x> >= 0 to true. */
12512 strict_overflow_p = false;
12513 if (code == GE_EXPR
12514 && (integer_zerop (arg1)
12515 || (! HONOR_NANS (arg0)
12516 && real_zerop (arg1)))
12517 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12519 if (strict_overflow_p)
12520 fold_overflow_warning (("assuming signed overflow does not occur "
12521 "when simplifying comparison of "
12522 "absolute value and zero"),
12523 WARN_STRICT_OVERFLOW_CONDITIONAL);
12524 return omit_one_operand_loc (loc, type,
12525 constant_boolean_node (true, type),
12526 arg0);
12529 /* Convert ABS_EXPR<x> < 0 to false. */
12530 strict_overflow_p = false;
12531 if (code == LT_EXPR
12532 && (integer_zerop (arg1) || real_zerop (arg1))
12533 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12535 if (strict_overflow_p)
12536 fold_overflow_warning (("assuming signed overflow does not occur "
12537 "when simplifying comparison of "
12538 "absolute value and zero"),
12539 WARN_STRICT_OVERFLOW_CONDITIONAL);
12540 return omit_one_operand_loc (loc, type,
12541 constant_boolean_node (false, type),
12542 arg0);
12545 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12546 and similarly for >= into !=. */
12547 if ((code == LT_EXPR || code == GE_EXPR)
12548 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12549 && TREE_CODE (arg1) == LSHIFT_EXPR
12550 && integer_onep (TREE_OPERAND (arg1, 0)))
12551 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12552 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12553 TREE_OPERAND (arg1, 1)),
12554 build_zero_cst (TREE_TYPE (arg0)));
12556 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12557 otherwise Y might be >= # of bits in X's type and thus e.g.
12558 (unsigned char) (1 << Y) for Y 15 might be 0.
12559 If the cast is widening, then 1 << Y should have unsigned type,
12560 otherwise if Y is number of bits in the signed shift type minus 1,
12561 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12562 31 might be 0xffffffff80000000. */
12563 if ((code == LT_EXPR || code == GE_EXPR)
12564 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12565 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12566 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12567 && CONVERT_EXPR_P (arg1)
12568 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12569 && (element_precision (TREE_TYPE (arg1))
12570 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12571 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12572 || (element_precision (TREE_TYPE (arg1))
12573 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12574 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12576 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12577 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12578 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12579 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12580 build_zero_cst (TREE_TYPE (arg0)));
12583 return NULL_TREE;
12585 case UNORDERED_EXPR:
12586 case ORDERED_EXPR:
12587 case UNLT_EXPR:
12588 case UNLE_EXPR:
12589 case UNGT_EXPR:
12590 case UNGE_EXPR:
12591 case UNEQ_EXPR:
12592 case LTGT_EXPR:
12593 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12595 tree targ0 = strip_float_extensions (arg0);
12596 tree targ1 = strip_float_extensions (arg1);
12597 tree newtype = TREE_TYPE (targ0);
12599 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12600 newtype = TREE_TYPE (targ1);
12602 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12603 return fold_build2_loc (loc, code, type,
12604 fold_convert_loc (loc, newtype, targ0),
12605 fold_convert_loc (loc, newtype, targ1));
12608 return NULL_TREE;
12610 case COMPOUND_EXPR:
12611 /* When pedantic, a compound expression can be neither an lvalue
12612 nor an integer constant expression. */
12613 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12614 return NULL_TREE;
12615 /* Don't let (0, 0) be null pointer constant. */
12616 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12617 : fold_convert_loc (loc, type, arg1);
12618 return tem;
12620 case ASSERT_EXPR:
12621 /* An ASSERT_EXPR should never be passed to fold_binary. */
12622 gcc_unreachable ();
12624 default:
12625 return NULL_TREE;
12626 } /* switch (code) */
12629 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12630 ((A & N) + B) & M -> (A + B) & M
12631 Similarly if (N & M) == 0,
12632 ((A | N) + B) & M -> (A + B) & M
12633 and for - instead of + (or unary - instead of +)
12634 and/or ^ instead of |.
12635 If B is constant and (B & M) == 0, fold into A & M.
12637 This function is a helper for match.pd patterns. Return non-NULL
12638 type in which the simplified operation should be performed only
12639 if any optimization is possible.
12641 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12642 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12643 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12644 +/-. */
12645 tree
12646 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12647 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12648 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12649 tree *pmop)
12651 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12652 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12653 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12654 if (~cst1 == 0
12655 || (cst1 & (cst1 + 1)) != 0
12656 || !INTEGRAL_TYPE_P (type)
12657 || (!TYPE_OVERFLOW_WRAPS (type)
12658 && TREE_CODE (type) != INTEGER_TYPE)
12659 || (wi::max_value (type) & cst1) != cst1)
12660 return NULL_TREE;
12662 enum tree_code codes[2] = { code00, code01 };
12663 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12664 int which = 0;
12665 wide_int cst0;
12667 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12668 arg1 (M) is == (1LL << cst) - 1.
12669 Store C into PMOP[0] and D into PMOP[1]. */
12670 pmop[0] = arg00;
12671 pmop[1] = arg01;
12672 which = code != NEGATE_EXPR;
12674 for (; which >= 0; which--)
12675 switch (codes[which])
12677 case BIT_AND_EXPR:
12678 case BIT_IOR_EXPR:
12679 case BIT_XOR_EXPR:
12680 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12681 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12682 if (codes[which] == BIT_AND_EXPR)
12684 if (cst0 != cst1)
12685 break;
12687 else if (cst0 != 0)
12688 break;
12689 /* If C or D is of the form (A & N) where
12690 (N & M) == M, or of the form (A | N) or
12691 (A ^ N) where (N & M) == 0, replace it with A. */
12692 pmop[which] = arg0xx[2 * which];
12693 break;
12694 case ERROR_MARK:
12695 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12696 break;
12697 /* If C or D is a N where (N & M) == 0, it can be
12698 omitted (replaced with 0). */
12699 if ((code == PLUS_EXPR
12700 || (code == MINUS_EXPR && which == 0))
12701 && (cst1 & wi::to_wide (pmop[which])) == 0)
12702 pmop[which] = build_int_cst (type, 0);
12703 /* Similarly, with C - N where (-N & M) == 0. */
12704 if (code == MINUS_EXPR
12705 && which == 1
12706 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12707 pmop[which] = build_int_cst (type, 0);
12708 break;
12709 default:
12710 gcc_unreachable ();
12713 /* Only build anything new if we optimized one or both arguments above. */
12714 if (pmop[0] == arg00 && pmop[1] == arg01)
12715 return NULL_TREE;
12717 if (TYPE_OVERFLOW_WRAPS (type))
12718 return type;
12719 else
12720 return unsigned_type_for (type);
12723 /* Used by contains_label_[p1]. */
12725 struct contains_label_data
12727 hash_set<tree> *pset;
12728 bool inside_switch_p;
12731 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12732 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12733 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12735 static tree
12736 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12738 contains_label_data *d = (contains_label_data *) data;
12739 switch (TREE_CODE (*tp))
12741 case LABEL_EXPR:
12742 return *tp;
12744 case CASE_LABEL_EXPR:
12745 if (!d->inside_switch_p)
12746 return *tp;
12747 return NULL_TREE;
12749 case SWITCH_EXPR:
12750 if (!d->inside_switch_p)
12752 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12753 return *tp;
12754 d->inside_switch_p = true;
12755 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12756 return *tp;
12757 d->inside_switch_p = false;
12758 *walk_subtrees = 0;
12760 return NULL_TREE;
12762 case GOTO_EXPR:
12763 *walk_subtrees = 0;
12764 return NULL_TREE;
12766 default:
12767 return NULL_TREE;
12771 /* Return whether the sub-tree ST contains a label which is accessible from
12772 outside the sub-tree. */
12774 static bool
12775 contains_label_p (tree st)
12777 hash_set<tree> pset;
12778 contains_label_data data = { &pset, false };
12779 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12782 /* Fold a ternary expression of code CODE and type TYPE with operands
12783 OP0, OP1, and OP2. Return the folded expression if folding is
12784 successful. Otherwise, return NULL_TREE. */
12786 tree
12787 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12788 tree op0, tree op1, tree op2)
12790 tree tem;
12791 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12792 enum tree_code_class kind = TREE_CODE_CLASS (code);
12794 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12795 && TREE_CODE_LENGTH (code) == 3);
12797 /* If this is a commutative operation, and OP0 is a constant, move it
12798 to OP1 to reduce the number of tests below. */
12799 if (commutative_ternary_tree_code (code)
12800 && tree_swap_operands_p (op0, op1))
12801 return fold_build3_loc (loc, code, type, op1, op0, op2);
12803 tem = generic_simplify (loc, code, type, op0, op1, op2);
12804 if (tem)
12805 return tem;
12807 /* Strip any conversions that don't change the mode. This is safe
12808 for every expression, except for a comparison expression because
12809 its signedness is derived from its operands. So, in the latter
12810 case, only strip conversions that don't change the signedness.
12812 Note that this is done as an internal manipulation within the
12813 constant folder, in order to find the simplest representation of
12814 the arguments so that their form can be studied. In any cases,
12815 the appropriate type conversions should be put back in the tree
12816 that will get out of the constant folder. */
12817 if (op0)
12819 arg0 = op0;
12820 STRIP_NOPS (arg0);
12823 if (op1)
12825 arg1 = op1;
12826 STRIP_NOPS (arg1);
12829 if (op2)
12831 arg2 = op2;
12832 STRIP_NOPS (arg2);
12835 switch (code)
12837 case COMPONENT_REF:
12838 if (TREE_CODE (arg0) == CONSTRUCTOR
12839 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12841 unsigned HOST_WIDE_INT idx;
12842 tree field, value;
12843 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12844 if (field == arg1)
12845 return value;
12847 return NULL_TREE;
12849 case COND_EXPR:
12850 case VEC_COND_EXPR:
12851 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12852 so all simple results must be passed through pedantic_non_lvalue. */
12853 if (TREE_CODE (arg0) == INTEGER_CST)
12855 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12856 tem = integer_zerop (arg0) ? op2 : op1;
12857 /* Only optimize constant conditions when the selected branch
12858 has the same type as the COND_EXPR. This avoids optimizing
12859 away "c ? x : throw", where the throw has a void type.
12860 Avoid throwing away that operand which contains label. */
12861 if ((!TREE_SIDE_EFFECTS (unused_op)
12862 || !contains_label_p (unused_op))
12863 && (! VOID_TYPE_P (TREE_TYPE (tem))
12864 || VOID_TYPE_P (type)))
12865 return protected_set_expr_location_unshare (tem, loc);
12866 return NULL_TREE;
12868 else if (TREE_CODE (arg0) == VECTOR_CST)
12870 unsigned HOST_WIDE_INT nelts;
12871 if ((TREE_CODE (arg1) == VECTOR_CST
12872 || TREE_CODE (arg1) == CONSTRUCTOR)
12873 && (TREE_CODE (arg2) == VECTOR_CST
12874 || TREE_CODE (arg2) == CONSTRUCTOR)
12875 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12877 vec_perm_builder sel (nelts, nelts, 1);
12878 for (unsigned int i = 0; i < nelts; i++)
12880 tree val = VECTOR_CST_ELT (arg0, i);
12881 if (integer_all_onesp (val))
12882 sel.quick_push (i);
12883 else if (integer_zerop (val))
12884 sel.quick_push (nelts + i);
12885 else /* Currently unreachable. */
12886 return NULL_TREE;
12888 vec_perm_indices indices (sel, 2, nelts);
12889 tree t = fold_vec_perm (type, arg1, arg2, indices);
12890 if (t != NULL_TREE)
12891 return t;
12895 /* If we have A op B ? A : C, we may be able to convert this to a
12896 simpler expression, depending on the operation and the values
12897 of B and C. Signed zeros prevent all of these transformations,
12898 for reasons given above each one.
12900 Also try swapping the arguments and inverting the conditional. */
12901 if (COMPARISON_CLASS_P (arg0)
12902 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12903 && !HONOR_SIGNED_ZEROS (op1))
12905 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12906 TREE_OPERAND (arg0, 0),
12907 TREE_OPERAND (arg0, 1),
12908 op1, op2);
12909 if (tem)
12910 return tem;
12913 if (COMPARISON_CLASS_P (arg0)
12914 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12915 && !HONOR_SIGNED_ZEROS (op2))
12917 enum tree_code comp_code = TREE_CODE (arg0);
12918 tree arg00 = TREE_OPERAND (arg0, 0);
12919 tree arg01 = TREE_OPERAND (arg0, 1);
12920 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12921 if (comp_code != ERROR_MARK)
12922 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12923 arg00,
12924 arg01,
12925 op2, op1);
12926 if (tem)
12927 return tem;
12930 /* If the second operand is simpler than the third, swap them
12931 since that produces better jump optimization results. */
12932 if (truth_value_p (TREE_CODE (arg0))
12933 && tree_swap_operands_p (op1, op2))
12935 location_t loc0 = expr_location_or (arg0, loc);
12936 /* See if this can be inverted. If it can't, possibly because
12937 it was a floating-point inequality comparison, don't do
12938 anything. */
12939 tem = fold_invert_truthvalue (loc0, arg0);
12940 if (tem)
12941 return fold_build3_loc (loc, code, type, tem, op2, op1);
12944 /* Convert A ? 1 : 0 to simply A. */
12945 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12946 : (integer_onep (op1)
12947 && !VECTOR_TYPE_P (type)))
12948 && integer_zerop (op2)
12949 /* If we try to convert OP0 to our type, the
12950 call to fold will try to move the conversion inside
12951 a COND, which will recurse. In that case, the COND_EXPR
12952 is probably the best choice, so leave it alone. */
12953 && type == TREE_TYPE (arg0))
12954 return protected_set_expr_location_unshare (arg0, loc);
12956 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12957 over COND_EXPR in cases such as floating point comparisons. */
12958 if (integer_zerop (op1)
12959 && code == COND_EXPR
12960 && integer_onep (op2)
12961 && !VECTOR_TYPE_P (type)
12962 && truth_value_p (TREE_CODE (arg0)))
12963 return fold_convert_loc (loc, type,
12964 invert_truthvalue_loc (loc, arg0));
12966 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12967 if (TREE_CODE (arg0) == LT_EXPR
12968 && integer_zerop (TREE_OPERAND (arg0, 1))
12969 && integer_zerop (op2)
12970 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12972 /* sign_bit_p looks through both zero and sign extensions,
12973 but for this optimization only sign extensions are
12974 usable. */
12975 tree tem2 = TREE_OPERAND (arg0, 0);
12976 while (tem != tem2)
12978 if (TREE_CODE (tem2) != NOP_EXPR
12979 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12981 tem = NULL_TREE;
12982 break;
12984 tem2 = TREE_OPERAND (tem2, 0);
12986 /* sign_bit_p only checks ARG1 bits within A's precision.
12987 If <sign bit of A> has wider type than A, bits outside
12988 of A's precision in <sign bit of A> need to be checked.
12989 If they are all 0, this optimization needs to be done
12990 in unsigned A's type, if they are all 1 in signed A's type,
12991 otherwise this can't be done. */
12992 if (tem
12993 && TYPE_PRECISION (TREE_TYPE (tem))
12994 < TYPE_PRECISION (TREE_TYPE (arg1))
12995 && TYPE_PRECISION (TREE_TYPE (tem))
12996 < TYPE_PRECISION (type))
12998 int inner_width, outer_width;
12999 tree tem_type;
13001 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13002 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13003 if (outer_width > TYPE_PRECISION (type))
13004 outer_width = TYPE_PRECISION (type);
13006 wide_int mask = wi::shifted_mask
13007 (inner_width, outer_width - inner_width, false,
13008 TYPE_PRECISION (TREE_TYPE (arg1)));
13010 wide_int common = mask & wi::to_wide (arg1);
13011 if (common == mask)
13013 tem_type = signed_type_for (TREE_TYPE (tem));
13014 tem = fold_convert_loc (loc, tem_type, tem);
13016 else if (common == 0)
13018 tem_type = unsigned_type_for (TREE_TYPE (tem));
13019 tem = fold_convert_loc (loc, tem_type, tem);
13021 else
13022 tem = NULL;
13025 if (tem)
13026 return
13027 fold_convert_loc (loc, type,
13028 fold_build2_loc (loc, BIT_AND_EXPR,
13029 TREE_TYPE (tem), tem,
13030 fold_convert_loc (loc,
13031 TREE_TYPE (tem),
13032 arg1)));
13035 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13036 already handled above. */
13037 if (TREE_CODE (arg0) == BIT_AND_EXPR
13038 && integer_onep (TREE_OPERAND (arg0, 1))
13039 && integer_zerop (op2)
13040 && integer_pow2p (arg1))
13042 tree tem = TREE_OPERAND (arg0, 0);
13043 STRIP_NOPS (tem);
13044 if (TREE_CODE (tem) == RSHIFT_EXPR
13045 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13046 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13047 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13048 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13049 fold_convert_loc (loc, type,
13050 TREE_OPERAND (tem, 0)),
13051 op1);
13054 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13055 is probably obsolete because the first operand should be a
13056 truth value (that's why we have the two cases above), but let's
13057 leave it in until we can confirm this for all front-ends. */
13058 if (integer_zerop (op2)
13059 && TREE_CODE (arg0) == NE_EXPR
13060 && integer_zerop (TREE_OPERAND (arg0, 1))
13061 && integer_pow2p (arg1)
13062 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13063 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13064 arg1, OEP_ONLY_CONST)
13065 /* operand_equal_p compares just value, not precision, so e.g.
13066 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13067 second operand 32-bit -128, which is not a power of two (or vice
13068 versa. */
13069 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13070 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13072 /* Disable the transformations below for vectors, since
13073 fold_binary_op_with_conditional_arg may undo them immediately,
13074 yielding an infinite loop. */
13075 if (code == VEC_COND_EXPR)
13076 return NULL_TREE;
13078 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13079 if (integer_zerop (op2)
13080 && truth_value_p (TREE_CODE (arg0))
13081 && truth_value_p (TREE_CODE (arg1))
13082 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13083 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13084 : TRUTH_ANDIF_EXPR,
13085 type, fold_convert_loc (loc, type, arg0), op1);
13087 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13088 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13089 && truth_value_p (TREE_CODE (arg0))
13090 && truth_value_p (TREE_CODE (arg1))
13091 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13093 location_t loc0 = expr_location_or (arg0, loc);
13094 /* Only perform transformation if ARG0 is easily inverted. */
13095 tem = fold_invert_truthvalue (loc0, arg0);
13096 if (tem)
13097 return fold_build2_loc (loc, code == VEC_COND_EXPR
13098 ? BIT_IOR_EXPR
13099 : TRUTH_ORIF_EXPR,
13100 type, fold_convert_loc (loc, type, tem),
13101 op1);
13104 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13105 if (integer_zerop (arg1)
13106 && truth_value_p (TREE_CODE (arg0))
13107 && truth_value_p (TREE_CODE (op2))
13108 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13110 location_t loc0 = expr_location_or (arg0, loc);
13111 /* Only perform transformation if ARG0 is easily inverted. */
13112 tem = fold_invert_truthvalue (loc0, arg0);
13113 if (tem)
13114 return fold_build2_loc (loc, code == VEC_COND_EXPR
13115 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13116 type, fold_convert_loc (loc, type, tem),
13117 op2);
13120 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13121 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13122 && truth_value_p (TREE_CODE (arg0))
13123 && truth_value_p (TREE_CODE (op2))
13124 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13125 return fold_build2_loc (loc, code == VEC_COND_EXPR
13126 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13127 type, fold_convert_loc (loc, type, arg0), op2);
13129 return NULL_TREE;
13131 case CALL_EXPR:
13132 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13133 of fold_ternary on them. */
13134 gcc_unreachable ();
13136 case BIT_FIELD_REF:
13137 if (TREE_CODE (arg0) == VECTOR_CST
13138 && (type == TREE_TYPE (TREE_TYPE (arg0))
13139 || (VECTOR_TYPE_P (type)
13140 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13141 && tree_fits_uhwi_p (op1)
13142 && tree_fits_uhwi_p (op2))
13144 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13145 unsigned HOST_WIDE_INT width
13146 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13147 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13148 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13149 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13151 if (n != 0
13152 && (idx % width) == 0
13153 && (n % width) == 0
13154 && known_le ((idx + n) / width,
13155 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13157 idx = idx / width;
13158 n = n / width;
13160 if (TREE_CODE (arg0) == VECTOR_CST)
13162 if (n == 1)
13164 tem = VECTOR_CST_ELT (arg0, idx);
13165 if (VECTOR_TYPE_P (type))
13166 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13167 return tem;
13170 tree_vector_builder vals (type, n, 1);
13171 for (unsigned i = 0; i < n; ++i)
13172 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13173 return vals.build ();
13178 /* On constants we can use native encode/interpret to constant
13179 fold (nearly) all BIT_FIELD_REFs. */
13180 if (CONSTANT_CLASS_P (arg0)
13181 && can_native_interpret_type_p (type)
13182 && BITS_PER_UNIT == 8
13183 && tree_fits_uhwi_p (op1)
13184 && tree_fits_uhwi_p (op2))
13186 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13187 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13188 /* Limit us to a reasonable amount of work. To relax the
13189 other limitations we need bit-shifting of the buffer
13190 and rounding up the size. */
13191 if (bitpos % BITS_PER_UNIT == 0
13192 && bitsize % BITS_PER_UNIT == 0
13193 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13195 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13196 unsigned HOST_WIDE_INT len
13197 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13198 bitpos / BITS_PER_UNIT);
13199 if (len > 0
13200 && len * BITS_PER_UNIT >= bitsize)
13202 tree v = native_interpret_expr (type, b,
13203 bitsize / BITS_PER_UNIT);
13204 if (v)
13205 return v;
13210 return NULL_TREE;
13212 case VEC_PERM_EXPR:
13213 /* Perform constant folding of BIT_INSERT_EXPR. */
13214 if (TREE_CODE (arg2) == VECTOR_CST
13215 && TREE_CODE (op0) == VECTOR_CST
13216 && TREE_CODE (op1) == VECTOR_CST)
13218 /* Build a vector of integers from the tree mask. */
13219 vec_perm_builder builder;
13220 if (!tree_to_vec_perm_builder (&builder, arg2))
13221 return NULL_TREE;
13223 /* Create a vec_perm_indices for the integer vector. */
13224 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13225 bool single_arg = (op0 == op1);
13226 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13227 return fold_vec_perm (type, op0, op1, sel);
13229 return NULL_TREE;
13231 case BIT_INSERT_EXPR:
13232 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13233 if (TREE_CODE (arg0) == INTEGER_CST
13234 && TREE_CODE (arg1) == INTEGER_CST)
13236 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13237 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13238 wide_int tem = (wi::to_wide (arg0)
13239 & wi::shifted_mask (bitpos, bitsize, true,
13240 TYPE_PRECISION (type)));
13241 wide_int tem2
13242 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13243 bitsize), bitpos);
13244 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13246 else if (TREE_CODE (arg0) == VECTOR_CST
13247 && CONSTANT_CLASS_P (arg1)
13248 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13249 TREE_TYPE (arg1)))
13251 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13252 unsigned HOST_WIDE_INT elsize
13253 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13254 if (bitpos % elsize == 0)
13256 unsigned k = bitpos / elsize;
13257 unsigned HOST_WIDE_INT nelts;
13258 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13259 return arg0;
13260 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13262 tree_vector_builder elts (type, nelts, 1);
13263 elts.quick_grow (nelts);
13264 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13265 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13266 return elts.build ();
13270 return NULL_TREE;
13272 default:
13273 return NULL_TREE;
13274 } /* switch (code) */
13277 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13278 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13279 constructor element index of the value returned. If the element is
13280 not found NULL_TREE is returned and *CTOR_IDX is updated to
13281 the index of the element after the ACCESS_INDEX position (which
13282 may be outside of the CTOR array). */
13284 tree
13285 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13286 unsigned *ctor_idx)
13288 tree index_type = NULL_TREE;
13289 signop index_sgn = UNSIGNED;
13290 offset_int low_bound = 0;
13292 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13294 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13295 if (domain_type && TYPE_MIN_VALUE (domain_type))
13297 /* Static constructors for variably sized objects makes no sense. */
13298 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13299 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13300 /* ??? When it is obvious that the range is signed, treat it so. */
13301 if (TYPE_UNSIGNED (index_type)
13302 && TYPE_MAX_VALUE (domain_type)
13303 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13304 TYPE_MIN_VALUE (domain_type)))
13306 index_sgn = SIGNED;
13307 low_bound
13308 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13309 SIGNED);
13311 else
13313 index_sgn = TYPE_SIGN (index_type);
13314 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13319 if (index_type)
13320 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13321 index_sgn);
13323 offset_int index = low_bound;
13324 if (index_type)
13325 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13327 offset_int max_index = index;
13328 unsigned cnt;
13329 tree cfield, cval;
13330 bool first_p = true;
13332 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13334 /* Array constructor might explicitly set index, or specify a range,
13335 or leave index NULL meaning that it is next index after previous
13336 one. */
13337 if (cfield)
13339 if (TREE_CODE (cfield) == INTEGER_CST)
13340 max_index = index
13341 = offset_int::from (wi::to_wide (cfield), index_sgn);
13342 else
13344 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13345 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13346 index_sgn);
13347 max_index
13348 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13349 index_sgn);
13350 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13353 else if (!first_p)
13355 index = max_index + 1;
13356 if (index_type)
13357 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13358 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13359 max_index = index;
13361 else
13362 first_p = false;
13364 /* Do we have match? */
13365 if (wi::cmp (access_index, index, index_sgn) >= 0)
13367 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13369 if (ctor_idx)
13370 *ctor_idx = cnt;
13371 return cval;
13374 else if (in_gimple_form)
13375 /* We're past the element we search for. Note during parsing
13376 the elements might not be sorted.
13377 ??? We should use a binary search and a flag on the
13378 CONSTRUCTOR as to whether elements are sorted in declaration
13379 order. */
13380 break;
13382 if (ctor_idx)
13383 *ctor_idx = cnt;
13384 return NULL_TREE;
13387 /* Perform constant folding and related simplification of EXPR.
13388 The related simplifications include x*1 => x, x*0 => 0, etc.,
13389 and application of the associative law.
13390 NOP_EXPR conversions may be removed freely (as long as we
13391 are careful not to change the type of the overall expression).
13392 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13393 but we can constant-fold them if they have constant operands. */
13395 #ifdef ENABLE_FOLD_CHECKING
13396 # define fold(x) fold_1 (x)
13397 static tree fold_1 (tree);
13398 static
13399 #endif
13400 tree
13401 fold (tree expr)
13403 const tree t = expr;
13404 enum tree_code code = TREE_CODE (t);
13405 enum tree_code_class kind = TREE_CODE_CLASS (code);
13406 tree tem;
13407 location_t loc = EXPR_LOCATION (expr);
13409 /* Return right away if a constant. */
13410 if (kind == tcc_constant)
13411 return t;
13413 /* CALL_EXPR-like objects with variable numbers of operands are
13414 treated specially. */
13415 if (kind == tcc_vl_exp)
13417 if (code == CALL_EXPR)
13419 tem = fold_call_expr (loc, expr, false);
13420 return tem ? tem : expr;
13422 return expr;
13425 if (IS_EXPR_CODE_CLASS (kind))
13427 tree type = TREE_TYPE (t);
13428 tree op0, op1, op2;
13430 switch (TREE_CODE_LENGTH (code))
13432 case 1:
13433 op0 = TREE_OPERAND (t, 0);
13434 tem = fold_unary_loc (loc, code, type, op0);
13435 return tem ? tem : expr;
13436 case 2:
13437 op0 = TREE_OPERAND (t, 0);
13438 op1 = TREE_OPERAND (t, 1);
13439 tem = fold_binary_loc (loc, code, type, op0, op1);
13440 return tem ? tem : expr;
13441 case 3:
13442 op0 = TREE_OPERAND (t, 0);
13443 op1 = TREE_OPERAND (t, 1);
13444 op2 = TREE_OPERAND (t, 2);
13445 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13446 return tem ? tem : expr;
13447 default:
13448 break;
13452 switch (code)
13454 case ARRAY_REF:
13456 tree op0 = TREE_OPERAND (t, 0);
13457 tree op1 = TREE_OPERAND (t, 1);
13459 if (TREE_CODE (op1) == INTEGER_CST
13460 && TREE_CODE (op0) == CONSTRUCTOR
13461 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13463 tree val = get_array_ctor_element_at_index (op0,
13464 wi::to_offset (op1));
13465 if (val)
13466 return val;
13469 return t;
13472 /* Return a VECTOR_CST if possible. */
13473 case CONSTRUCTOR:
13475 tree type = TREE_TYPE (t);
13476 if (TREE_CODE (type) != VECTOR_TYPE)
13477 return t;
13479 unsigned i;
13480 tree val;
13481 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13482 if (! CONSTANT_CLASS_P (val))
13483 return t;
13485 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13488 case CONST_DECL:
13489 return fold (DECL_INITIAL (t));
13491 default:
13492 return t;
13493 } /* switch (code) */
13496 #ifdef ENABLE_FOLD_CHECKING
13497 #undef fold
13499 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13500 hash_table<nofree_ptr_hash<const tree_node> > *);
13501 static void fold_check_failed (const_tree, const_tree);
13502 void print_fold_checksum (const_tree);
13504 /* When --enable-checking=fold, compute a digest of expr before
13505 and after actual fold call to see if fold did not accidentally
13506 change original expr. */
13508 tree
13509 fold (tree expr)
13511 tree ret;
13512 struct md5_ctx ctx;
13513 unsigned char checksum_before[16], checksum_after[16];
13514 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13516 md5_init_ctx (&ctx);
13517 fold_checksum_tree (expr, &ctx, &ht);
13518 md5_finish_ctx (&ctx, checksum_before);
13519 ht.empty ();
13521 ret = fold_1 (expr);
13523 md5_init_ctx (&ctx);
13524 fold_checksum_tree (expr, &ctx, &ht);
13525 md5_finish_ctx (&ctx, checksum_after);
13527 if (memcmp (checksum_before, checksum_after, 16))
13528 fold_check_failed (expr, ret);
13530 return ret;
13533 void
13534 print_fold_checksum (const_tree expr)
13536 struct md5_ctx ctx;
13537 unsigned char checksum[16], cnt;
13538 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13540 md5_init_ctx (&ctx);
13541 fold_checksum_tree (expr, &ctx, &ht);
13542 md5_finish_ctx (&ctx, checksum);
13543 for (cnt = 0; cnt < 16; ++cnt)
13544 fprintf (stderr, "%02x", checksum[cnt]);
13545 putc ('\n', stderr);
13548 static void
13549 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13551 internal_error ("fold check: original tree changed by fold");
13554 static void
13555 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13556 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13558 const tree_node **slot;
13559 enum tree_code code;
13560 union tree_node *buf;
13561 int i, len;
13563 recursive_label:
13564 if (expr == NULL)
13565 return;
13566 slot = ht->find_slot (expr, INSERT);
13567 if (*slot != NULL)
13568 return;
13569 *slot = expr;
13570 code = TREE_CODE (expr);
13571 if (TREE_CODE_CLASS (code) == tcc_declaration
13572 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13574 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13575 size_t sz = tree_size (expr);
13576 buf = XALLOCAVAR (union tree_node, sz);
13577 memcpy ((char *) buf, expr, sz);
13578 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13579 buf->decl_with_vis.symtab_node = NULL;
13580 buf->base.nowarning_flag = 0;
13581 expr = (tree) buf;
13583 else if (TREE_CODE_CLASS (code) == tcc_type
13584 && (TYPE_POINTER_TO (expr)
13585 || TYPE_REFERENCE_TO (expr)
13586 || TYPE_CACHED_VALUES_P (expr)
13587 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13588 || TYPE_NEXT_VARIANT (expr)
13589 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13591 /* Allow these fields to be modified. */
13592 tree tmp;
13593 size_t sz = tree_size (expr);
13594 buf = XALLOCAVAR (union tree_node, sz);
13595 memcpy ((char *) buf, expr, sz);
13596 expr = tmp = (tree) buf;
13597 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13598 TYPE_POINTER_TO (tmp) = NULL;
13599 TYPE_REFERENCE_TO (tmp) = NULL;
13600 TYPE_NEXT_VARIANT (tmp) = NULL;
13601 TYPE_ALIAS_SET (tmp) = -1;
13602 if (TYPE_CACHED_VALUES_P (tmp))
13604 TYPE_CACHED_VALUES_P (tmp) = 0;
13605 TYPE_CACHED_VALUES (tmp) = NULL;
13608 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13610 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13611 that and change builtins.cc etc. instead - see PR89543. */
13612 size_t sz = tree_size (expr);
13613 buf = XALLOCAVAR (union tree_node, sz);
13614 memcpy ((char *) buf, expr, sz);
13615 buf->base.nowarning_flag = 0;
13616 expr = (tree) buf;
13618 md5_process_bytes (expr, tree_size (expr), ctx);
13619 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13620 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13621 if (TREE_CODE_CLASS (code) != tcc_type
13622 && TREE_CODE_CLASS (code) != tcc_declaration
13623 && code != TREE_LIST
13624 && code != SSA_NAME
13625 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13626 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13627 switch (TREE_CODE_CLASS (code))
13629 case tcc_constant:
13630 switch (code)
13632 case STRING_CST:
13633 md5_process_bytes (TREE_STRING_POINTER (expr),
13634 TREE_STRING_LENGTH (expr), ctx);
13635 break;
13636 case COMPLEX_CST:
13637 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13638 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13639 break;
13640 case VECTOR_CST:
13641 len = vector_cst_encoded_nelts (expr);
13642 for (i = 0; i < len; ++i)
13643 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13644 break;
13645 default:
13646 break;
13648 break;
13649 case tcc_exceptional:
13650 switch (code)
13652 case TREE_LIST:
13653 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13654 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13655 expr = TREE_CHAIN (expr);
13656 goto recursive_label;
13657 break;
13658 case TREE_VEC:
13659 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13660 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13661 break;
13662 default:
13663 break;
13665 break;
13666 case tcc_expression:
13667 case tcc_reference:
13668 case tcc_comparison:
13669 case tcc_unary:
13670 case tcc_binary:
13671 case tcc_statement:
13672 case tcc_vl_exp:
13673 len = TREE_OPERAND_LENGTH (expr);
13674 for (i = 0; i < len; ++i)
13675 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13676 break;
13677 case tcc_declaration:
13678 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13679 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13680 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13682 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13683 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13684 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13685 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13686 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13689 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13691 if (TREE_CODE (expr) == FUNCTION_DECL)
13693 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13694 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13696 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13698 break;
13699 case tcc_type:
13700 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13701 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13702 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13703 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13704 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13705 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13706 if (INTEGRAL_TYPE_P (expr)
13707 || SCALAR_FLOAT_TYPE_P (expr))
13709 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13710 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13712 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13713 if (TREE_CODE (expr) == RECORD_TYPE
13714 || TREE_CODE (expr) == UNION_TYPE
13715 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13716 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13717 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13718 break;
13719 default:
13720 break;
13724 /* Helper function for outputting the checksum of a tree T. When
13725 debugging with gdb, you can "define mynext" to be "next" followed
13726 by "call debug_fold_checksum (op0)", then just trace down till the
13727 outputs differ. */
13729 DEBUG_FUNCTION void
13730 debug_fold_checksum (const_tree t)
13732 int i;
13733 unsigned char checksum[16];
13734 struct md5_ctx ctx;
13735 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13737 md5_init_ctx (&ctx);
13738 fold_checksum_tree (t, &ctx, &ht);
13739 md5_finish_ctx (&ctx, checksum);
13740 ht.empty ();
13742 for (i = 0; i < 16; i++)
13743 fprintf (stderr, "%d ", checksum[i]);
13745 fprintf (stderr, "\n");
13748 #endif
13750 /* Fold a unary tree expression with code CODE of type TYPE with an
13751 operand OP0. LOC is the location of the resulting expression.
13752 Return a folded expression if successful. Otherwise, return a tree
13753 expression with code CODE of type TYPE with an operand OP0. */
13755 tree
13756 fold_build1_loc (location_t loc,
13757 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13759 tree tem;
13760 #ifdef ENABLE_FOLD_CHECKING
13761 unsigned char checksum_before[16], checksum_after[16];
13762 struct md5_ctx ctx;
13763 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13765 md5_init_ctx (&ctx);
13766 fold_checksum_tree (op0, &ctx, &ht);
13767 md5_finish_ctx (&ctx, checksum_before);
13768 ht.empty ();
13769 #endif
13771 tem = fold_unary_loc (loc, code, type, op0);
13772 if (!tem)
13773 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13775 #ifdef ENABLE_FOLD_CHECKING
13776 md5_init_ctx (&ctx);
13777 fold_checksum_tree (op0, &ctx, &ht);
13778 md5_finish_ctx (&ctx, checksum_after);
13780 if (memcmp (checksum_before, checksum_after, 16))
13781 fold_check_failed (op0, tem);
13782 #endif
13783 return tem;
13786 /* Fold a binary tree expression with code CODE of type TYPE with
13787 operands OP0 and OP1. LOC is the location of the resulting
13788 expression. Return a folded expression if successful. Otherwise,
13789 return a tree expression with code CODE of type TYPE with operands
13790 OP0 and OP1. */
13792 tree
13793 fold_build2_loc (location_t loc,
13794 enum tree_code code, tree type, tree op0, tree op1
13795 MEM_STAT_DECL)
13797 tree tem;
13798 #ifdef ENABLE_FOLD_CHECKING
13799 unsigned char checksum_before_op0[16],
13800 checksum_before_op1[16],
13801 checksum_after_op0[16],
13802 checksum_after_op1[16];
13803 struct md5_ctx ctx;
13804 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13806 md5_init_ctx (&ctx);
13807 fold_checksum_tree (op0, &ctx, &ht);
13808 md5_finish_ctx (&ctx, checksum_before_op0);
13809 ht.empty ();
13811 md5_init_ctx (&ctx);
13812 fold_checksum_tree (op1, &ctx, &ht);
13813 md5_finish_ctx (&ctx, checksum_before_op1);
13814 ht.empty ();
13815 #endif
13817 tem = fold_binary_loc (loc, code, type, op0, op1);
13818 if (!tem)
13819 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13821 #ifdef ENABLE_FOLD_CHECKING
13822 md5_init_ctx (&ctx);
13823 fold_checksum_tree (op0, &ctx, &ht);
13824 md5_finish_ctx (&ctx, checksum_after_op0);
13825 ht.empty ();
13827 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13828 fold_check_failed (op0, tem);
13830 md5_init_ctx (&ctx);
13831 fold_checksum_tree (op1, &ctx, &ht);
13832 md5_finish_ctx (&ctx, checksum_after_op1);
13834 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13835 fold_check_failed (op1, tem);
13836 #endif
13837 return tem;
13840 /* Fold a ternary tree expression with code CODE of type TYPE with
13841 operands OP0, OP1, and OP2. Return a folded expression if
13842 successful. Otherwise, return a tree expression with code CODE of
13843 type TYPE with operands OP0, OP1, and OP2. */
13845 tree
13846 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13847 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13849 tree tem;
13850 #ifdef ENABLE_FOLD_CHECKING
13851 unsigned char checksum_before_op0[16],
13852 checksum_before_op1[16],
13853 checksum_before_op2[16],
13854 checksum_after_op0[16],
13855 checksum_after_op1[16],
13856 checksum_after_op2[16];
13857 struct md5_ctx ctx;
13858 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13860 md5_init_ctx (&ctx);
13861 fold_checksum_tree (op0, &ctx, &ht);
13862 md5_finish_ctx (&ctx, checksum_before_op0);
13863 ht.empty ();
13865 md5_init_ctx (&ctx);
13866 fold_checksum_tree (op1, &ctx, &ht);
13867 md5_finish_ctx (&ctx, checksum_before_op1);
13868 ht.empty ();
13870 md5_init_ctx (&ctx);
13871 fold_checksum_tree (op2, &ctx, &ht);
13872 md5_finish_ctx (&ctx, checksum_before_op2);
13873 ht.empty ();
13874 #endif
13876 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13877 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13878 if (!tem)
13879 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13881 #ifdef ENABLE_FOLD_CHECKING
13882 md5_init_ctx (&ctx);
13883 fold_checksum_tree (op0, &ctx, &ht);
13884 md5_finish_ctx (&ctx, checksum_after_op0);
13885 ht.empty ();
13887 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13888 fold_check_failed (op0, tem);
13890 md5_init_ctx (&ctx);
13891 fold_checksum_tree (op1, &ctx, &ht);
13892 md5_finish_ctx (&ctx, checksum_after_op1);
13893 ht.empty ();
13895 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13896 fold_check_failed (op1, tem);
13898 md5_init_ctx (&ctx);
13899 fold_checksum_tree (op2, &ctx, &ht);
13900 md5_finish_ctx (&ctx, checksum_after_op2);
13902 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13903 fold_check_failed (op2, tem);
13904 #endif
13905 return tem;
13908 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13909 arguments in ARGARRAY, and a null static chain.
13910 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13911 of type TYPE from the given operands as constructed by build_call_array. */
13913 tree
13914 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13915 int nargs, tree *argarray)
13917 tree tem;
13918 #ifdef ENABLE_FOLD_CHECKING
13919 unsigned char checksum_before_fn[16],
13920 checksum_before_arglist[16],
13921 checksum_after_fn[16],
13922 checksum_after_arglist[16];
13923 struct md5_ctx ctx;
13924 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13925 int i;
13927 md5_init_ctx (&ctx);
13928 fold_checksum_tree (fn, &ctx, &ht);
13929 md5_finish_ctx (&ctx, checksum_before_fn);
13930 ht.empty ();
13932 md5_init_ctx (&ctx);
13933 for (i = 0; i < nargs; i++)
13934 fold_checksum_tree (argarray[i], &ctx, &ht);
13935 md5_finish_ctx (&ctx, checksum_before_arglist);
13936 ht.empty ();
13937 #endif
13939 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13940 if (!tem)
13941 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13943 #ifdef ENABLE_FOLD_CHECKING
13944 md5_init_ctx (&ctx);
13945 fold_checksum_tree (fn, &ctx, &ht);
13946 md5_finish_ctx (&ctx, checksum_after_fn);
13947 ht.empty ();
13949 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13950 fold_check_failed (fn, tem);
13952 md5_init_ctx (&ctx);
13953 for (i = 0; i < nargs; i++)
13954 fold_checksum_tree (argarray[i], &ctx, &ht);
13955 md5_finish_ctx (&ctx, checksum_after_arglist);
13957 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13958 fold_check_failed (NULL_TREE, tem);
13959 #endif
13960 return tem;
13963 /* Perform constant folding and related simplification of initializer
13964 expression EXPR. These behave identically to "fold_buildN" but ignore
13965 potential run-time traps and exceptions that fold must preserve. */
13967 #define START_FOLD_INIT \
13968 int saved_signaling_nans = flag_signaling_nans;\
13969 int saved_trapping_math = flag_trapping_math;\
13970 int saved_rounding_math = flag_rounding_math;\
13971 int saved_trapv = flag_trapv;\
13972 int saved_folding_initializer = folding_initializer;\
13973 flag_signaling_nans = 0;\
13974 flag_trapping_math = 0;\
13975 flag_rounding_math = 0;\
13976 flag_trapv = 0;\
13977 folding_initializer = 1;
13979 #define END_FOLD_INIT \
13980 flag_signaling_nans = saved_signaling_nans;\
13981 flag_trapping_math = saved_trapping_math;\
13982 flag_rounding_math = saved_rounding_math;\
13983 flag_trapv = saved_trapv;\
13984 folding_initializer = saved_folding_initializer;
13986 tree
13987 fold_init (tree expr)
13989 tree result;
13990 START_FOLD_INIT;
13992 result = fold (expr);
13994 END_FOLD_INIT;
13995 return result;
13998 tree
13999 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14000 tree type, tree op)
14002 tree result;
14003 START_FOLD_INIT;
14005 result = fold_build1_loc (loc, code, type, op);
14007 END_FOLD_INIT;
14008 return result;
14011 tree
14012 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14013 tree type, tree op0, tree op1)
14015 tree result;
14016 START_FOLD_INIT;
14018 result = fold_build2_loc (loc, code, type, op0, op1);
14020 END_FOLD_INIT;
14021 return result;
14024 tree
14025 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14026 int nargs, tree *argarray)
14028 tree result;
14029 START_FOLD_INIT;
14031 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14033 END_FOLD_INIT;
14034 return result;
14037 tree
14038 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14039 tree lhs, tree rhs)
14041 tree result;
14042 START_FOLD_INIT;
14044 result = fold_binary_loc (loc, code, type, lhs, rhs);
14046 END_FOLD_INIT;
14047 return result;
14050 #undef START_FOLD_INIT
14051 #undef END_FOLD_INIT
14053 /* Determine if first argument is a multiple of second argument. Return 0 if
14054 it is not, or we cannot easily determined it to be.
14056 An example of the sort of thing we care about (at this point; this routine
14057 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14058 fold cases do now) is discovering that
14060 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14062 is a multiple of
14064 SAVE_EXPR (J * 8)
14066 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14068 This code also handles discovering that
14070 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14072 is a multiple of 8 so we don't have to worry about dealing with a
14073 possible remainder.
14075 Note that we *look* inside a SAVE_EXPR only to determine how it was
14076 calculated; it is not safe for fold to do much of anything else with the
14077 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14078 at run time. For example, the latter example above *cannot* be implemented
14079 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14080 evaluation time of the original SAVE_EXPR is not necessarily the same at
14081 the time the new expression is evaluated. The only optimization of this
14082 sort that would be valid is changing
14084 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14086 divided by 8 to
14088 SAVE_EXPR (I) * SAVE_EXPR (J)
14090 (where the same SAVE_EXPR (J) is used in the original and the
14091 transformed version).
14093 NOWRAP specifies whether all outer operations in TYPE should
14094 be considered not wrapping. Any type conversion within TOP acts
14095 as a barrier and we will fall back to NOWRAP being false.
14096 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14097 as not wrapping even though they are generally using unsigned arithmetic. */
14100 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14102 gimple *stmt;
14103 tree op1, op2;
14105 if (operand_equal_p (top, bottom, 0))
14106 return 1;
14108 if (TREE_CODE (type) != INTEGER_TYPE)
14109 return 0;
14111 switch (TREE_CODE (top))
14113 case BIT_AND_EXPR:
14114 /* Bitwise and provides a power of two multiple. If the mask is
14115 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14116 if (!integer_pow2p (bottom))
14117 return 0;
14118 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14119 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14121 case MULT_EXPR:
14122 /* If the multiplication can wrap we cannot recurse further unless
14123 the bottom is a power of two which is where wrapping does not
14124 matter. */
14125 if (!nowrap
14126 && !TYPE_OVERFLOW_UNDEFINED (type)
14127 && !integer_pow2p (bottom))
14128 return 0;
14129 if (TREE_CODE (bottom) == INTEGER_CST)
14131 op1 = TREE_OPERAND (top, 0);
14132 op2 = TREE_OPERAND (top, 1);
14133 if (TREE_CODE (op1) == INTEGER_CST)
14134 std::swap (op1, op2);
14135 if (TREE_CODE (op2) == INTEGER_CST)
14137 if (multiple_of_p (type, op2, bottom, nowrap))
14138 return 1;
14139 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14140 if (multiple_of_p (type, bottom, op2, nowrap))
14142 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14143 wi::to_widest (op2));
14144 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14146 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14147 return multiple_of_p (type, op1, op2, nowrap);
14150 return multiple_of_p (type, op1, bottom, nowrap);
14153 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14154 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14156 case LSHIFT_EXPR:
14157 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14158 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14160 op1 = TREE_OPERAND (top, 1);
14161 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14163 wide_int mul_op
14164 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14165 return multiple_of_p (type,
14166 wide_int_to_tree (type, mul_op), bottom,
14167 nowrap);
14170 return 0;
14172 case MINUS_EXPR:
14173 case PLUS_EXPR:
14174 /* If the addition or subtraction can wrap we cannot recurse further
14175 unless bottom is a power of two which is where wrapping does not
14176 matter. */
14177 if (!nowrap
14178 && !TYPE_OVERFLOW_UNDEFINED (type)
14179 && !integer_pow2p (bottom))
14180 return 0;
14182 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14183 unsigned type. For example, (X / 3) + 0xfffffffd is multiple of 3,
14184 but 0xfffffffd is not. */
14185 op1 = TREE_OPERAND (top, 1);
14186 if (TREE_CODE (top) == PLUS_EXPR
14187 && nowrap
14188 && TYPE_UNSIGNED (type)
14189 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14190 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14192 /* It is impossible to prove if op0 +- op1 is multiple of bottom
14193 precisely, so be conservative here checking if both op0 and op1
14194 are multiple of bottom. Note we check the second operand first
14195 since it's usually simpler. */
14196 return (multiple_of_p (type, op1, bottom, nowrap)
14197 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14199 CASE_CONVERT:
14200 /* Can't handle conversions from non-integral or wider integral type. */
14201 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14202 || (TYPE_PRECISION (type)
14203 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14204 return 0;
14205 /* NOWRAP only extends to operations in the outermost type so
14206 make sure to strip it off here. */
14207 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14208 TREE_OPERAND (top, 0), bottom, false);
14210 case SAVE_EXPR:
14211 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14213 case COND_EXPR:
14214 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14215 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14217 case INTEGER_CST:
14218 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14219 return 0;
14220 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14221 SIGNED);
14223 case SSA_NAME:
14224 if (TREE_CODE (bottom) == INTEGER_CST
14225 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14226 && gimple_code (stmt) == GIMPLE_ASSIGN)
14228 enum tree_code code = gimple_assign_rhs_code (stmt);
14230 /* Check for special cases to see if top is defined as multiple
14231 of bottom:
14233 top = (X & ~(bottom - 1) ; bottom is power of 2
14237 Y = X % bottom
14238 top = X - Y. */
14239 if (code == BIT_AND_EXPR
14240 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14241 && TREE_CODE (op2) == INTEGER_CST
14242 && integer_pow2p (bottom)
14243 && wi::multiple_of_p (wi::to_widest (op2),
14244 wi::to_widest (bottom), UNSIGNED))
14245 return 1;
14247 op1 = gimple_assign_rhs1 (stmt);
14248 if (code == MINUS_EXPR
14249 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14250 && TREE_CODE (op2) == SSA_NAME
14251 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14252 && gimple_code (stmt) == GIMPLE_ASSIGN
14253 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14254 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14255 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14256 return 1;
14259 /* fall through */
14261 default:
14262 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14263 return multiple_p (wi::to_poly_widest (top),
14264 wi::to_poly_widest (bottom));
14266 return 0;
14270 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14271 This function returns true for integer expressions, and returns
14272 false if uncertain. */
14274 bool
14275 tree_expr_finite_p (const_tree x)
14277 machine_mode mode = element_mode (x);
14278 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14279 return true;
14280 switch (TREE_CODE (x))
14282 case REAL_CST:
14283 return real_isfinite (TREE_REAL_CST_PTR (x));
14284 case COMPLEX_CST:
14285 return tree_expr_finite_p (TREE_REALPART (x))
14286 && tree_expr_finite_p (TREE_IMAGPART (x));
14287 case FLOAT_EXPR:
14288 return true;
14289 case ABS_EXPR:
14290 case CONVERT_EXPR:
14291 case NON_LVALUE_EXPR:
14292 case NEGATE_EXPR:
14293 case SAVE_EXPR:
14294 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14295 case MIN_EXPR:
14296 case MAX_EXPR:
14297 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14298 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14299 case COND_EXPR:
14300 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14301 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14302 case CALL_EXPR:
14303 switch (get_call_combined_fn (x))
14305 CASE_CFN_FABS:
14306 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14307 CASE_CFN_FMAX:
14308 CASE_CFN_FMIN:
14309 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14310 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14311 default:
14312 return false;
14315 default:
14316 return false;
14320 /* Return true if expression X evaluates to an infinity.
14321 This function returns false for integer expressions. */
14323 bool
14324 tree_expr_infinite_p (const_tree x)
14326 if (!HONOR_INFINITIES (x))
14327 return false;
14328 switch (TREE_CODE (x))
14330 case REAL_CST:
14331 return real_isinf (TREE_REAL_CST_PTR (x));
14332 case ABS_EXPR:
14333 case NEGATE_EXPR:
14334 case NON_LVALUE_EXPR:
14335 case SAVE_EXPR:
14336 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14337 case COND_EXPR:
14338 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14339 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14340 default:
14341 return false;
14345 /* Return true if expression X could evaluate to an infinity.
14346 This function returns false for integer expressions, and returns
14347 true if uncertain. */
14349 bool
14350 tree_expr_maybe_infinite_p (const_tree x)
14352 if (!HONOR_INFINITIES (x))
14353 return false;
14354 switch (TREE_CODE (x))
14356 case REAL_CST:
14357 return real_isinf (TREE_REAL_CST_PTR (x));
14358 case FLOAT_EXPR:
14359 return false;
14360 case ABS_EXPR:
14361 case NEGATE_EXPR:
14362 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14363 case COND_EXPR:
14364 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14365 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14366 default:
14367 return true;
14371 /* Return true if expression X evaluates to a signaling NaN.
14372 This function returns false for integer expressions. */
14374 bool
14375 tree_expr_signaling_nan_p (const_tree x)
14377 if (!HONOR_SNANS (x))
14378 return false;
14379 switch (TREE_CODE (x))
14381 case REAL_CST:
14382 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14383 case NON_LVALUE_EXPR:
14384 case SAVE_EXPR:
14385 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14386 case COND_EXPR:
14387 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14388 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14389 default:
14390 return false;
14394 /* Return true if expression X could evaluate to a signaling NaN.
14395 This function returns false for integer expressions, and returns
14396 true if uncertain. */
14398 bool
14399 tree_expr_maybe_signaling_nan_p (const_tree x)
14401 if (!HONOR_SNANS (x))
14402 return false;
14403 switch (TREE_CODE (x))
14405 case REAL_CST:
14406 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14407 case FLOAT_EXPR:
14408 return false;
14409 case ABS_EXPR:
14410 case CONVERT_EXPR:
14411 case NEGATE_EXPR:
14412 case NON_LVALUE_EXPR:
14413 case SAVE_EXPR:
14414 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14415 case MIN_EXPR:
14416 case MAX_EXPR:
14417 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14418 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14419 case COND_EXPR:
14420 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14421 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14422 case CALL_EXPR:
14423 switch (get_call_combined_fn (x))
14425 CASE_CFN_FABS:
14426 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14427 CASE_CFN_FMAX:
14428 CASE_CFN_FMIN:
14429 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14430 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14431 default:
14432 return true;
14434 default:
14435 return true;
14439 /* Return true if expression X evaluates to a NaN.
14440 This function returns false for integer expressions. */
14442 bool
14443 tree_expr_nan_p (const_tree x)
14445 if (!HONOR_NANS (x))
14446 return false;
14447 switch (TREE_CODE (x))
14449 case REAL_CST:
14450 return real_isnan (TREE_REAL_CST_PTR (x));
14451 case NON_LVALUE_EXPR:
14452 case SAVE_EXPR:
14453 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14454 case COND_EXPR:
14455 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14456 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14457 default:
14458 return false;
14462 /* Return true if expression X could evaluate to a NaN.
14463 This function returns false for integer expressions, and returns
14464 true if uncertain. */
14466 bool
14467 tree_expr_maybe_nan_p (const_tree x)
14469 if (!HONOR_NANS (x))
14470 return false;
14471 switch (TREE_CODE (x))
14473 case REAL_CST:
14474 return real_isnan (TREE_REAL_CST_PTR (x));
14475 case FLOAT_EXPR:
14476 return false;
14477 case PLUS_EXPR:
14478 case MINUS_EXPR:
14479 case MULT_EXPR:
14480 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14481 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14482 case ABS_EXPR:
14483 case CONVERT_EXPR:
14484 case NEGATE_EXPR:
14485 case NON_LVALUE_EXPR:
14486 case SAVE_EXPR:
14487 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14488 case MIN_EXPR:
14489 case MAX_EXPR:
14490 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14491 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14492 case COND_EXPR:
14493 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14494 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14495 case CALL_EXPR:
14496 switch (get_call_combined_fn (x))
14498 CASE_CFN_FABS:
14499 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14500 CASE_CFN_FMAX:
14501 CASE_CFN_FMIN:
14502 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14503 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14504 default:
14505 return true;
14507 default:
14508 return true;
14512 /* Return true if expression X could evaluate to -0.0.
14513 This function returns true if uncertain. */
14515 bool
14516 tree_expr_maybe_real_minus_zero_p (const_tree x)
14518 if (!HONOR_SIGNED_ZEROS (x))
14519 return false;
14520 switch (TREE_CODE (x))
14522 case REAL_CST:
14523 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14524 case INTEGER_CST:
14525 case FLOAT_EXPR:
14526 case ABS_EXPR:
14527 return false;
14528 case NON_LVALUE_EXPR:
14529 case SAVE_EXPR:
14530 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14531 case COND_EXPR:
14532 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14533 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14534 case CALL_EXPR:
14535 switch (get_call_combined_fn (x))
14537 CASE_CFN_FABS:
14538 return false;
14539 default:
14540 break;
14542 default:
14543 break;
14545 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14546 * but currently those predicates require tree and not const_tree. */
14547 return true;
14550 #define tree_expr_nonnegative_warnv_p(X, Y) \
14551 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14553 #define RECURSE(X) \
14554 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14556 /* Return true if CODE or TYPE is known to be non-negative. */
14558 static bool
14559 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14561 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14562 && truth_value_p (code))
14563 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14564 have a signed:1 type (where the value is -1 and 0). */
14565 return true;
14566 return false;
14569 /* Return true if (CODE OP0) is known to be non-negative. If the return
14570 value is based on the assumption that signed overflow is undefined,
14571 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14572 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14574 bool
14575 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14576 bool *strict_overflow_p, int depth)
14578 if (TYPE_UNSIGNED (type))
14579 return true;
14581 switch (code)
14583 case ABS_EXPR:
14584 /* We can't return 1 if flag_wrapv is set because
14585 ABS_EXPR<INT_MIN> = INT_MIN. */
14586 if (!ANY_INTEGRAL_TYPE_P (type))
14587 return true;
14588 if (TYPE_OVERFLOW_UNDEFINED (type))
14590 *strict_overflow_p = true;
14591 return true;
14593 break;
14595 case NON_LVALUE_EXPR:
14596 case FLOAT_EXPR:
14597 case FIX_TRUNC_EXPR:
14598 return RECURSE (op0);
14600 CASE_CONVERT:
14602 tree inner_type = TREE_TYPE (op0);
14603 tree outer_type = type;
14605 if (TREE_CODE (outer_type) == REAL_TYPE)
14607 if (TREE_CODE (inner_type) == REAL_TYPE)
14608 return RECURSE (op0);
14609 if (INTEGRAL_TYPE_P (inner_type))
14611 if (TYPE_UNSIGNED (inner_type))
14612 return true;
14613 return RECURSE (op0);
14616 else if (INTEGRAL_TYPE_P (outer_type))
14618 if (TREE_CODE (inner_type) == REAL_TYPE)
14619 return RECURSE (op0);
14620 if (INTEGRAL_TYPE_P (inner_type))
14621 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14622 && TYPE_UNSIGNED (inner_type);
14625 break;
14627 default:
14628 return tree_simple_nonnegative_warnv_p (code, type);
14631 /* We don't know sign of `t', so be conservative and return false. */
14632 return false;
14635 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14636 value is based on the assumption that signed overflow is undefined,
14637 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14638 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14640 bool
14641 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14642 tree op1, bool *strict_overflow_p,
14643 int depth)
14645 if (TYPE_UNSIGNED (type))
14646 return true;
14648 switch (code)
14650 case POINTER_PLUS_EXPR:
14651 case PLUS_EXPR:
14652 if (FLOAT_TYPE_P (type))
14653 return RECURSE (op0) && RECURSE (op1);
14655 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14656 both unsigned and at least 2 bits shorter than the result. */
14657 if (TREE_CODE (type) == INTEGER_TYPE
14658 && TREE_CODE (op0) == NOP_EXPR
14659 && TREE_CODE (op1) == NOP_EXPR)
14661 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14662 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14663 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14664 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14666 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14667 TYPE_PRECISION (inner2)) + 1;
14668 return prec < TYPE_PRECISION (type);
14671 break;
14673 case MULT_EXPR:
14674 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14676 /* x * x is always non-negative for floating point x
14677 or without overflow. */
14678 if (operand_equal_p (op0, op1, 0)
14679 || (RECURSE (op0) && RECURSE (op1)))
14681 if (ANY_INTEGRAL_TYPE_P (type)
14682 && TYPE_OVERFLOW_UNDEFINED (type))
14683 *strict_overflow_p = true;
14684 return true;
14688 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14689 both unsigned and their total bits is shorter than the result. */
14690 if (TREE_CODE (type) == INTEGER_TYPE
14691 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14692 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14694 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14695 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14696 : TREE_TYPE (op0);
14697 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14698 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14699 : TREE_TYPE (op1);
14701 bool unsigned0 = TYPE_UNSIGNED (inner0);
14702 bool unsigned1 = TYPE_UNSIGNED (inner1);
14704 if (TREE_CODE (op0) == INTEGER_CST)
14705 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14707 if (TREE_CODE (op1) == INTEGER_CST)
14708 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14710 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14711 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14713 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14714 ? tree_int_cst_min_precision (op0, UNSIGNED)
14715 : TYPE_PRECISION (inner0);
14717 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14718 ? tree_int_cst_min_precision (op1, UNSIGNED)
14719 : TYPE_PRECISION (inner1);
14721 return precision0 + precision1 < TYPE_PRECISION (type);
14724 return false;
14726 case BIT_AND_EXPR:
14727 return RECURSE (op0) || RECURSE (op1);
14729 case MAX_EXPR:
14730 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14731 things. */
14732 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14733 return RECURSE (op0) && RECURSE (op1);
14734 return RECURSE (op0) || RECURSE (op1);
14736 case BIT_IOR_EXPR:
14737 case BIT_XOR_EXPR:
14738 case MIN_EXPR:
14739 case RDIV_EXPR:
14740 case TRUNC_DIV_EXPR:
14741 case CEIL_DIV_EXPR:
14742 case FLOOR_DIV_EXPR:
14743 case ROUND_DIV_EXPR:
14744 return RECURSE (op0) && RECURSE (op1);
14746 case TRUNC_MOD_EXPR:
14747 return RECURSE (op0);
14749 case FLOOR_MOD_EXPR:
14750 return RECURSE (op1);
14752 case CEIL_MOD_EXPR:
14753 case ROUND_MOD_EXPR:
14754 default:
14755 return tree_simple_nonnegative_warnv_p (code, type);
14758 /* We don't know sign of `t', so be conservative and return false. */
14759 return false;
14762 /* Return true if T is known to be non-negative. If the return
14763 value is based on the assumption that signed overflow is undefined,
14764 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14765 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14767 bool
14768 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14770 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14771 return true;
14773 switch (TREE_CODE (t))
14775 case INTEGER_CST:
14776 return tree_int_cst_sgn (t) >= 0;
14778 case REAL_CST:
14779 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14781 case FIXED_CST:
14782 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14784 case COND_EXPR:
14785 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14787 case SSA_NAME:
14788 /* Limit the depth of recursion to avoid quadratic behavior.
14789 This is expected to catch almost all occurrences in practice.
14790 If this code misses important cases that unbounded recursion
14791 would not, passes that need this information could be revised
14792 to provide it through dataflow propagation. */
14793 return (!name_registered_for_update_p (t)
14794 && depth < param_max_ssa_name_query_depth
14795 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14796 strict_overflow_p, depth));
14798 default:
14799 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14803 /* Return true if T is known to be non-negative. If the return
14804 value is based on the assumption that signed overflow is undefined,
14805 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14806 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14808 bool
14809 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14810 bool *strict_overflow_p, int depth)
14812 switch (fn)
14814 CASE_CFN_ACOS:
14815 CASE_CFN_ACOSH:
14816 CASE_CFN_CABS:
14817 CASE_CFN_COSH:
14818 CASE_CFN_ERFC:
14819 CASE_CFN_EXP:
14820 CASE_CFN_EXP10:
14821 CASE_CFN_EXP2:
14822 CASE_CFN_FABS:
14823 CASE_CFN_FDIM:
14824 CASE_CFN_HYPOT:
14825 CASE_CFN_POW10:
14826 CASE_CFN_FFS:
14827 CASE_CFN_PARITY:
14828 CASE_CFN_POPCOUNT:
14829 CASE_CFN_CLZ:
14830 CASE_CFN_CLRSB:
14831 case CFN_BUILT_IN_BSWAP16:
14832 case CFN_BUILT_IN_BSWAP32:
14833 case CFN_BUILT_IN_BSWAP64:
14834 case CFN_BUILT_IN_BSWAP128:
14835 /* Always true. */
14836 return true;
14838 CASE_CFN_SQRT:
14839 CASE_CFN_SQRT_FN:
14840 /* sqrt(-0.0) is -0.0. */
14841 if (!HONOR_SIGNED_ZEROS (type))
14842 return true;
14843 return RECURSE (arg0);
14845 CASE_CFN_ASINH:
14846 CASE_CFN_ATAN:
14847 CASE_CFN_ATANH:
14848 CASE_CFN_CBRT:
14849 CASE_CFN_CEIL:
14850 CASE_CFN_CEIL_FN:
14851 CASE_CFN_ERF:
14852 CASE_CFN_EXPM1:
14853 CASE_CFN_FLOOR:
14854 CASE_CFN_FLOOR_FN:
14855 CASE_CFN_FMOD:
14856 CASE_CFN_FREXP:
14857 CASE_CFN_ICEIL:
14858 CASE_CFN_IFLOOR:
14859 CASE_CFN_IRINT:
14860 CASE_CFN_IROUND:
14861 CASE_CFN_LCEIL:
14862 CASE_CFN_LDEXP:
14863 CASE_CFN_LFLOOR:
14864 CASE_CFN_LLCEIL:
14865 CASE_CFN_LLFLOOR:
14866 CASE_CFN_LLRINT:
14867 CASE_CFN_LLROUND:
14868 CASE_CFN_LRINT:
14869 CASE_CFN_LROUND:
14870 CASE_CFN_MODF:
14871 CASE_CFN_NEARBYINT:
14872 CASE_CFN_NEARBYINT_FN:
14873 CASE_CFN_RINT:
14874 CASE_CFN_RINT_FN:
14875 CASE_CFN_ROUND:
14876 CASE_CFN_ROUND_FN:
14877 CASE_CFN_ROUNDEVEN:
14878 CASE_CFN_ROUNDEVEN_FN:
14879 CASE_CFN_SCALB:
14880 CASE_CFN_SCALBLN:
14881 CASE_CFN_SCALBN:
14882 CASE_CFN_SIGNBIT:
14883 CASE_CFN_SIGNIFICAND:
14884 CASE_CFN_SINH:
14885 CASE_CFN_TANH:
14886 CASE_CFN_TRUNC:
14887 CASE_CFN_TRUNC_FN:
14888 /* True if the 1st argument is nonnegative. */
14889 return RECURSE (arg0);
14891 CASE_CFN_FMAX:
14892 CASE_CFN_FMAX_FN:
14893 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14894 things. In the presence of sNaNs, we're only guaranteed to be
14895 non-negative if both operands are non-negative. In the presence
14896 of qNaNs, we're non-negative if either operand is non-negative
14897 and can't be a qNaN, or if both operands are non-negative. */
14898 if (tree_expr_maybe_signaling_nan_p (arg0) ||
14899 tree_expr_maybe_signaling_nan_p (arg1))
14900 return RECURSE (arg0) && RECURSE (arg1);
14901 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14902 || RECURSE (arg1))
14903 : (RECURSE (arg1)
14904 && !tree_expr_maybe_nan_p (arg1));
14906 CASE_CFN_FMIN:
14907 CASE_CFN_FMIN_FN:
14908 /* True if the 1st AND 2nd arguments are nonnegative. */
14909 return RECURSE (arg0) && RECURSE (arg1);
14911 CASE_CFN_COPYSIGN:
14912 CASE_CFN_COPYSIGN_FN:
14913 /* True if the 2nd argument is nonnegative. */
14914 return RECURSE (arg1);
14916 CASE_CFN_POWI:
14917 /* True if the 1st argument is nonnegative or the second
14918 argument is an even integer. */
14919 if (TREE_CODE (arg1) == INTEGER_CST
14920 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14921 return true;
14922 return RECURSE (arg0);
14924 CASE_CFN_POW:
14925 /* True if the 1st argument is nonnegative or the second
14926 argument is an even integer valued real. */
14927 if (TREE_CODE (arg1) == REAL_CST)
14929 REAL_VALUE_TYPE c;
14930 HOST_WIDE_INT n;
14932 c = TREE_REAL_CST (arg1);
14933 n = real_to_integer (&c);
14934 if ((n & 1) == 0)
14936 REAL_VALUE_TYPE cint;
14937 real_from_integer (&cint, VOIDmode, n, SIGNED);
14938 if (real_identical (&c, &cint))
14939 return true;
14942 return RECURSE (arg0);
14944 default:
14945 break;
14947 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14950 /* Return true if T is known to be non-negative. If the return
14951 value is based on the assumption that signed overflow is undefined,
14952 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14953 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14955 static bool
14956 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14958 enum tree_code code = TREE_CODE (t);
14959 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14960 return true;
14962 switch (code)
14964 case TARGET_EXPR:
14966 tree temp = TARGET_EXPR_SLOT (t);
14967 t = TARGET_EXPR_INITIAL (t);
14969 /* If the initializer is non-void, then it's a normal expression
14970 that will be assigned to the slot. */
14971 if (!VOID_TYPE_P (t))
14972 return RECURSE (t);
14974 /* Otherwise, the initializer sets the slot in some way. One common
14975 way is an assignment statement at the end of the initializer. */
14976 while (1)
14978 if (TREE_CODE (t) == BIND_EXPR)
14979 t = expr_last (BIND_EXPR_BODY (t));
14980 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14981 || TREE_CODE (t) == TRY_CATCH_EXPR)
14982 t = expr_last (TREE_OPERAND (t, 0));
14983 else if (TREE_CODE (t) == STATEMENT_LIST)
14984 t = expr_last (t);
14985 else
14986 break;
14988 if (TREE_CODE (t) == MODIFY_EXPR
14989 && TREE_OPERAND (t, 0) == temp)
14990 return RECURSE (TREE_OPERAND (t, 1));
14992 return false;
14995 case CALL_EXPR:
14997 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14998 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15000 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15001 get_call_combined_fn (t),
15002 arg0,
15003 arg1,
15004 strict_overflow_p, depth);
15006 case COMPOUND_EXPR:
15007 case MODIFY_EXPR:
15008 return RECURSE (TREE_OPERAND (t, 1));
15010 case BIND_EXPR:
15011 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15013 case SAVE_EXPR:
15014 return RECURSE (TREE_OPERAND (t, 0));
15016 default:
15017 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15021 #undef RECURSE
15022 #undef tree_expr_nonnegative_warnv_p
15024 /* Return true if T is known to be non-negative. If the return
15025 value is based on the assumption that signed overflow is undefined,
15026 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15027 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15029 bool
15030 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15032 enum tree_code code;
15033 if (t == error_mark_node)
15034 return false;
15036 code = TREE_CODE (t);
15037 switch (TREE_CODE_CLASS (code))
15039 case tcc_binary:
15040 case tcc_comparison:
15041 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15042 TREE_TYPE (t),
15043 TREE_OPERAND (t, 0),
15044 TREE_OPERAND (t, 1),
15045 strict_overflow_p, depth);
15047 case tcc_unary:
15048 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15049 TREE_TYPE (t),
15050 TREE_OPERAND (t, 0),
15051 strict_overflow_p, depth);
15053 case tcc_constant:
15054 case tcc_declaration:
15055 case tcc_reference:
15056 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15058 default:
15059 break;
15062 switch (code)
15064 case TRUTH_AND_EXPR:
15065 case TRUTH_OR_EXPR:
15066 case TRUTH_XOR_EXPR:
15067 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15068 TREE_TYPE (t),
15069 TREE_OPERAND (t, 0),
15070 TREE_OPERAND (t, 1),
15071 strict_overflow_p, depth);
15072 case TRUTH_NOT_EXPR:
15073 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15074 TREE_TYPE (t),
15075 TREE_OPERAND (t, 0),
15076 strict_overflow_p, depth);
15078 case COND_EXPR:
15079 case CONSTRUCTOR:
15080 case OBJ_TYPE_REF:
15081 case ASSERT_EXPR:
15082 case ADDR_EXPR:
15083 case WITH_SIZE_EXPR:
15084 case SSA_NAME:
15085 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15087 default:
15088 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15092 /* Return true if `t' is known to be non-negative. Handle warnings
15093 about undefined signed overflow. */
15095 bool
15096 tree_expr_nonnegative_p (tree t)
15098 bool ret, strict_overflow_p;
15100 strict_overflow_p = false;
15101 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15102 if (strict_overflow_p)
15103 fold_overflow_warning (("assuming signed overflow does not occur when "
15104 "determining that expression is always "
15105 "non-negative"),
15106 WARN_STRICT_OVERFLOW_MISC);
15107 return ret;
15111 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15112 For floating point we further ensure that T is not denormal.
15113 Similar logic is present in nonzero_address in rtlanal.h.
15115 If the return value is based on the assumption that signed overflow
15116 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15117 change *STRICT_OVERFLOW_P. */
15119 bool
15120 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15121 bool *strict_overflow_p)
15123 switch (code)
15125 case ABS_EXPR:
15126 return tree_expr_nonzero_warnv_p (op0,
15127 strict_overflow_p);
15129 case NOP_EXPR:
15131 tree inner_type = TREE_TYPE (op0);
15132 tree outer_type = type;
15134 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15135 && tree_expr_nonzero_warnv_p (op0,
15136 strict_overflow_p));
15138 break;
15140 case NON_LVALUE_EXPR:
15141 return tree_expr_nonzero_warnv_p (op0,
15142 strict_overflow_p);
15144 default:
15145 break;
15148 return false;
15151 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15152 For floating point we further ensure that T is not denormal.
15153 Similar logic is present in nonzero_address in rtlanal.h.
15155 If the return value is based on the assumption that signed overflow
15156 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15157 change *STRICT_OVERFLOW_P. */
15159 bool
15160 tree_binary_nonzero_warnv_p (enum tree_code code,
15161 tree type,
15162 tree op0,
15163 tree op1, bool *strict_overflow_p)
15165 bool sub_strict_overflow_p;
15166 switch (code)
15168 case POINTER_PLUS_EXPR:
15169 case PLUS_EXPR:
15170 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15172 /* With the presence of negative values it is hard
15173 to say something. */
15174 sub_strict_overflow_p = false;
15175 if (!tree_expr_nonnegative_warnv_p (op0,
15176 &sub_strict_overflow_p)
15177 || !tree_expr_nonnegative_warnv_p (op1,
15178 &sub_strict_overflow_p))
15179 return false;
15180 /* One of operands must be positive and the other non-negative. */
15181 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15182 overflows, on a twos-complement machine the sum of two
15183 nonnegative numbers can never be zero. */
15184 return (tree_expr_nonzero_warnv_p (op0,
15185 strict_overflow_p)
15186 || tree_expr_nonzero_warnv_p (op1,
15187 strict_overflow_p));
15189 break;
15191 case MULT_EXPR:
15192 if (TYPE_OVERFLOW_UNDEFINED (type))
15194 if (tree_expr_nonzero_warnv_p (op0,
15195 strict_overflow_p)
15196 && tree_expr_nonzero_warnv_p (op1,
15197 strict_overflow_p))
15199 *strict_overflow_p = true;
15200 return true;
15203 break;
15205 case MIN_EXPR:
15206 sub_strict_overflow_p = false;
15207 if (tree_expr_nonzero_warnv_p (op0,
15208 &sub_strict_overflow_p)
15209 && tree_expr_nonzero_warnv_p (op1,
15210 &sub_strict_overflow_p))
15212 if (sub_strict_overflow_p)
15213 *strict_overflow_p = true;
15215 break;
15217 case MAX_EXPR:
15218 sub_strict_overflow_p = false;
15219 if (tree_expr_nonzero_warnv_p (op0,
15220 &sub_strict_overflow_p))
15222 if (sub_strict_overflow_p)
15223 *strict_overflow_p = true;
15225 /* When both operands are nonzero, then MAX must be too. */
15226 if (tree_expr_nonzero_warnv_p (op1,
15227 strict_overflow_p))
15228 return true;
15230 /* MAX where operand 0 is positive is positive. */
15231 return tree_expr_nonnegative_warnv_p (op0,
15232 strict_overflow_p);
15234 /* MAX where operand 1 is positive is positive. */
15235 else if (tree_expr_nonzero_warnv_p (op1,
15236 &sub_strict_overflow_p)
15237 && tree_expr_nonnegative_warnv_p (op1,
15238 &sub_strict_overflow_p))
15240 if (sub_strict_overflow_p)
15241 *strict_overflow_p = true;
15242 return true;
15244 break;
15246 case BIT_IOR_EXPR:
15247 return (tree_expr_nonzero_warnv_p (op1,
15248 strict_overflow_p)
15249 || tree_expr_nonzero_warnv_p (op0,
15250 strict_overflow_p));
15252 default:
15253 break;
15256 return false;
15259 /* Return true when T is an address and is known to be nonzero.
15260 For floating point we further ensure that T is not denormal.
15261 Similar logic is present in nonzero_address in rtlanal.h.
15263 If the return value is based on the assumption that signed overflow
15264 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15265 change *STRICT_OVERFLOW_P. */
15267 bool
15268 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15270 bool sub_strict_overflow_p;
15271 switch (TREE_CODE (t))
15273 case INTEGER_CST:
15274 return !integer_zerop (t);
15276 case ADDR_EXPR:
15278 tree base = TREE_OPERAND (t, 0);
15280 if (!DECL_P (base))
15281 base = get_base_address (base);
15283 if (base && TREE_CODE (base) == TARGET_EXPR)
15284 base = TARGET_EXPR_SLOT (base);
15286 if (!base)
15287 return false;
15289 /* For objects in symbol table check if we know they are non-zero.
15290 Don't do anything for variables and functions before symtab is built;
15291 it is quite possible that they will be declared weak later. */
15292 int nonzero_addr = maybe_nonzero_address (base);
15293 if (nonzero_addr >= 0)
15294 return nonzero_addr;
15296 /* Constants are never weak. */
15297 if (CONSTANT_CLASS_P (base))
15298 return true;
15300 return false;
15303 case COND_EXPR:
15304 sub_strict_overflow_p = false;
15305 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15306 &sub_strict_overflow_p)
15307 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15308 &sub_strict_overflow_p))
15310 if (sub_strict_overflow_p)
15311 *strict_overflow_p = true;
15312 return true;
15314 break;
15316 case SSA_NAME:
15317 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15318 break;
15319 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15321 default:
15322 break;
15324 return false;
15327 #define integer_valued_real_p(X) \
15328 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15330 #define RECURSE(X) \
15331 ((integer_valued_real_p) (X, depth + 1))
15333 /* Return true if the floating point result of (CODE OP0) has an
15334 integer value. We also allow +Inf, -Inf and NaN to be considered
15335 integer values. Return false for signaling NaN.
15337 DEPTH is the current nesting depth of the query. */
15339 bool
15340 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15342 switch (code)
15344 case FLOAT_EXPR:
15345 return true;
15347 case ABS_EXPR:
15348 return RECURSE (op0);
15350 CASE_CONVERT:
15352 tree type = TREE_TYPE (op0);
15353 if (TREE_CODE (type) == INTEGER_TYPE)
15354 return true;
15355 if (TREE_CODE (type) == REAL_TYPE)
15356 return RECURSE (op0);
15357 break;
15360 default:
15361 break;
15363 return false;
15366 /* Return true if the floating point result of (CODE OP0 OP1) has an
15367 integer value. We also allow +Inf, -Inf and NaN to be considered
15368 integer values. Return false for signaling NaN.
15370 DEPTH is the current nesting depth of the query. */
15372 bool
15373 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15375 switch (code)
15377 case PLUS_EXPR:
15378 case MINUS_EXPR:
15379 case MULT_EXPR:
15380 case MIN_EXPR:
15381 case MAX_EXPR:
15382 return RECURSE (op0) && RECURSE (op1);
15384 default:
15385 break;
15387 return false;
15390 /* Return true if the floating point result of calling FNDECL with arguments
15391 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15392 considered integer values. Return false for signaling NaN. If FNDECL
15393 takes fewer than 2 arguments, the remaining ARGn are null.
15395 DEPTH is the current nesting depth of the query. */
15397 bool
15398 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15400 switch (fn)
15402 CASE_CFN_CEIL:
15403 CASE_CFN_CEIL_FN:
15404 CASE_CFN_FLOOR:
15405 CASE_CFN_FLOOR_FN:
15406 CASE_CFN_NEARBYINT:
15407 CASE_CFN_NEARBYINT_FN:
15408 CASE_CFN_RINT:
15409 CASE_CFN_RINT_FN:
15410 CASE_CFN_ROUND:
15411 CASE_CFN_ROUND_FN:
15412 CASE_CFN_ROUNDEVEN:
15413 CASE_CFN_ROUNDEVEN_FN:
15414 CASE_CFN_TRUNC:
15415 CASE_CFN_TRUNC_FN:
15416 return true;
15418 CASE_CFN_FMIN:
15419 CASE_CFN_FMIN_FN:
15420 CASE_CFN_FMAX:
15421 CASE_CFN_FMAX_FN:
15422 return RECURSE (arg0) && RECURSE (arg1);
15424 default:
15425 break;
15427 return false;
15430 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15431 has an integer value. We also allow +Inf, -Inf and NaN to be
15432 considered integer values. Return false for signaling NaN.
15434 DEPTH is the current nesting depth of the query. */
15436 bool
15437 integer_valued_real_single_p (tree t, int depth)
15439 switch (TREE_CODE (t))
15441 case REAL_CST:
15442 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15444 case COND_EXPR:
15445 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15447 case SSA_NAME:
15448 /* Limit the depth of recursion to avoid quadratic behavior.
15449 This is expected to catch almost all occurrences in practice.
15450 If this code misses important cases that unbounded recursion
15451 would not, passes that need this information could be revised
15452 to provide it through dataflow propagation. */
15453 return (!name_registered_for_update_p (t)
15454 && depth < param_max_ssa_name_query_depth
15455 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15456 depth));
15458 default:
15459 break;
15461 return false;
15464 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15465 has an integer value. We also allow +Inf, -Inf and NaN to be
15466 considered integer values. Return false for signaling NaN.
15468 DEPTH is the current nesting depth of the query. */
15470 static bool
15471 integer_valued_real_invalid_p (tree t, int depth)
15473 switch (TREE_CODE (t))
15475 case COMPOUND_EXPR:
15476 case MODIFY_EXPR:
15477 case BIND_EXPR:
15478 return RECURSE (TREE_OPERAND (t, 1));
15480 case SAVE_EXPR:
15481 return RECURSE (TREE_OPERAND (t, 0));
15483 default:
15484 break;
15486 return false;
15489 #undef RECURSE
15490 #undef integer_valued_real_p
15492 /* Return true if the floating point expression T has an integer value.
15493 We also allow +Inf, -Inf and NaN to be considered integer values.
15494 Return false for signaling NaN.
15496 DEPTH is the current nesting depth of the query. */
15498 bool
15499 integer_valued_real_p (tree t, int depth)
15501 if (t == error_mark_node)
15502 return false;
15504 STRIP_ANY_LOCATION_WRAPPER (t);
15506 tree_code code = TREE_CODE (t);
15507 switch (TREE_CODE_CLASS (code))
15509 case tcc_binary:
15510 case tcc_comparison:
15511 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15512 TREE_OPERAND (t, 1), depth);
15514 case tcc_unary:
15515 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15517 case tcc_constant:
15518 case tcc_declaration:
15519 case tcc_reference:
15520 return integer_valued_real_single_p (t, depth);
15522 default:
15523 break;
15526 switch (code)
15528 case COND_EXPR:
15529 case SSA_NAME:
15530 return integer_valued_real_single_p (t, depth);
15532 case CALL_EXPR:
15534 tree arg0 = (call_expr_nargs (t) > 0
15535 ? CALL_EXPR_ARG (t, 0)
15536 : NULL_TREE);
15537 tree arg1 = (call_expr_nargs (t) > 1
15538 ? CALL_EXPR_ARG (t, 1)
15539 : NULL_TREE);
15540 return integer_valued_real_call_p (get_call_combined_fn (t),
15541 arg0, arg1, depth);
15544 default:
15545 return integer_valued_real_invalid_p (t, depth);
15549 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15550 attempt to fold the expression to a constant without modifying TYPE,
15551 OP0 or OP1.
15553 If the expression could be simplified to a constant, then return
15554 the constant. If the expression would not be simplified to a
15555 constant, then return NULL_TREE. */
15557 tree
15558 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15560 tree tem = fold_binary (code, type, op0, op1);
15561 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15564 /* Given the components of a unary expression CODE, TYPE and OP0,
15565 attempt to fold the expression to a constant without modifying
15566 TYPE or OP0.
15568 If the expression could be simplified to a constant, then return
15569 the constant. If the expression would not be simplified to a
15570 constant, then return NULL_TREE. */
15572 tree
15573 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15575 tree tem = fold_unary (code, type, op0);
15576 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15579 /* If EXP represents referencing an element in a constant string
15580 (either via pointer arithmetic or array indexing), return the
15581 tree representing the value accessed, otherwise return NULL. */
15583 tree
15584 fold_read_from_constant_string (tree exp)
15586 if ((TREE_CODE (exp) == INDIRECT_REF
15587 || TREE_CODE (exp) == ARRAY_REF)
15588 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15590 tree exp1 = TREE_OPERAND (exp, 0);
15591 tree index;
15592 tree string;
15593 location_t loc = EXPR_LOCATION (exp);
15595 if (TREE_CODE (exp) == INDIRECT_REF)
15596 string = string_constant (exp1, &index, NULL, NULL);
15597 else
15599 tree low_bound = array_ref_low_bound (exp);
15600 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15602 /* Optimize the special-case of a zero lower bound.
15604 We convert the low_bound to sizetype to avoid some problems
15605 with constant folding. (E.g. suppose the lower bound is 1,
15606 and its mode is QI. Without the conversion,l (ARRAY
15607 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15608 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15609 if (! integer_zerop (low_bound))
15610 index = size_diffop_loc (loc, index,
15611 fold_convert_loc (loc, sizetype, low_bound));
15613 string = exp1;
15616 scalar_int_mode char_mode;
15617 if (string
15618 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15619 && TREE_CODE (string) == STRING_CST
15620 && tree_fits_uhwi_p (index)
15621 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15622 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15623 &char_mode)
15624 && GET_MODE_SIZE (char_mode) == 1)
15625 return build_int_cst_type (TREE_TYPE (exp),
15626 (TREE_STRING_POINTER (string)
15627 [TREE_INT_CST_LOW (index)]));
15629 return NULL;
15632 /* Folds a read from vector element at IDX of vector ARG. */
15634 tree
15635 fold_read_from_vector (tree arg, poly_uint64 idx)
15637 unsigned HOST_WIDE_INT i;
15638 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15639 && known_ge (idx, 0u)
15640 && idx.is_constant (&i))
15642 if (TREE_CODE (arg) == VECTOR_CST)
15643 return VECTOR_CST_ELT (arg, i);
15644 else if (TREE_CODE (arg) == CONSTRUCTOR)
15646 if (CONSTRUCTOR_NELTS (arg)
15647 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15648 return NULL_TREE;
15649 if (i >= CONSTRUCTOR_NELTS (arg))
15650 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15651 return CONSTRUCTOR_ELT (arg, i)->value;
15654 return NULL_TREE;
15657 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15658 an integer constant, real, or fixed-point constant.
15660 TYPE is the type of the result. */
15662 static tree
15663 fold_negate_const (tree arg0, tree type)
15665 tree t = NULL_TREE;
15667 switch (TREE_CODE (arg0))
15669 case REAL_CST:
15670 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15671 break;
15673 case FIXED_CST:
15675 FIXED_VALUE_TYPE f;
15676 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15677 &(TREE_FIXED_CST (arg0)), NULL,
15678 TYPE_SATURATING (type));
15679 t = build_fixed (type, f);
15680 /* Propagate overflow flags. */
15681 if (overflow_p | TREE_OVERFLOW (arg0))
15682 TREE_OVERFLOW (t) = 1;
15683 break;
15686 default:
15687 if (poly_int_tree_p (arg0))
15689 wi::overflow_type overflow;
15690 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15691 t = force_fit_type (type, res, 1,
15692 (overflow && ! TYPE_UNSIGNED (type))
15693 || TREE_OVERFLOW (arg0));
15694 break;
15697 gcc_unreachable ();
15700 return t;
15703 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15704 an integer constant or real constant.
15706 TYPE is the type of the result. */
15708 tree
15709 fold_abs_const (tree arg0, tree type)
15711 tree t = NULL_TREE;
15713 switch (TREE_CODE (arg0))
15715 case INTEGER_CST:
15717 /* If the value is unsigned or non-negative, then the absolute value
15718 is the same as the ordinary value. */
15719 wide_int val = wi::to_wide (arg0);
15720 wi::overflow_type overflow = wi::OVF_NONE;
15721 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15724 /* If the value is negative, then the absolute value is
15725 its negation. */
15726 else
15727 val = wi::neg (val, &overflow);
15729 /* Force to the destination type, set TREE_OVERFLOW for signed
15730 TYPE only. */
15731 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15733 break;
15735 case REAL_CST:
15736 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15737 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15738 else
15739 t = arg0;
15740 break;
15742 default:
15743 gcc_unreachable ();
15746 return t;
15749 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15750 constant. TYPE is the type of the result. */
15752 static tree
15753 fold_not_const (const_tree arg0, tree type)
15755 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15757 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15760 /* Given CODE, a relational operator, the target type, TYPE and two
15761 constant operands OP0 and OP1, return the result of the
15762 relational operation. If the result is not a compile time
15763 constant, then return NULL_TREE. */
15765 static tree
15766 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15768 int result, invert;
15770 /* From here on, the only cases we handle are when the result is
15771 known to be a constant. */
15773 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15775 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15776 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15778 /* Handle the cases where either operand is a NaN. */
15779 if (real_isnan (c0) || real_isnan (c1))
15781 switch (code)
15783 case EQ_EXPR:
15784 case ORDERED_EXPR:
15785 result = 0;
15786 break;
15788 case NE_EXPR:
15789 case UNORDERED_EXPR:
15790 case UNLT_EXPR:
15791 case UNLE_EXPR:
15792 case UNGT_EXPR:
15793 case UNGE_EXPR:
15794 case UNEQ_EXPR:
15795 result = 1;
15796 break;
15798 case LT_EXPR:
15799 case LE_EXPR:
15800 case GT_EXPR:
15801 case GE_EXPR:
15802 case LTGT_EXPR:
15803 if (flag_trapping_math)
15804 return NULL_TREE;
15805 result = 0;
15806 break;
15808 default:
15809 gcc_unreachable ();
15812 return constant_boolean_node (result, type);
15815 return constant_boolean_node (real_compare (code, c0, c1), type);
15818 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15820 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15821 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15822 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15825 /* Handle equality/inequality of complex constants. */
15826 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15828 tree rcond = fold_relational_const (code, type,
15829 TREE_REALPART (op0),
15830 TREE_REALPART (op1));
15831 tree icond = fold_relational_const (code, type,
15832 TREE_IMAGPART (op0),
15833 TREE_IMAGPART (op1));
15834 if (code == EQ_EXPR)
15835 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15836 else if (code == NE_EXPR)
15837 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15838 else
15839 return NULL_TREE;
15842 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15844 if (!VECTOR_TYPE_P (type))
15846 /* Have vector comparison with scalar boolean result. */
15847 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15848 && known_eq (VECTOR_CST_NELTS (op0),
15849 VECTOR_CST_NELTS (op1)));
15850 unsigned HOST_WIDE_INT nunits;
15851 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15852 return NULL_TREE;
15853 for (unsigned i = 0; i < nunits; i++)
15855 tree elem0 = VECTOR_CST_ELT (op0, i);
15856 tree elem1 = VECTOR_CST_ELT (op1, i);
15857 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15858 if (tmp == NULL_TREE)
15859 return NULL_TREE;
15860 if (integer_zerop (tmp))
15861 return constant_boolean_node (code == NE_EXPR, type);
15863 return constant_boolean_node (code == EQ_EXPR, type);
15865 tree_vector_builder elts;
15866 if (!elts.new_binary_operation (type, op0, op1, false))
15867 return NULL_TREE;
15868 unsigned int count = elts.encoded_nelts ();
15869 for (unsigned i = 0; i < count; i++)
15871 tree elem_type = TREE_TYPE (type);
15872 tree elem0 = VECTOR_CST_ELT (op0, i);
15873 tree elem1 = VECTOR_CST_ELT (op1, i);
15875 tree tem = fold_relational_const (code, elem_type,
15876 elem0, elem1);
15878 if (tem == NULL_TREE)
15879 return NULL_TREE;
15881 elts.quick_push (build_int_cst (elem_type,
15882 integer_zerop (tem) ? 0 : -1));
15885 return elts.build ();
15888 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15890 To compute GT, swap the arguments and do LT.
15891 To compute GE, do LT and invert the result.
15892 To compute LE, swap the arguments, do LT and invert the result.
15893 To compute NE, do EQ and invert the result.
15895 Therefore, the code below must handle only EQ and LT. */
15897 if (code == LE_EXPR || code == GT_EXPR)
15899 std::swap (op0, op1);
15900 code = swap_tree_comparison (code);
15903 /* Note that it is safe to invert for real values here because we
15904 have already handled the one case that it matters. */
15906 invert = 0;
15907 if (code == NE_EXPR || code == GE_EXPR)
15909 invert = 1;
15910 code = invert_tree_comparison (code, false);
15913 /* Compute a result for LT or EQ if args permit;
15914 Otherwise return T. */
15915 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15917 if (code == EQ_EXPR)
15918 result = tree_int_cst_equal (op0, op1);
15919 else
15920 result = tree_int_cst_lt (op0, op1);
15922 else
15923 return NULL_TREE;
15925 if (invert)
15926 result ^= 1;
15927 return constant_boolean_node (result, type);
15930 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15931 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15932 itself. */
15934 tree
15935 fold_build_cleanup_point_expr (tree type, tree expr)
15937 /* If the expression does not have side effects then we don't have to wrap
15938 it with a cleanup point expression. */
15939 if (!TREE_SIDE_EFFECTS (expr))
15940 return expr;
15942 /* If the expression is a return, check to see if the expression inside the
15943 return has no side effects or the right hand side of the modify expression
15944 inside the return. If either don't have side effects set we don't need to
15945 wrap the expression in a cleanup point expression. Note we don't check the
15946 left hand side of the modify because it should always be a return decl. */
15947 if (TREE_CODE (expr) == RETURN_EXPR)
15949 tree op = TREE_OPERAND (expr, 0);
15950 if (!op || !TREE_SIDE_EFFECTS (op))
15951 return expr;
15952 op = TREE_OPERAND (op, 1);
15953 if (!TREE_SIDE_EFFECTS (op))
15954 return expr;
15957 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15960 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15961 of an indirection through OP0, or NULL_TREE if no simplification is
15962 possible. */
15964 tree
15965 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15967 tree sub = op0;
15968 tree subtype;
15969 poly_uint64 const_op01;
15971 STRIP_NOPS (sub);
15972 subtype = TREE_TYPE (sub);
15973 if (!POINTER_TYPE_P (subtype)
15974 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15975 return NULL_TREE;
15977 if (TREE_CODE (sub) == ADDR_EXPR)
15979 tree op = TREE_OPERAND (sub, 0);
15980 tree optype = TREE_TYPE (op);
15982 /* *&CONST_DECL -> to the value of the const decl. */
15983 if (TREE_CODE (op) == CONST_DECL)
15984 return DECL_INITIAL (op);
15985 /* *&p => p; make sure to handle *&"str"[cst] here. */
15986 if (type == optype)
15988 tree fop = fold_read_from_constant_string (op);
15989 if (fop)
15990 return fop;
15991 else
15992 return op;
15994 /* *(foo *)&fooarray => fooarray[0] */
15995 else if (TREE_CODE (optype) == ARRAY_TYPE
15996 && type == TREE_TYPE (optype)
15997 && (!in_gimple_form
15998 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16000 tree type_domain = TYPE_DOMAIN (optype);
16001 tree min_val = size_zero_node;
16002 if (type_domain && TYPE_MIN_VALUE (type_domain))
16003 min_val = TYPE_MIN_VALUE (type_domain);
16004 if (in_gimple_form
16005 && TREE_CODE (min_val) != INTEGER_CST)
16006 return NULL_TREE;
16007 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16008 NULL_TREE, NULL_TREE);
16010 /* *(foo *)&complexfoo => __real__ complexfoo */
16011 else if (TREE_CODE (optype) == COMPLEX_TYPE
16012 && type == TREE_TYPE (optype))
16013 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16014 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16015 else if (VECTOR_TYPE_P (optype)
16016 && type == TREE_TYPE (optype))
16018 tree part_width = TYPE_SIZE (type);
16019 tree index = bitsize_int (0);
16020 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16021 index);
16025 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16026 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16028 tree op00 = TREE_OPERAND (sub, 0);
16029 tree op01 = TREE_OPERAND (sub, 1);
16031 STRIP_NOPS (op00);
16032 if (TREE_CODE (op00) == ADDR_EXPR)
16034 tree op00type;
16035 op00 = TREE_OPERAND (op00, 0);
16036 op00type = TREE_TYPE (op00);
16038 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16039 if (VECTOR_TYPE_P (op00type)
16040 && type == TREE_TYPE (op00type)
16041 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16042 but we want to treat offsets with MSB set as negative.
16043 For the code below negative offsets are invalid and
16044 TYPE_SIZE of the element is something unsigned, so
16045 check whether op01 fits into poly_int64, which implies
16046 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16047 then just use poly_uint64 because we want to treat the
16048 value as unsigned. */
16049 && tree_fits_poly_int64_p (op01))
16051 tree part_width = TYPE_SIZE (type);
16052 poly_uint64 max_offset
16053 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16054 * TYPE_VECTOR_SUBPARTS (op00type));
16055 if (known_lt (const_op01, max_offset))
16057 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16058 return fold_build3_loc (loc,
16059 BIT_FIELD_REF, type, op00,
16060 part_width, index);
16063 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16064 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16065 && type == TREE_TYPE (op00type))
16067 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16068 const_op01))
16069 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16071 /* ((foo *)&fooarray)[1] => fooarray[1] */
16072 else if (TREE_CODE (op00type) == ARRAY_TYPE
16073 && type == TREE_TYPE (op00type))
16075 tree type_domain = TYPE_DOMAIN (op00type);
16076 tree min_val = size_zero_node;
16077 if (type_domain && TYPE_MIN_VALUE (type_domain))
16078 min_val = TYPE_MIN_VALUE (type_domain);
16079 poly_uint64 type_size, index;
16080 if (poly_int_tree_p (min_val)
16081 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16082 && multiple_p (const_op01, type_size, &index))
16084 poly_offset_int off = index + wi::to_poly_offset (min_val);
16085 op01 = wide_int_to_tree (sizetype, off);
16086 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16087 NULL_TREE, NULL_TREE);
16093 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16094 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16095 && type == TREE_TYPE (TREE_TYPE (subtype))
16096 && (!in_gimple_form
16097 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16099 tree type_domain;
16100 tree min_val = size_zero_node;
16101 sub = build_fold_indirect_ref_loc (loc, sub);
16102 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16103 if (type_domain && TYPE_MIN_VALUE (type_domain))
16104 min_val = TYPE_MIN_VALUE (type_domain);
16105 if (in_gimple_form
16106 && TREE_CODE (min_val) != INTEGER_CST)
16107 return NULL_TREE;
16108 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16109 NULL_TREE);
16112 return NULL_TREE;
16115 /* Builds an expression for an indirection through T, simplifying some
16116 cases. */
16118 tree
16119 build_fold_indirect_ref_loc (location_t loc, tree t)
16121 tree type = TREE_TYPE (TREE_TYPE (t));
16122 tree sub = fold_indirect_ref_1 (loc, type, t);
16124 if (sub)
16125 return sub;
16127 return build1_loc (loc, INDIRECT_REF, type, t);
16130 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16132 tree
16133 fold_indirect_ref_loc (location_t loc, tree t)
16135 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16137 if (sub)
16138 return sub;
16139 else
16140 return t;
16143 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16144 whose result is ignored. The type of the returned tree need not be
16145 the same as the original expression. */
16147 tree
16148 fold_ignored_result (tree t)
16150 if (!TREE_SIDE_EFFECTS (t))
16151 return integer_zero_node;
16153 for (;;)
16154 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16156 case tcc_unary:
16157 t = TREE_OPERAND (t, 0);
16158 break;
16160 case tcc_binary:
16161 case tcc_comparison:
16162 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16163 t = TREE_OPERAND (t, 0);
16164 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16165 t = TREE_OPERAND (t, 1);
16166 else
16167 return t;
16168 break;
16170 case tcc_expression:
16171 switch (TREE_CODE (t))
16173 case COMPOUND_EXPR:
16174 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16175 return t;
16176 t = TREE_OPERAND (t, 0);
16177 break;
16179 case COND_EXPR:
16180 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16181 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16182 return t;
16183 t = TREE_OPERAND (t, 0);
16184 break;
16186 default:
16187 return t;
16189 break;
16191 default:
16192 return t;
16196 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16198 tree
16199 round_up_loc (location_t loc, tree value, unsigned int divisor)
16201 tree div = NULL_TREE;
16203 if (divisor == 1)
16204 return value;
16206 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16207 have to do anything. Only do this when we are not given a const,
16208 because in that case, this check is more expensive than just
16209 doing it. */
16210 if (TREE_CODE (value) != INTEGER_CST)
16212 div = build_int_cst (TREE_TYPE (value), divisor);
16214 if (multiple_of_p (TREE_TYPE (value), value, div))
16215 return value;
16218 /* If divisor is a power of two, simplify this to bit manipulation. */
16219 if (pow2_or_zerop (divisor))
16221 if (TREE_CODE (value) == INTEGER_CST)
16223 wide_int val = wi::to_wide (value);
16224 bool overflow_p;
16226 if ((val & (divisor - 1)) == 0)
16227 return value;
16229 overflow_p = TREE_OVERFLOW (value);
16230 val += divisor - 1;
16231 val &= (int) -divisor;
16232 if (val == 0)
16233 overflow_p = true;
16235 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16237 else
16239 tree t;
16241 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16242 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16243 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16244 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16247 else
16249 if (!div)
16250 div = build_int_cst (TREE_TYPE (value), divisor);
16251 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16252 value = size_binop_loc (loc, MULT_EXPR, value, div);
16255 return value;
16258 /* Likewise, but round down. */
16260 tree
16261 round_down_loc (location_t loc, tree value, int divisor)
16263 tree div = NULL_TREE;
16265 gcc_assert (divisor > 0);
16266 if (divisor == 1)
16267 return value;
16269 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16270 have to do anything. Only do this when we are not given a const,
16271 because in that case, this check is more expensive than just
16272 doing it. */
16273 if (TREE_CODE (value) != INTEGER_CST)
16275 div = build_int_cst (TREE_TYPE (value), divisor);
16277 if (multiple_of_p (TREE_TYPE (value), value, div))
16278 return value;
16281 /* If divisor is a power of two, simplify this to bit manipulation. */
16282 if (pow2_or_zerop (divisor))
16284 tree t;
16286 t = build_int_cst (TREE_TYPE (value), -divisor);
16287 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16289 else
16291 if (!div)
16292 div = build_int_cst (TREE_TYPE (value), divisor);
16293 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16294 value = size_binop_loc (loc, MULT_EXPR, value, div);
16297 return value;
16300 /* Returns the pointer to the base of the object addressed by EXP and
16301 extracts the information about the offset of the access, storing it
16302 to PBITPOS and POFFSET. */
16304 static tree
16305 split_address_to_core_and_offset (tree exp,
16306 poly_int64_pod *pbitpos, tree *poffset)
16308 tree core;
16309 machine_mode mode;
16310 int unsignedp, reversep, volatilep;
16311 poly_int64 bitsize;
16312 location_t loc = EXPR_LOCATION (exp);
16314 if (TREE_CODE (exp) == ADDR_EXPR)
16316 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16317 poffset, &mode, &unsignedp, &reversep,
16318 &volatilep);
16319 core = build_fold_addr_expr_loc (loc, core);
16321 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16323 core = TREE_OPERAND (exp, 0);
16324 STRIP_NOPS (core);
16325 *pbitpos = 0;
16326 *poffset = TREE_OPERAND (exp, 1);
16327 if (poly_int_tree_p (*poffset))
16329 poly_offset_int tem
16330 = wi::sext (wi::to_poly_offset (*poffset),
16331 TYPE_PRECISION (TREE_TYPE (*poffset)));
16332 tem <<= LOG2_BITS_PER_UNIT;
16333 if (tem.to_shwi (pbitpos))
16334 *poffset = NULL_TREE;
16337 else
16339 core = exp;
16340 *pbitpos = 0;
16341 *poffset = NULL_TREE;
16344 return core;
16347 /* Returns true if addresses of E1 and E2 differ by a constant, false
16348 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16350 bool
16351 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16353 tree core1, core2;
16354 poly_int64 bitpos1, bitpos2;
16355 tree toffset1, toffset2, tdiff, type;
16357 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16358 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16360 poly_int64 bytepos1, bytepos2;
16361 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16362 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16363 || !operand_equal_p (core1, core2, 0))
16364 return false;
16366 if (toffset1 && toffset2)
16368 type = TREE_TYPE (toffset1);
16369 if (type != TREE_TYPE (toffset2))
16370 toffset2 = fold_convert (type, toffset2);
16372 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16373 if (!cst_and_fits_in_hwi (tdiff))
16374 return false;
16376 *diff = int_cst_value (tdiff);
16378 else if (toffset1 || toffset2)
16380 /* If only one of the offsets is non-constant, the difference cannot
16381 be a constant. */
16382 return false;
16384 else
16385 *diff = 0;
16387 *diff += bytepos1 - bytepos2;
16388 return true;
16391 /* Return OFF converted to a pointer offset type suitable as offset for
16392 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16393 tree
16394 convert_to_ptrofftype_loc (location_t loc, tree off)
16396 if (ptrofftype_p (TREE_TYPE (off)))
16397 return off;
16398 return fold_convert_loc (loc, sizetype, off);
16401 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16402 tree
16403 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16405 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16406 ptr, convert_to_ptrofftype_loc (loc, off));
16409 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16410 tree
16411 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16413 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16414 ptr, size_int (off));
16417 /* Return a pointer to a NUL-terminated string containing the sequence
16418 of bytes corresponding to the representation of the object referred to
16419 by SRC (or a subsequence of such bytes within it if SRC is a reference
16420 to an initialized constant array plus some constant offset).
16421 Set *STRSIZE the number of bytes in the constant sequence including
16422 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16423 where A is the array that stores the constant sequence that SRC points
16424 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16425 need not point to a string or even an array of characters but may point
16426 to an object of any type. */
16428 const char *
16429 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16431 /* The offset into the array A storing the string, and A's byte size. */
16432 tree offset_node;
16433 tree mem_size;
16435 if (strsize)
16436 *strsize = 0;
16438 if (strsize)
16439 src = byte_representation (src, &offset_node, &mem_size, NULL);
16440 else
16441 src = string_constant (src, &offset_node, &mem_size, NULL);
16442 if (!src)
16443 return NULL;
16445 unsigned HOST_WIDE_INT offset = 0;
16446 if (offset_node != NULL_TREE)
16448 if (!tree_fits_uhwi_p (offset_node))
16449 return NULL;
16450 else
16451 offset = tree_to_uhwi (offset_node);
16454 if (!tree_fits_uhwi_p (mem_size))
16455 return NULL;
16457 /* ARRAY_SIZE is the byte size of the array the constant sequence
16458 is stored in and equal to sizeof A. INIT_BYTES is the number
16459 of bytes in the constant sequence used to initialize the array,
16460 including any embedded NULs as well as the terminating NUL (for
16461 strings), but not including any trailing zeros/NULs past
16462 the terminating one appended implicitly to a string literal to
16463 zero out the remainder of the array it's stored in. For example,
16464 given:
16465 const char a[7] = "abc\0d";
16466 n = strlen (a + 1);
16467 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16468 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16469 is equal to strlen (A) + 1. */
16470 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16471 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16472 const char *string = TREE_STRING_POINTER (src);
16474 /* Ideally this would turn into a gcc_checking_assert over time. */
16475 if (init_bytes > array_size)
16476 init_bytes = array_size;
16478 if (init_bytes == 0 || offset >= array_size)
16479 return NULL;
16481 if (strsize)
16483 /* Compute and store the number of characters from the beginning
16484 of the substring at OFFSET to the end, including the terminating
16485 nul. Offsets past the initial length refer to null strings. */
16486 if (offset < init_bytes)
16487 *strsize = init_bytes - offset;
16488 else
16489 *strsize = 1;
16491 else
16493 tree eltype = TREE_TYPE (TREE_TYPE (src));
16494 /* Support only properly NUL-terminated single byte strings. */
16495 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16496 return NULL;
16497 if (string[init_bytes - 1] != '\0')
16498 return NULL;
16501 return offset < init_bytes ? string + offset : "";
16504 /* Return a pointer to a NUL-terminated string corresponding to
16505 the expression STR referencing a constant string, possibly
16506 involving a constant offset. Return null if STR either doesn't
16507 reference a constant string or if it involves a nonconstant
16508 offset. */
16510 const char *
16511 c_getstr (tree str)
16513 return getbyterep (str, NULL);
16516 /* Given a tree T, compute which bits in T may be nonzero. */
16518 wide_int
16519 tree_nonzero_bits (const_tree t)
16521 switch (TREE_CODE (t))
16523 case INTEGER_CST:
16524 return wi::to_wide (t);
16525 case SSA_NAME:
16526 return get_nonzero_bits (t);
16527 case NON_LVALUE_EXPR:
16528 case SAVE_EXPR:
16529 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16530 case BIT_AND_EXPR:
16531 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16532 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16533 case BIT_IOR_EXPR:
16534 case BIT_XOR_EXPR:
16535 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16536 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16537 case COND_EXPR:
16538 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16539 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16540 CASE_CONVERT:
16541 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16542 TYPE_PRECISION (TREE_TYPE (t)),
16543 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16544 case PLUS_EXPR:
16545 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16547 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16548 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16549 if (wi::bit_and (nzbits1, nzbits2) == 0)
16550 return wi::bit_or (nzbits1, nzbits2);
16552 break;
16553 case LSHIFT_EXPR:
16554 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16556 tree type = TREE_TYPE (t);
16557 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16558 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16559 TYPE_PRECISION (type));
16560 return wi::neg_p (arg1)
16561 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16562 : wi::lshift (nzbits, arg1);
16564 break;
16565 case RSHIFT_EXPR:
16566 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16568 tree type = TREE_TYPE (t);
16569 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16570 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16571 TYPE_PRECISION (type));
16572 return wi::neg_p (arg1)
16573 ? wi::lshift (nzbits, -arg1)
16574 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16576 break;
16577 default:
16578 break;
16581 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
16584 /* Helper function for address compare simplifications in match.pd.
16585 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16586 TYPE is the type of comparison operands.
16587 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16588 GENERIC is true if GENERIC folding and false for GIMPLE folding.
16589 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16590 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16591 and 2 if unknown. */
16594 address_compare (tree_code code, tree type, tree op0, tree op1,
16595 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16596 bool generic)
16598 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16599 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16600 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16601 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16602 if (base0 && TREE_CODE (base0) == MEM_REF)
16604 off0 += mem_ref_offset (base0).force_shwi ();
16605 base0 = TREE_OPERAND (base0, 0);
16607 if (base1 && TREE_CODE (base1) == MEM_REF)
16609 off1 += mem_ref_offset (base1).force_shwi ();
16610 base1 = TREE_OPERAND (base1, 0);
16612 if (base0 == NULL_TREE || base1 == NULL_TREE)
16613 return 2;
16615 int equal = 2;
16616 /* Punt in GENERIC on variables with value expressions;
16617 the value expressions might point to fields/elements
16618 of other vars etc. */
16619 if (generic
16620 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16621 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16622 return 2;
16623 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16625 symtab_node *node0 = symtab_node::get_create (base0);
16626 symtab_node *node1 = symtab_node::get_create (base1);
16627 equal = node0->equal_address_to (node1);
16629 else if ((DECL_P (base0)
16630 || TREE_CODE (base0) == SSA_NAME
16631 || TREE_CODE (base0) == STRING_CST)
16632 && (DECL_P (base1)
16633 || TREE_CODE (base1) == SSA_NAME
16634 || TREE_CODE (base1) == STRING_CST))
16635 equal = (base0 == base1);
16636 /* Assume different STRING_CSTs with the same content will be
16637 merged. */
16638 if (equal == 0
16639 && TREE_CODE (base0) == STRING_CST
16640 && TREE_CODE (base1) == STRING_CST
16641 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16642 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16643 TREE_STRING_LENGTH (base0)) == 0)
16644 equal = 1;
16645 if (equal == 1)
16647 if (code == EQ_EXPR
16648 || code == NE_EXPR
16649 /* If the offsets are equal we can ignore overflow. */
16650 || known_eq (off0, off1)
16651 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16652 /* Or if we compare using pointers to decls or strings. */
16653 || (POINTER_TYPE_P (type)
16654 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16655 return 1;
16656 return 2;
16658 if (equal != 0)
16659 return equal;
16660 if (code != EQ_EXPR && code != NE_EXPR)
16661 return 2;
16663 /* At this point we know (or assume) the two pointers point at
16664 different objects. */
16665 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16666 off0.is_constant (&ioff0);
16667 off1.is_constant (&ioff1);
16668 /* Punt on non-zero offsets from functions. */
16669 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16670 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16671 return 2;
16672 /* Or if the bases are neither decls nor string literals. */
16673 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16674 return 2;
16675 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16676 return 2;
16677 /* For initializers, assume addresses of different functions are
16678 different. */
16679 if (folding_initializer
16680 && TREE_CODE (base0) == FUNCTION_DECL
16681 && TREE_CODE (base1) == FUNCTION_DECL)
16682 return 0;
16684 /* Compute whether one address points to the start of one
16685 object and another one to the end of another one. */
16686 poly_int64 size0 = 0, size1 = 0;
16687 if (TREE_CODE (base0) == STRING_CST)
16689 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16690 equal = 2;
16691 else
16692 size0 = TREE_STRING_LENGTH (base0);
16694 else if (TREE_CODE (base0) == FUNCTION_DECL)
16695 size0 = 1;
16696 else
16698 tree sz0 = DECL_SIZE_UNIT (base0);
16699 if (!tree_fits_poly_int64_p (sz0))
16700 equal = 2;
16701 else
16702 size0 = tree_to_poly_int64 (sz0);
16704 if (TREE_CODE (base1) == STRING_CST)
16706 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16707 equal = 2;
16708 else
16709 size1 = TREE_STRING_LENGTH (base1);
16711 else if (TREE_CODE (base1) == FUNCTION_DECL)
16712 size1 = 1;
16713 else
16715 tree sz1 = DECL_SIZE_UNIT (base1);
16716 if (!tree_fits_poly_int64_p (sz1))
16717 equal = 2;
16718 else
16719 size1 = tree_to_poly_int64 (sz1);
16721 if (equal == 0)
16723 /* If one offset is pointing (or could be) to the beginning of one
16724 object and the other is pointing to one past the last byte of the
16725 other object, punt. */
16726 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16727 equal = 2;
16728 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16729 equal = 2;
16730 /* If both offsets are the same, there are some cases we know that are
16731 ok. Either if we know they aren't zero, or if we know both sizes
16732 are no zero. */
16733 if (equal == 2
16734 && known_eq (off0, off1)
16735 && (known_ne (off0, 0)
16736 || (known_ne (size0, 0) && known_ne (size1, 0))))
16737 equal = 0;
16740 /* At this point, equal is 2 if either one or both pointers are out of
16741 bounds of their object, or one points to start of its object and the
16742 other points to end of its object. This is unspecified behavior
16743 e.g. in C++. Otherwise equal is 0. */
16744 if (folding_cxx_constexpr && equal)
16745 return equal;
16747 /* When both pointers point to string literals, even when equal is 0,
16748 due to tail merging of string literals the pointers might be the same. */
16749 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
16751 if (ioff0 < 0
16752 || ioff1 < 0
16753 || ioff0 > TREE_STRING_LENGTH (base0)
16754 || ioff1 > TREE_STRING_LENGTH (base1))
16755 return 2;
16757 /* If the bytes in the string literals starting at the pointers
16758 differ, the pointers need to be different. */
16759 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
16760 TREE_STRING_POINTER (base1) + ioff1,
16761 MIN (TREE_STRING_LENGTH (base0) - ioff0,
16762 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
16764 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
16765 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
16766 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
16767 ioffmin) == 0)
16768 /* If even the bytes in the string literal before the
16769 pointers are the same, the string literals could be
16770 tail merged. */
16771 return 2;
16773 return 0;
16776 if (folding_cxx_constexpr)
16777 return 0;
16779 /* If this is a pointer comparison, ignore for now even
16780 valid equalities where one pointer is the offset zero
16781 of one object and the other to one past end of another one. */
16782 if (!INTEGRAL_TYPE_P (type))
16783 return 0;
16785 /* Assume that string literals can't be adjacent to variables
16786 (automatic or global). */
16787 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
16788 return 0;
16790 /* Assume that automatic variables can't be adjacent to global
16791 variables. */
16792 if (is_global_var (base0) != is_global_var (base1))
16793 return 0;
16795 return equal;
16798 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
16799 tree
16800 ctor_single_nonzero_element (const_tree t)
16802 unsigned HOST_WIDE_INT idx;
16803 constructor_elt *ce;
16804 tree elt = NULL_TREE;
16806 if (TREE_CODE (t) != CONSTRUCTOR)
16807 return NULL_TREE;
16808 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
16809 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
16811 if (elt)
16812 return NULL_TREE;
16813 elt = ce->value;
16815 return elt;
16818 #if CHECKING_P
16820 namespace selftest {
16822 /* Helper functions for writing tests of folding trees. */
16824 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16826 static void
16827 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
16828 tree constant)
16830 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
16833 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
16834 wrapping WRAPPED_EXPR. */
16836 static void
16837 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
16838 tree wrapped_expr)
16840 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
16841 ASSERT_NE (wrapped_expr, result);
16842 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
16843 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
16846 /* Verify that various arithmetic binary operations are folded
16847 correctly. */
16849 static void
16850 test_arithmetic_folding ()
16852 tree type = integer_type_node;
16853 tree x = create_tmp_var_raw (type, "x");
16854 tree zero = build_zero_cst (type);
16855 tree one = build_int_cst (type, 1);
16857 /* Addition. */
16858 /* 1 <-- (0 + 1) */
16859 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
16860 one);
16861 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
16862 one);
16864 /* (nonlvalue)x <-- (x + 0) */
16865 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
16868 /* Subtraction. */
16869 /* 0 <-- (x - x) */
16870 assert_binop_folds_to_const (x, MINUS_EXPR, x,
16871 zero);
16872 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
16875 /* Multiplication. */
16876 /* 0 <-- (x * 0) */
16877 assert_binop_folds_to_const (x, MULT_EXPR, zero,
16878 zero);
16880 /* (nonlvalue)x <-- (x * 1) */
16881 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
16885 /* Verify that various binary operations on vectors are folded
16886 correctly. */
16888 static void
16889 test_vector_folding ()
16891 tree inner_type = integer_type_node;
16892 tree type = build_vector_type (inner_type, 4);
16893 tree zero = build_zero_cst (type);
16894 tree one = build_one_cst (type);
16895 tree index = build_index_vector (type, 0, 1);
16897 /* Verify equality tests that return a scalar boolean result. */
16898 tree res_type = boolean_type_node;
16899 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
16900 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
16901 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
16902 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
16903 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
16904 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16905 index, one)));
16906 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
16907 index, index)));
16908 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16909 index, index)));
16912 /* Verify folding of VEC_DUPLICATE_EXPRs. */
16914 static void
16915 test_vec_duplicate_folding ()
16917 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
16918 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
16919 /* This will be 1 if VEC_MODE isn't a vector mode. */
16920 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
16922 tree type = build_vector_type (ssizetype, nunits);
16923 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
16924 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
16925 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
16928 /* Run all of the selftests within this file. */
16930 void
16931 fold_const_cc_tests ()
16933 test_arithmetic_folding ();
16934 test_vector_folding ();
16935 test_vec_duplicate_folding ();
16938 } // namespace selftest
16940 #endif /* CHECKING_P */