compiler: only build thunk struct type when it is needed
[official-gcc.git] / gcc / fold-const.cc
blob9f7beae14e504ae9837162faac796d4f8433c044
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-iterator.h"
74 #include "gimple-fold.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
86 #include "asan.h"
87 #include "gimple-range.h"
89 /* Nonzero if we are folding constants inside an initializer or a C++
90 manifestly-constant-evaluated context; zero otherwise.
91 Should be used when folding in initializer enables additional
92 optimizations. */
93 int folding_initializer = 0;
95 /* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
96 otherwise.
97 Should be used when certain constructs shouldn't be optimized
98 during folding in that context. */
99 bool folding_cxx_constexpr = false;
101 /* The following constants represent a bit based encoding of GCC's
102 comparison operators. This encoding simplifies transformations
103 on relational comparison operators, such as AND and OR. */
104 enum comparison_code {
105 COMPCODE_FALSE = 0,
106 COMPCODE_LT = 1,
107 COMPCODE_EQ = 2,
108 COMPCODE_LE = 3,
109 COMPCODE_GT = 4,
110 COMPCODE_LTGT = 5,
111 COMPCODE_GE = 6,
112 COMPCODE_ORD = 7,
113 COMPCODE_UNORD = 8,
114 COMPCODE_UNLT = 9,
115 COMPCODE_UNEQ = 10,
116 COMPCODE_UNLE = 11,
117 COMPCODE_UNGT = 12,
118 COMPCODE_NE = 13,
119 COMPCODE_UNGE = 14,
120 COMPCODE_TRUE = 15
123 static bool negate_expr_p (tree);
124 static tree negate_expr (tree);
125 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
126 static enum comparison_code comparison_to_compcode (enum tree_code);
127 static enum tree_code compcode_to_comparison (enum comparison_code);
128 static bool twoval_comparison_p (tree, tree *, tree *);
129 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
130 static tree optimize_bit_field_compare (location_t, enum tree_code,
131 tree, tree, tree);
132 static bool simple_operand_p (const_tree);
133 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
134 static tree range_predecessor (tree);
135 static tree range_successor (tree);
136 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
137 static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
138 tree, tree, tree, tree);
139 static tree unextend (tree, int, int, tree);
140 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
141 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
142 static tree fold_binary_op_with_conditional_arg (location_t,
143 enum tree_code, tree,
144 tree, tree,
145 tree, tree, int);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static tree fold_negate_expr (location_t, tree);
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
157 static location_t
158 expr_location_or (tree t, location_t loc)
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
164 /* Similar to protected_set_expr_location, but never modify x in place,
165 if location can and needs to be set, unshare it. */
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
179 return x;
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
189 widest_int quo;
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
195 return NULL_TREE;
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
207 static int fold_deferring_overflow_warnings;
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
214 static const char* fold_deferred_overflow_warning;
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
224 void
225 fold_defer_overflow_warnings (void)
227 ++fold_deferring_overflow_warnings;
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
242 const char *warnmsg;
243 location_t locus;
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
259 if (!issue || warnmsg == NULL)
260 return;
262 if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
263 return;
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
270 if (!issue_strict_overflow_warning (code))
271 return;
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
286 fold_undefer_overflow_warnings (false, NULL, 0);
289 /* Whether we are deferring overflow warnings. */
291 bool
292 fold_deferring_overflow_warnings_p (void)
294 return fold_deferring_overflow_warnings > 0;
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
300 void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
303 if (fold_deferring_overflow_warnings > 0)
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
316 /* Return true if the built-in mathematical function specified by CODE
317 is odd, i.e. -f(x) == f(-x). */
319 bool
320 negate_mathfn_p (combined_fn fn)
322 switch (fn)
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_ROUNDEVEN:
343 CASE_CFN_ROUNDEVEN_FN:
344 CASE_CFN_SIN:
345 CASE_CFN_SINH:
346 CASE_CFN_TAN:
347 CASE_CFN_TANH:
348 CASE_CFN_TRUNC:
349 return true;
351 CASE_CFN_LLRINT:
352 CASE_CFN_LRINT:
353 CASE_CFN_NEARBYINT:
354 CASE_CFN_RINT:
355 return !flag_rounding_math;
357 default:
358 break;
360 return false;
363 /* Check whether we may negate an integer constant T without causing
364 overflow. */
366 bool
367 may_negate_without_overflow_p (const_tree t)
369 tree type;
371 gcc_assert (TREE_CODE (t) == INTEGER_CST);
373 type = TREE_TYPE (t);
374 if (TYPE_UNSIGNED (type))
375 return false;
377 return !wi::only_sign_bit_p (wi::to_wide (t));
380 /* Determine whether an expression T can be cheaply negated using
381 the function negate_expr without introducing undefined overflow. */
383 static bool
384 negate_expr_p (tree t)
386 tree type;
388 if (t == 0)
389 return false;
391 type = TREE_TYPE (t);
393 STRIP_SIGN_NOPS (t);
394 switch (TREE_CODE (t))
396 case INTEGER_CST:
397 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
398 return true;
400 /* Check that -CST will not overflow type. */
401 return may_negate_without_overflow_p (t);
402 case BIT_NOT_EXPR:
403 return (INTEGRAL_TYPE_P (type)
404 && TYPE_OVERFLOW_WRAPS (type));
406 case FIXED_CST:
407 return true;
409 case NEGATE_EXPR:
410 return !TYPE_OVERFLOW_SANITIZED (type);
412 case REAL_CST:
413 /* We want to canonicalize to positive real constants. Pretend
414 that only negative ones can be easily negated. */
415 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
417 case COMPLEX_CST:
418 return negate_expr_p (TREE_REALPART (t))
419 && negate_expr_p (TREE_IMAGPART (t));
421 case VECTOR_CST:
423 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
424 return true;
426 /* Steps don't prevent negation. */
427 unsigned int count = vector_cst_encoded_nelts (t);
428 for (unsigned int i = 0; i < count; ++i)
429 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
430 return false;
432 return true;
435 case COMPLEX_EXPR:
436 return negate_expr_p (TREE_OPERAND (t, 0))
437 && negate_expr_p (TREE_OPERAND (t, 1));
439 case CONJ_EXPR:
440 return negate_expr_p (TREE_OPERAND (t, 0));
442 case PLUS_EXPR:
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
444 || HONOR_SIGNED_ZEROS (type)
445 || (ANY_INTEGRAL_TYPE_P (type)
446 && ! TYPE_OVERFLOW_WRAPS (type)))
447 return false;
448 /* -(A + B) -> (-B) - A. */
449 if (negate_expr_p (TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
457 && !HONOR_SIGNED_ZEROS (type)
458 && (! ANY_INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type));
461 case MULT_EXPR:
462 if (TYPE_UNSIGNED (type))
463 break;
464 /* INT_MIN/n * n doesn't overflow while negating one operand it does
465 if n is a (negative) power of two. */
466 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
467 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
468 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
469 && (wi::popcount
470 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
471 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
472 && (wi::popcount
473 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
474 break;
476 /* Fall through. */
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 /* In general we can't negate A in A / B, because if A is INT_MIN and
490 B is not 1 we change the sign of the result. */
491 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
492 && negate_expr_p (TREE_OPERAND (t, 0)))
493 return true;
494 /* In general we can't negate B in A / B, because if A is INT_MIN and
495 B is 1, we may turn this into INT_MIN / -1 which is undefined
496 and actually traps on some architectures. */
497 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
498 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
499 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
500 && ! integer_onep (TREE_OPERAND (t, 1))))
501 return negate_expr_p (TREE_OPERAND (t, 1));
502 break;
504 case NOP_EXPR:
505 /* Negate -((double)float) as (double)(-float). */
506 if (TREE_CODE (type) == REAL_TYPE)
508 tree tem = strip_float_extensions (t);
509 if (tem != t)
510 return negate_expr_p (tem);
512 break;
514 case CALL_EXPR:
515 /* Negate -f(x) as f(-x). */
516 if (negate_mathfn_p (get_call_combined_fn (t)))
517 return negate_expr_p (CALL_EXPR_ARG (t, 0));
518 break;
520 case RSHIFT_EXPR:
521 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
522 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
524 tree op1 = TREE_OPERAND (t, 1);
525 if (wi::to_wide (op1) == element_precision (type) - 1)
526 return true;
528 break;
530 default:
531 break;
533 return false;
536 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
537 simplification is possible.
538 If negate_expr_p would return true for T, NULL_TREE will never be
539 returned. */
541 static tree
542 fold_negate_expr_1 (location_t loc, tree t)
544 tree type = TREE_TYPE (t);
545 tree tem;
547 switch (TREE_CODE (t))
549 /* Convert - (~A) to A + 1. */
550 case BIT_NOT_EXPR:
551 if (INTEGRAL_TYPE_P (type))
552 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
553 build_one_cst (type));
554 break;
556 case INTEGER_CST:
557 tem = fold_negate_const (t, type);
558 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
559 || (ANY_INTEGRAL_TYPE_P (type)
560 && !TYPE_OVERFLOW_TRAPS (type)
561 && TYPE_OVERFLOW_WRAPS (type))
562 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
563 return tem;
564 break;
566 case POLY_INT_CST:
567 case REAL_CST:
568 case FIXED_CST:
569 tem = fold_negate_const (t, type);
570 return tem;
572 case COMPLEX_CST:
574 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
575 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
576 if (rpart && ipart)
577 return build_complex (type, rpart, ipart);
579 break;
581 case VECTOR_CST:
583 tree_vector_builder elts;
584 elts.new_unary_operation (type, t, true);
585 unsigned int count = elts.encoded_nelts ();
586 for (unsigned int i = 0; i < count; ++i)
588 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
589 if (elt == NULL_TREE)
590 return NULL_TREE;
591 elts.quick_push (elt);
594 return elts.build ();
597 case COMPLEX_EXPR:
598 if (negate_expr_p (t))
599 return fold_build2_loc (loc, COMPLEX_EXPR, type,
600 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
601 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
602 break;
604 case CONJ_EXPR:
605 if (negate_expr_p (t))
606 return fold_build1_loc (loc, CONJ_EXPR, type,
607 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
608 break;
610 case NEGATE_EXPR:
611 if (!TYPE_OVERFLOW_SANITIZED (type))
612 return TREE_OPERAND (t, 0);
613 break;
615 case PLUS_EXPR:
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
617 && !HONOR_SIGNED_ZEROS (type))
619 /* -(A + B) -> (-B) - A. */
620 if (negate_expr_p (TREE_OPERAND (t, 1)))
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
635 break;
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
640 && !HONOR_SIGNED_ZEROS (type))
641 return fold_build2_loc (loc, MINUS_EXPR, type,
642 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
643 break;
645 case MULT_EXPR:
646 if (TYPE_UNSIGNED (type))
647 break;
649 /* Fall through. */
651 case RDIV_EXPR:
652 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
654 tem = TREE_OPERAND (t, 1);
655 if (negate_expr_p (tem))
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 TREE_OPERAND (t, 0), negate_expr (tem));
658 tem = TREE_OPERAND (t, 0);
659 if (negate_expr_p (tem))
660 return fold_build2_loc (loc, TREE_CODE (t), type,
661 negate_expr (tem), TREE_OPERAND (t, 1));
663 break;
665 case TRUNC_DIV_EXPR:
666 case ROUND_DIV_EXPR:
667 case EXACT_DIV_EXPR:
668 if (TYPE_UNSIGNED (type))
669 break;
670 /* In general we can't negate A in A / B, because if A is INT_MIN and
671 B is not 1 we change the sign of the result. */
672 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
673 && negate_expr_p (TREE_OPERAND (t, 0)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 negate_expr (TREE_OPERAND (t, 0)),
676 TREE_OPERAND (t, 1));
677 /* In general we can't negate B in A / B, because if A is INT_MIN and
678 B is 1, we may turn this into INT_MIN / -1 which is undefined
679 and actually traps on some architectures. */
680 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
681 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
682 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
683 && ! integer_onep (TREE_OPERAND (t, 1))))
684 && negate_expr_p (TREE_OPERAND (t, 1)))
685 return fold_build2_loc (loc, TREE_CODE (t), type,
686 TREE_OPERAND (t, 0),
687 negate_expr (TREE_OPERAND (t, 1)));
688 break;
690 case NOP_EXPR:
691 /* Convert -((double)float) into (double)(-float). */
692 if (TREE_CODE (type) == REAL_TYPE)
694 tem = strip_float_extensions (t);
695 if (tem != t && negate_expr_p (tem))
696 return fold_convert_loc (loc, type, negate_expr (tem));
698 break;
700 case CALL_EXPR:
701 /* Negate -f(x) as f(-x). */
702 if (negate_mathfn_p (get_call_combined_fn (t))
703 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
705 tree fndecl, arg;
707 fndecl = get_callee_fndecl (t);
708 arg = negate_expr (CALL_EXPR_ARG (t, 0));
709 return build_call_expr_loc (loc, fndecl, 1, arg);
711 break;
713 case RSHIFT_EXPR:
714 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
715 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
717 tree op1 = TREE_OPERAND (t, 1);
718 if (wi::to_wide (op1) == element_precision (type) - 1)
720 tree ntype = TYPE_UNSIGNED (type)
721 ? signed_type_for (type)
722 : unsigned_type_for (type);
723 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
724 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
725 return fold_convert_loc (loc, type, temp);
728 break;
730 default:
731 break;
734 return NULL_TREE;
737 /* A wrapper for fold_negate_expr_1. */
739 static tree
740 fold_negate_expr (location_t loc, tree t)
742 tree type = TREE_TYPE (t);
743 STRIP_SIGN_NOPS (t);
744 tree tem = fold_negate_expr_1 (loc, t);
745 if (tem == NULL_TREE)
746 return NULL_TREE;
747 return fold_convert_loc (loc, type, tem);
750 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
751 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
752 return NULL_TREE. */
754 static tree
755 negate_expr (tree t)
757 tree type, tem;
758 location_t loc;
760 if (t == NULL_TREE)
761 return NULL_TREE;
763 loc = EXPR_LOCATION (t);
764 type = TREE_TYPE (t);
765 STRIP_SIGN_NOPS (t);
767 tem = fold_negate_expr (loc, t);
768 if (!tem)
769 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
770 return fold_convert_loc (loc, type, tem);
773 /* Split a tree IN into a constant, literal and variable parts that could be
774 combined with CODE to make IN. "constant" means an expression with
775 TREE_CONSTANT but that isn't an actual constant. CODE must be a
776 commutative arithmetic operation. Store the constant part into *CONP,
777 the literal in *LITP and return the variable part. If a part isn't
778 present, set it to null. If the tree does not decompose in this way,
779 return the entire tree as the variable part and the other parts as null.
781 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
782 case, we negate an operand that was subtracted. Except if it is a
783 literal for which we use *MINUS_LITP instead.
785 If NEGATE_P is true, we are negating all of IN, again except a literal
786 for which we use *MINUS_LITP instead. If a variable part is of pointer
787 type, it is negated after converting to TYPE. This prevents us from
788 generating illegal MINUS pointer expression. LOC is the location of
789 the converted variable part.
791 If IN is itself a literal or constant, return it as appropriate.
793 Note that we do not guarantee that any of the three values will be the
794 same type as IN, but they will have the same signedness and mode. */
796 static tree
797 split_tree (tree in, tree type, enum tree_code code,
798 tree *minus_varp, tree *conp, tree *minus_conp,
799 tree *litp, tree *minus_litp, int negate_p)
801 tree var = 0;
802 *minus_varp = 0;
803 *conp = 0;
804 *minus_conp = 0;
805 *litp = 0;
806 *minus_litp = 0;
808 /* Strip any conversions that don't change the machine mode or signedness. */
809 STRIP_SIGN_NOPS (in);
811 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
812 || TREE_CODE (in) == FIXED_CST)
813 *litp = in;
814 else if (TREE_CODE (in) == code
815 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
816 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
817 /* We can associate addition and subtraction together (even
818 though the C standard doesn't say so) for integers because
819 the value is not affected. For reals, the value might be
820 affected, so we can't. */
821 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
822 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
823 || (code == MINUS_EXPR
824 && (TREE_CODE (in) == PLUS_EXPR
825 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
827 tree op0 = TREE_OPERAND (in, 0);
828 tree op1 = TREE_OPERAND (in, 1);
829 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
830 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
832 /* First see if either of the operands is a literal, then a constant. */
833 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
834 || TREE_CODE (op0) == FIXED_CST)
835 *litp = op0, op0 = 0;
836 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
837 || TREE_CODE (op1) == FIXED_CST)
838 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
840 if (op0 != 0 && TREE_CONSTANT (op0))
841 *conp = op0, op0 = 0;
842 else if (op1 != 0 && TREE_CONSTANT (op1))
843 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
845 /* If we haven't dealt with either operand, this is not a case we can
846 decompose. Otherwise, VAR is either of the ones remaining, if any. */
847 if (op0 != 0 && op1 != 0)
848 var = in;
849 else if (op0 != 0)
850 var = op0;
851 else
852 var = op1, neg_var_p = neg1_p;
854 /* Now do any needed negations. */
855 if (neg_litp_p)
856 *minus_litp = *litp, *litp = 0;
857 if (neg_conp_p && *conp)
858 *minus_conp = *conp, *conp = 0;
859 if (neg_var_p && var)
860 *minus_varp = var, var = 0;
862 else if (TREE_CONSTANT (in))
863 *conp = in;
864 else if (TREE_CODE (in) == BIT_NOT_EXPR
865 && code == PLUS_EXPR)
867 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
868 when IN is constant. */
869 *litp = build_minus_one_cst (type);
870 *minus_varp = TREE_OPERAND (in, 0);
872 else
873 var = in;
875 if (negate_p)
877 if (*litp)
878 *minus_litp = *litp, *litp = 0;
879 else if (*minus_litp)
880 *litp = *minus_litp, *minus_litp = 0;
881 if (*conp)
882 *minus_conp = *conp, *conp = 0;
883 else if (*minus_conp)
884 *conp = *minus_conp, *minus_conp = 0;
885 if (var)
886 *minus_varp = var, var = 0;
887 else if (*minus_varp)
888 var = *minus_varp, *minus_varp = 0;
891 if (*litp
892 && TREE_OVERFLOW_P (*litp))
893 *litp = drop_tree_overflow (*litp);
894 if (*minus_litp
895 && TREE_OVERFLOW_P (*minus_litp))
896 *minus_litp = drop_tree_overflow (*minus_litp);
898 return var;
901 /* Re-associate trees split by the above function. T1 and T2 are
902 either expressions to associate or null. Return the new
903 expression, if any. LOC is the location of the new expression. If
904 we build an operation, do it in TYPE and with CODE. */
906 static tree
907 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
909 if (t1 == 0)
911 gcc_assert (t2 == 0 || code != MINUS_EXPR);
912 return t2;
914 else if (t2 == 0)
915 return t1;
917 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
918 try to fold this since we will have infinite recursion. But do
919 deal with any NEGATE_EXPRs. */
920 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
921 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
922 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
924 if (code == PLUS_EXPR)
926 if (TREE_CODE (t1) == NEGATE_EXPR)
927 return build2_loc (loc, MINUS_EXPR, type,
928 fold_convert_loc (loc, type, t2),
929 fold_convert_loc (loc, type,
930 TREE_OPERAND (t1, 0)));
931 else if (TREE_CODE (t2) == NEGATE_EXPR)
932 return build2_loc (loc, MINUS_EXPR, type,
933 fold_convert_loc (loc, type, t1),
934 fold_convert_loc (loc, type,
935 TREE_OPERAND (t2, 0)));
936 else if (integer_zerop (t2))
937 return fold_convert_loc (loc, type, t1);
939 else if (code == MINUS_EXPR)
941 if (integer_zerop (t2))
942 return fold_convert_loc (loc, type, t1);
945 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
946 fold_convert_loc (loc, type, t2));
949 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
950 fold_convert_loc (loc, type, t2));
953 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
954 for use in int_const_binop, size_binop and size_diffop. */
956 static bool
957 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
959 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
960 return false;
961 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
962 return false;
964 switch (code)
966 case LSHIFT_EXPR:
967 case RSHIFT_EXPR:
968 case LROTATE_EXPR:
969 case RROTATE_EXPR:
970 return true;
972 default:
973 break;
976 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
977 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
978 && TYPE_MODE (type1) == TYPE_MODE (type2);
981 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
982 a new constant in RES. Return FALSE if we don't know how to
983 evaluate CODE at compile-time. */
985 bool
986 wide_int_binop (wide_int &res,
987 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
988 signop sign, wi::overflow_type *overflow)
990 wide_int tmp;
991 *overflow = wi::OVF_NONE;
992 switch (code)
994 case BIT_IOR_EXPR:
995 res = wi::bit_or (arg1, arg2);
996 break;
998 case BIT_XOR_EXPR:
999 res = wi::bit_xor (arg1, arg2);
1000 break;
1002 case BIT_AND_EXPR:
1003 res = wi::bit_and (arg1, arg2);
1004 break;
1006 case LSHIFT_EXPR:
1007 if (wi::neg_p (arg2))
1008 return false;
1009 res = wi::lshift (arg1, arg2);
1010 break;
1012 case RSHIFT_EXPR:
1013 if (wi::neg_p (arg2))
1014 return false;
1015 /* It's unclear from the C standard whether shifts can overflow.
1016 The following code ignores overflow; perhaps a C standard
1017 interpretation ruling is needed. */
1018 res = wi::rshift (arg1, arg2, sign);
1019 break;
1021 case RROTATE_EXPR:
1022 case LROTATE_EXPR:
1023 if (wi::neg_p (arg2))
1025 tmp = -arg2;
1026 if (code == RROTATE_EXPR)
1027 code = LROTATE_EXPR;
1028 else
1029 code = RROTATE_EXPR;
1031 else
1032 tmp = arg2;
1034 if (code == RROTATE_EXPR)
1035 res = wi::rrotate (arg1, tmp);
1036 else
1037 res = wi::lrotate (arg1, tmp);
1038 break;
1040 case PLUS_EXPR:
1041 res = wi::add (arg1, arg2, sign, overflow);
1042 break;
1044 case MINUS_EXPR:
1045 res = wi::sub (arg1, arg2, sign, overflow);
1046 break;
1048 case MULT_EXPR:
1049 res = wi::mul (arg1, arg2, sign, overflow);
1050 break;
1052 case MULT_HIGHPART_EXPR:
1053 res = wi::mul_high (arg1, arg2, sign);
1054 break;
1056 case TRUNC_DIV_EXPR:
1057 case EXACT_DIV_EXPR:
1058 if (arg2 == 0)
1059 return false;
1060 res = wi::div_trunc (arg1, arg2, sign, overflow);
1061 break;
1063 case FLOOR_DIV_EXPR:
1064 if (arg2 == 0)
1065 return false;
1066 res = wi::div_floor (arg1, arg2, sign, overflow);
1067 break;
1069 case CEIL_DIV_EXPR:
1070 if (arg2 == 0)
1071 return false;
1072 res = wi::div_ceil (arg1, arg2, sign, overflow);
1073 break;
1075 case ROUND_DIV_EXPR:
1076 if (arg2 == 0)
1077 return false;
1078 res = wi::div_round (arg1, arg2, sign, overflow);
1079 break;
1081 case TRUNC_MOD_EXPR:
1082 if (arg2 == 0)
1083 return false;
1084 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1085 break;
1087 case FLOOR_MOD_EXPR:
1088 if (arg2 == 0)
1089 return false;
1090 res = wi::mod_floor (arg1, arg2, sign, overflow);
1091 break;
1093 case CEIL_MOD_EXPR:
1094 if (arg2 == 0)
1095 return false;
1096 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1097 break;
1099 case ROUND_MOD_EXPR:
1100 if (arg2 == 0)
1101 return false;
1102 res = wi::mod_round (arg1, arg2, sign, overflow);
1103 break;
1105 case MIN_EXPR:
1106 res = wi::min (arg1, arg2, sign);
1107 break;
1109 case MAX_EXPR:
1110 res = wi::max (arg1, arg2, sign);
1111 break;
1113 default:
1114 return false;
1116 return true;
1119 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1120 produce a new constant in RES. Return FALSE if we don't know how
1121 to evaluate CODE at compile-time. */
1123 static bool
1124 poly_int_binop (poly_wide_int &res, enum tree_code code,
1125 const_tree arg1, const_tree arg2,
1126 signop sign, wi::overflow_type *overflow)
1128 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1129 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1130 switch (code)
1132 case PLUS_EXPR:
1133 res = wi::add (wi::to_poly_wide (arg1),
1134 wi::to_poly_wide (arg2), sign, overflow);
1135 break;
1137 case MINUS_EXPR:
1138 res = wi::sub (wi::to_poly_wide (arg1),
1139 wi::to_poly_wide (arg2), sign, overflow);
1140 break;
1142 case MULT_EXPR:
1143 if (TREE_CODE (arg2) == INTEGER_CST)
1144 res = wi::mul (wi::to_poly_wide (arg1),
1145 wi::to_wide (arg2), sign, overflow);
1146 else if (TREE_CODE (arg1) == INTEGER_CST)
1147 res = wi::mul (wi::to_poly_wide (arg2),
1148 wi::to_wide (arg1), sign, overflow);
1149 else
1150 return NULL_TREE;
1151 break;
1153 case LSHIFT_EXPR:
1154 if (TREE_CODE (arg2) == INTEGER_CST)
1155 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1156 else
1157 return false;
1158 break;
1160 case BIT_IOR_EXPR:
1161 if (TREE_CODE (arg2) != INTEGER_CST
1162 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1163 &res))
1164 return false;
1165 break;
1167 default:
1168 return false;
1170 return true;
1173 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1174 produce a new constant. Return NULL_TREE if we don't know how to
1175 evaluate CODE at compile-time. */
1177 tree
1178 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1179 int overflowable)
1181 poly_wide_int poly_res;
1182 tree type = TREE_TYPE (arg1);
1183 signop sign = TYPE_SIGN (type);
1184 wi::overflow_type overflow = wi::OVF_NONE;
1186 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1188 wide_int warg1 = wi::to_wide (arg1), res;
1189 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1190 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1191 return NULL_TREE;
1192 poly_res = res;
1194 else if (!poly_int_tree_p (arg1)
1195 || !poly_int_tree_p (arg2)
1196 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1197 return NULL_TREE;
1198 return force_fit_type (type, poly_res, overflowable,
1199 (((sign == SIGNED || overflowable == -1)
1200 && overflow)
1201 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1204 /* Return true if binary operation OP distributes over addition in operand
1205 OPNO, with the other operand being held constant. OPNO counts from 1. */
1207 static bool
1208 distributes_over_addition_p (tree_code op, int opno)
1210 switch (op)
1212 case PLUS_EXPR:
1213 case MINUS_EXPR:
1214 case MULT_EXPR:
1215 return true;
1217 case LSHIFT_EXPR:
1218 return opno == 1;
1220 default:
1221 return false;
1225 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1226 constant. We assume ARG1 and ARG2 have the same data type, or at least
1227 are the same kind of constant and the same machine mode. Return zero if
1228 combining the constants is not allowed in the current operating mode. */
1230 static tree
1231 const_binop (enum tree_code code, tree arg1, tree arg2)
1233 /* Sanity check for the recursive cases. */
1234 if (!arg1 || !arg2)
1235 return NULL_TREE;
1237 STRIP_NOPS (arg1);
1238 STRIP_NOPS (arg2);
1240 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1242 if (code == POINTER_PLUS_EXPR)
1243 return int_const_binop (PLUS_EXPR,
1244 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1246 return int_const_binop (code, arg1, arg2);
1249 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1251 machine_mode mode;
1252 REAL_VALUE_TYPE d1;
1253 REAL_VALUE_TYPE d2;
1254 REAL_VALUE_TYPE value;
1255 REAL_VALUE_TYPE result;
1256 bool inexact;
1257 tree t, type;
1259 /* The following codes are handled by real_arithmetic. */
1260 switch (code)
1262 case PLUS_EXPR:
1263 case MINUS_EXPR:
1264 case MULT_EXPR:
1265 case RDIV_EXPR:
1266 case MIN_EXPR:
1267 case MAX_EXPR:
1268 break;
1270 default:
1271 return NULL_TREE;
1274 d1 = TREE_REAL_CST (arg1);
1275 d2 = TREE_REAL_CST (arg2);
1277 type = TREE_TYPE (arg1);
1278 mode = TYPE_MODE (type);
1280 /* Don't perform operation if we honor signaling NaNs and
1281 either operand is a signaling NaN. */
1282 if (HONOR_SNANS (mode)
1283 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1284 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1285 return NULL_TREE;
1287 /* Don't perform operation if it would raise a division
1288 by zero exception. */
1289 if (code == RDIV_EXPR
1290 && real_equal (&d2, &dconst0)
1291 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1292 return NULL_TREE;
1294 /* If either operand is a NaN, just return it. Otherwise, set up
1295 for floating-point trap; we return an overflow. */
1296 if (REAL_VALUE_ISNAN (d1))
1298 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1299 is off. */
1300 d1.signalling = 0;
1301 t = build_real (type, d1);
1302 return t;
1304 else if (REAL_VALUE_ISNAN (d2))
1306 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1307 is off. */
1308 d2.signalling = 0;
1309 t = build_real (type, d2);
1310 return t;
1313 inexact = real_arithmetic (&value, code, &d1, &d2);
1314 real_convert (&result, mode, &value);
1316 /* Don't constant fold this floating point operation if
1317 both operands are not NaN but the result is NaN, and
1318 flag_trapping_math. Such operations should raise an
1319 invalid operation exception. */
1320 if (flag_trapping_math
1321 && MODE_HAS_NANS (mode)
1322 && REAL_VALUE_ISNAN (result)
1323 && !REAL_VALUE_ISNAN (d1)
1324 && !REAL_VALUE_ISNAN (d2))
1325 return NULL_TREE;
1327 /* Don't constant fold this floating point operation if
1328 the result has overflowed and flag_trapping_math. */
1329 if (flag_trapping_math
1330 && MODE_HAS_INFINITIES (mode)
1331 && REAL_VALUE_ISINF (result)
1332 && !REAL_VALUE_ISINF (d1)
1333 && !REAL_VALUE_ISINF (d2))
1334 return NULL_TREE;
1336 /* Don't constant fold this floating point operation if the
1337 result may dependent upon the run-time rounding mode and
1338 flag_rounding_math is set, or if GCC's software emulation
1339 is unable to accurately represent the result. */
1340 if ((flag_rounding_math
1341 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1342 && (inexact || !real_identical (&result, &value)))
1343 return NULL_TREE;
1345 t = build_real (type, result);
1347 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1348 return t;
1351 if (TREE_CODE (arg1) == FIXED_CST)
1353 FIXED_VALUE_TYPE f1;
1354 FIXED_VALUE_TYPE f2;
1355 FIXED_VALUE_TYPE result;
1356 tree t, type;
1357 int sat_p;
1358 bool overflow_p;
1360 /* The following codes are handled by fixed_arithmetic. */
1361 switch (code)
1363 case PLUS_EXPR:
1364 case MINUS_EXPR:
1365 case MULT_EXPR:
1366 case TRUNC_DIV_EXPR:
1367 if (TREE_CODE (arg2) != FIXED_CST)
1368 return NULL_TREE;
1369 f2 = TREE_FIXED_CST (arg2);
1370 break;
1372 case LSHIFT_EXPR:
1373 case RSHIFT_EXPR:
1375 if (TREE_CODE (arg2) != INTEGER_CST)
1376 return NULL_TREE;
1377 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1378 f2.data.high = w2.elt (1);
1379 f2.data.low = w2.ulow ();
1380 f2.mode = SImode;
1382 break;
1384 default:
1385 return NULL_TREE;
1388 f1 = TREE_FIXED_CST (arg1);
1389 type = TREE_TYPE (arg1);
1390 sat_p = TYPE_SATURATING (type);
1391 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1392 t = build_fixed (type, result);
1393 /* Propagate overflow flags. */
1394 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1395 TREE_OVERFLOW (t) = 1;
1396 return t;
1399 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1401 tree type = TREE_TYPE (arg1);
1402 tree r1 = TREE_REALPART (arg1);
1403 tree i1 = TREE_IMAGPART (arg1);
1404 tree r2 = TREE_REALPART (arg2);
1405 tree i2 = TREE_IMAGPART (arg2);
1406 tree real, imag;
1408 switch (code)
1410 case PLUS_EXPR:
1411 case MINUS_EXPR:
1412 real = const_binop (code, r1, r2);
1413 imag = const_binop (code, i1, i2);
1414 break;
1416 case MULT_EXPR:
1417 if (COMPLEX_FLOAT_TYPE_P (type))
1418 return do_mpc_arg2 (arg1, arg2, type,
1419 /* do_nonfinite= */ folding_initializer,
1420 mpc_mul);
1422 real = const_binop (MINUS_EXPR,
1423 const_binop (MULT_EXPR, r1, r2),
1424 const_binop (MULT_EXPR, i1, i2));
1425 imag = const_binop (PLUS_EXPR,
1426 const_binop (MULT_EXPR, r1, i2),
1427 const_binop (MULT_EXPR, i1, r2));
1428 break;
1430 case RDIV_EXPR:
1431 if (COMPLEX_FLOAT_TYPE_P (type))
1432 return do_mpc_arg2 (arg1, arg2, type,
1433 /* do_nonfinite= */ folding_initializer,
1434 mpc_div);
1435 /* Fallthru. */
1436 case TRUNC_DIV_EXPR:
1437 case CEIL_DIV_EXPR:
1438 case FLOOR_DIV_EXPR:
1439 case ROUND_DIV_EXPR:
1440 if (flag_complex_method == 0)
1442 /* Keep this algorithm in sync with
1443 tree-complex.cc:expand_complex_div_straight().
1445 Expand complex division to scalars, straightforward algorithm.
1446 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1447 t = br*br + bi*bi
1449 tree magsquared
1450 = const_binop (PLUS_EXPR,
1451 const_binop (MULT_EXPR, r2, r2),
1452 const_binop (MULT_EXPR, i2, i2));
1453 tree t1
1454 = const_binop (PLUS_EXPR,
1455 const_binop (MULT_EXPR, r1, r2),
1456 const_binop (MULT_EXPR, i1, i2));
1457 tree t2
1458 = const_binop (MINUS_EXPR,
1459 const_binop (MULT_EXPR, i1, r2),
1460 const_binop (MULT_EXPR, r1, i2));
1462 real = const_binop (code, t1, magsquared);
1463 imag = const_binop (code, t2, magsquared);
1465 else
1467 /* Keep this algorithm in sync with
1468 tree-complex.cc:expand_complex_div_wide().
1470 Expand complex division to scalars, modified algorithm to minimize
1471 overflow with wide input ranges. */
1472 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1473 fold_abs_const (r2, TREE_TYPE (type)),
1474 fold_abs_const (i2, TREE_TYPE (type)));
1476 if (integer_nonzerop (compare))
1478 /* In the TRUE branch, we compute
1479 ratio = br/bi;
1480 div = (br * ratio) + bi;
1481 tr = (ar * ratio) + ai;
1482 ti = (ai * ratio) - ar;
1483 tr = tr / div;
1484 ti = ti / div; */
1485 tree ratio = const_binop (code, r2, i2);
1486 tree div = const_binop (PLUS_EXPR, i2,
1487 const_binop (MULT_EXPR, r2, ratio));
1488 real = const_binop (MULT_EXPR, r1, ratio);
1489 real = const_binop (PLUS_EXPR, real, i1);
1490 real = const_binop (code, real, div);
1492 imag = const_binop (MULT_EXPR, i1, ratio);
1493 imag = const_binop (MINUS_EXPR, imag, r1);
1494 imag = const_binop (code, imag, div);
1496 else
1498 /* In the FALSE branch, we compute
1499 ratio = d/c;
1500 divisor = (d * ratio) + c;
1501 tr = (b * ratio) + a;
1502 ti = b - (a * ratio);
1503 tr = tr / div;
1504 ti = ti / div; */
1505 tree ratio = const_binop (code, i2, r2);
1506 tree div = const_binop (PLUS_EXPR, r2,
1507 const_binop (MULT_EXPR, i2, ratio));
1509 real = const_binop (MULT_EXPR, i1, ratio);
1510 real = const_binop (PLUS_EXPR, real, r1);
1511 real = const_binop (code, real, div);
1513 imag = const_binop (MULT_EXPR, r1, ratio);
1514 imag = const_binop (MINUS_EXPR, i1, imag);
1515 imag = const_binop (code, imag, div);
1518 break;
1520 default:
1521 return NULL_TREE;
1524 if (real && imag)
1525 return build_complex (type, real, imag);
1528 if (TREE_CODE (arg1) == VECTOR_CST
1529 && TREE_CODE (arg2) == VECTOR_CST
1530 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1531 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1533 tree type = TREE_TYPE (arg1);
1534 bool step_ok_p;
1535 if (VECTOR_CST_STEPPED_P (arg1)
1536 && VECTOR_CST_STEPPED_P (arg2))
1537 /* We can operate directly on the encoding if:
1539 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1540 implies
1541 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1543 Addition and subtraction are the supported operators
1544 for which this is true. */
1545 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1546 else if (VECTOR_CST_STEPPED_P (arg1))
1547 /* We can operate directly on stepped encodings if:
1549 a3 - a2 == a2 - a1
1550 implies:
1551 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1553 which is true if (x -> x op c) distributes over addition. */
1554 step_ok_p = distributes_over_addition_p (code, 1);
1555 else
1556 /* Similarly in reverse. */
1557 step_ok_p = distributes_over_addition_p (code, 2);
1558 tree_vector_builder elts;
1559 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1560 return NULL_TREE;
1561 unsigned int count = elts.encoded_nelts ();
1562 for (unsigned int i = 0; i < count; ++i)
1564 tree elem1 = VECTOR_CST_ELT (arg1, i);
1565 tree elem2 = VECTOR_CST_ELT (arg2, i);
1567 tree elt = const_binop (code, elem1, elem2);
1569 /* It is possible that const_binop cannot handle the given
1570 code and return NULL_TREE */
1571 if (elt == NULL_TREE)
1572 return NULL_TREE;
1573 elts.quick_push (elt);
1576 return elts.build ();
1579 /* Shifts allow a scalar offset for a vector. */
1580 if (TREE_CODE (arg1) == VECTOR_CST
1581 && TREE_CODE (arg2) == INTEGER_CST)
1583 tree type = TREE_TYPE (arg1);
1584 bool step_ok_p = distributes_over_addition_p (code, 1);
1585 tree_vector_builder elts;
1586 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1587 return NULL_TREE;
1588 unsigned int count = elts.encoded_nelts ();
1589 for (unsigned int i = 0; i < count; ++i)
1591 tree elem1 = VECTOR_CST_ELT (arg1, i);
1593 tree elt = const_binop (code, elem1, arg2);
1595 /* It is possible that const_binop cannot handle the given
1596 code and return NULL_TREE. */
1597 if (elt == NULL_TREE)
1598 return NULL_TREE;
1599 elts.quick_push (elt);
1602 return elts.build ();
1604 return NULL_TREE;
1607 /* Overload that adds a TYPE parameter to be able to dispatch
1608 to fold_relational_const. */
1610 tree
1611 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1613 if (TREE_CODE_CLASS (code) == tcc_comparison)
1614 return fold_relational_const (code, type, arg1, arg2);
1616 /* ??? Until we make the const_binop worker take the type of the
1617 result as argument put those cases that need it here. */
1618 switch (code)
1620 case VEC_SERIES_EXPR:
1621 if (CONSTANT_CLASS_P (arg1)
1622 && CONSTANT_CLASS_P (arg2))
1623 return build_vec_series (type, arg1, arg2);
1624 return NULL_TREE;
1626 case COMPLEX_EXPR:
1627 if ((TREE_CODE (arg1) == REAL_CST
1628 && TREE_CODE (arg2) == REAL_CST)
1629 || (TREE_CODE (arg1) == INTEGER_CST
1630 && TREE_CODE (arg2) == INTEGER_CST))
1631 return build_complex (type, arg1, arg2);
1632 return NULL_TREE;
1634 case POINTER_DIFF_EXPR:
1635 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1637 poly_offset_int res = (wi::to_poly_offset (arg1)
1638 - wi::to_poly_offset (arg2));
1639 return force_fit_type (type, res, 1,
1640 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1642 return NULL_TREE;
1644 case VEC_PACK_TRUNC_EXPR:
1645 case VEC_PACK_FIX_TRUNC_EXPR:
1646 case VEC_PACK_FLOAT_EXPR:
1648 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1650 if (TREE_CODE (arg1) != VECTOR_CST
1651 || TREE_CODE (arg2) != VECTOR_CST)
1652 return NULL_TREE;
1654 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1655 return NULL_TREE;
1657 out_nelts = in_nelts * 2;
1658 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1659 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1661 tree_vector_builder elts (type, out_nelts, 1);
1662 for (i = 0; i < out_nelts; i++)
1664 tree elt = (i < in_nelts
1665 ? VECTOR_CST_ELT (arg1, i)
1666 : VECTOR_CST_ELT (arg2, i - in_nelts));
1667 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1668 ? NOP_EXPR
1669 : code == VEC_PACK_FLOAT_EXPR
1670 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1671 TREE_TYPE (type), elt);
1672 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1673 return NULL_TREE;
1674 elts.quick_push (elt);
1677 return elts.build ();
1680 case VEC_WIDEN_MULT_LO_EXPR:
1681 case VEC_WIDEN_MULT_HI_EXPR:
1682 case VEC_WIDEN_MULT_EVEN_EXPR:
1683 case VEC_WIDEN_MULT_ODD_EXPR:
1685 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1687 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1688 return NULL_TREE;
1690 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1691 return NULL_TREE;
1692 out_nelts = in_nelts / 2;
1693 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1694 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1696 if (code == VEC_WIDEN_MULT_LO_EXPR)
1697 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1698 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1699 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1700 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1701 scale = 1, ofs = 0;
1702 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1703 scale = 1, ofs = 1;
1705 tree_vector_builder elts (type, out_nelts, 1);
1706 for (out = 0; out < out_nelts; out++)
1708 unsigned int in = (out << scale) + ofs;
1709 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1710 VECTOR_CST_ELT (arg1, in));
1711 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1712 VECTOR_CST_ELT (arg2, in));
1714 if (t1 == NULL_TREE || t2 == NULL_TREE)
1715 return NULL_TREE;
1716 tree elt = const_binop (MULT_EXPR, t1, t2);
1717 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1718 return NULL_TREE;
1719 elts.quick_push (elt);
1722 return elts.build ();
1725 default:;
1728 if (TREE_CODE_CLASS (code) != tcc_binary)
1729 return NULL_TREE;
1731 /* Make sure type and arg0 have the same saturating flag. */
1732 gcc_checking_assert (TYPE_SATURATING (type)
1733 == TYPE_SATURATING (TREE_TYPE (arg1)));
1735 return const_binop (code, arg1, arg2);
1738 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1739 Return zero if computing the constants is not possible. */
1741 tree
1742 const_unop (enum tree_code code, tree type, tree arg0)
1744 /* Don't perform the operation, other than NEGATE and ABS, if
1745 flag_signaling_nans is on and the operand is a signaling NaN. */
1746 if (TREE_CODE (arg0) == REAL_CST
1747 && HONOR_SNANS (arg0)
1748 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1749 && code != NEGATE_EXPR
1750 && code != ABS_EXPR
1751 && code != ABSU_EXPR)
1752 return NULL_TREE;
1754 switch (code)
1756 CASE_CONVERT:
1757 case FLOAT_EXPR:
1758 case FIX_TRUNC_EXPR:
1759 case FIXED_CONVERT_EXPR:
1760 return fold_convert_const (code, type, arg0);
1762 case ADDR_SPACE_CONVERT_EXPR:
1763 /* If the source address is 0, and the source address space
1764 cannot have a valid object at 0, fold to dest type null. */
1765 if (integer_zerop (arg0)
1766 && !(targetm.addr_space.zero_address_valid
1767 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1768 return fold_convert_const (code, type, arg0);
1769 break;
1771 case VIEW_CONVERT_EXPR:
1772 return fold_view_convert_expr (type, arg0);
1774 case NEGATE_EXPR:
1776 /* Can't call fold_negate_const directly here as that doesn't
1777 handle all cases and we might not be able to negate some
1778 constants. */
1779 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1780 if (tem && CONSTANT_CLASS_P (tem))
1781 return tem;
1782 break;
1785 case ABS_EXPR:
1786 case ABSU_EXPR:
1787 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1788 return fold_abs_const (arg0, type);
1789 break;
1791 case CONJ_EXPR:
1792 if (TREE_CODE (arg0) == COMPLEX_CST)
1794 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1795 TREE_TYPE (type));
1796 return build_complex (type, TREE_REALPART (arg0), ipart);
1798 break;
1800 case BIT_NOT_EXPR:
1801 if (TREE_CODE (arg0) == INTEGER_CST)
1802 return fold_not_const (arg0, type);
1803 else if (POLY_INT_CST_P (arg0))
1804 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1805 /* Perform BIT_NOT_EXPR on each element individually. */
1806 else if (TREE_CODE (arg0) == VECTOR_CST)
1808 tree elem;
1810 /* This can cope with stepped encodings because ~x == -1 - x. */
1811 tree_vector_builder elements;
1812 elements.new_unary_operation (type, arg0, true);
1813 unsigned int i, count = elements.encoded_nelts ();
1814 for (i = 0; i < count; ++i)
1816 elem = VECTOR_CST_ELT (arg0, i);
1817 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1818 if (elem == NULL_TREE)
1819 break;
1820 elements.quick_push (elem);
1822 if (i == count)
1823 return elements.build ();
1825 break;
1827 case TRUTH_NOT_EXPR:
1828 if (TREE_CODE (arg0) == INTEGER_CST)
1829 return constant_boolean_node (integer_zerop (arg0), type);
1830 break;
1832 case REALPART_EXPR:
1833 if (TREE_CODE (arg0) == COMPLEX_CST)
1834 return fold_convert (type, TREE_REALPART (arg0));
1835 break;
1837 case IMAGPART_EXPR:
1838 if (TREE_CODE (arg0) == COMPLEX_CST)
1839 return fold_convert (type, TREE_IMAGPART (arg0));
1840 break;
1842 case VEC_UNPACK_LO_EXPR:
1843 case VEC_UNPACK_HI_EXPR:
1844 case VEC_UNPACK_FLOAT_LO_EXPR:
1845 case VEC_UNPACK_FLOAT_HI_EXPR:
1846 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1847 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1849 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1850 enum tree_code subcode;
1852 if (TREE_CODE (arg0) != VECTOR_CST)
1853 return NULL_TREE;
1855 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1856 return NULL_TREE;
1857 out_nelts = in_nelts / 2;
1858 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1860 unsigned int offset = 0;
1861 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1862 || code == VEC_UNPACK_FLOAT_LO_EXPR
1863 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1864 offset = out_nelts;
1866 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1867 subcode = NOP_EXPR;
1868 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1869 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1870 subcode = FLOAT_EXPR;
1871 else
1872 subcode = FIX_TRUNC_EXPR;
1874 tree_vector_builder elts (type, out_nelts, 1);
1875 for (i = 0; i < out_nelts; i++)
1877 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1878 VECTOR_CST_ELT (arg0, i + offset));
1879 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1880 return NULL_TREE;
1881 elts.quick_push (elt);
1884 return elts.build ();
1887 case VEC_DUPLICATE_EXPR:
1888 if (CONSTANT_CLASS_P (arg0))
1889 return build_vector_from_val (type, arg0);
1890 return NULL_TREE;
1892 default:
1893 break;
1896 return NULL_TREE;
1899 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1900 indicates which particular sizetype to create. */
1902 tree
1903 size_int_kind (poly_int64 number, enum size_type_kind kind)
1905 return build_int_cst (sizetype_tab[(int) kind], number);
1908 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1909 is a tree code. The type of the result is taken from the operands.
1910 Both must be equivalent integer types, ala int_binop_types_match_p.
1911 If the operands are constant, so is the result. */
1913 tree
1914 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1916 tree type = TREE_TYPE (arg0);
1918 if (arg0 == error_mark_node || arg1 == error_mark_node)
1919 return error_mark_node;
1921 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1922 TREE_TYPE (arg1)));
1924 /* Handle the special case of two poly_int constants faster. */
1925 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1927 /* And some specific cases even faster than that. */
1928 if (code == PLUS_EXPR)
1930 if (integer_zerop (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1933 if (integer_zerop (arg1)
1934 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1935 return arg0;
1937 else if (code == MINUS_EXPR)
1939 if (integer_zerop (arg1)
1940 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1941 return arg0;
1943 else if (code == MULT_EXPR)
1945 if (integer_onep (arg0)
1946 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1947 return arg1;
1950 /* Handle general case of two integer constants. For sizetype
1951 constant calculations we always want to know about overflow,
1952 even in the unsigned case. */
1953 tree res = int_const_binop (code, arg0, arg1, -1);
1954 if (res != NULL_TREE)
1955 return res;
1958 return fold_build2_loc (loc, code, type, arg0, arg1);
1961 /* Given two values, either both of sizetype or both of bitsizetype,
1962 compute the difference between the two values. Return the value
1963 in signed type corresponding to the type of the operands. */
1965 tree
1966 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1968 tree type = TREE_TYPE (arg0);
1969 tree ctype;
1971 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1972 TREE_TYPE (arg1)));
1974 /* If the type is already signed, just do the simple thing. */
1975 if (!TYPE_UNSIGNED (type))
1976 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1978 if (type == sizetype)
1979 ctype = ssizetype;
1980 else if (type == bitsizetype)
1981 ctype = sbitsizetype;
1982 else
1983 ctype = signed_type_for (type);
1985 /* If either operand is not a constant, do the conversions to the signed
1986 type and subtract. The hardware will do the right thing with any
1987 overflow in the subtraction. */
1988 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1989 return size_binop_loc (loc, MINUS_EXPR,
1990 fold_convert_loc (loc, ctype, arg0),
1991 fold_convert_loc (loc, ctype, arg1));
1993 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1994 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1995 overflow) and negate (which can't either). Special-case a result
1996 of zero while we're here. */
1997 if (tree_int_cst_equal (arg0, arg1))
1998 return build_int_cst (ctype, 0);
1999 else if (tree_int_cst_lt (arg1, arg0))
2000 return fold_convert_loc (loc, ctype,
2001 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2002 else
2003 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2004 fold_convert_loc (loc, ctype,
2005 size_binop_loc (loc,
2006 MINUS_EXPR,
2007 arg1, arg0)));
2010 /* A subroutine of fold_convert_const handling conversions of an
2011 INTEGER_CST to another integer type. */
2013 static tree
2014 fold_convert_const_int_from_int (tree type, const_tree arg1)
2016 /* Given an integer constant, make new constant with new type,
2017 appropriately sign-extended or truncated. Use widest_int
2018 so that any extension is done according ARG1's type. */
2019 return force_fit_type (type, wi::to_widest (arg1),
2020 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2021 TREE_OVERFLOW (arg1));
2024 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2025 to an integer type. */
2027 static tree
2028 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2030 bool overflow = false;
2031 tree t;
2033 /* The following code implements the floating point to integer
2034 conversion rules required by the Java Language Specification,
2035 that IEEE NaNs are mapped to zero and values that overflow
2036 the target precision saturate, i.e. values greater than
2037 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2038 are mapped to INT_MIN. These semantics are allowed by the
2039 C and C++ standards that simply state that the behavior of
2040 FP-to-integer conversion is unspecified upon overflow. */
2042 wide_int val;
2043 REAL_VALUE_TYPE r;
2044 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2046 switch (code)
2048 case FIX_TRUNC_EXPR:
2049 real_trunc (&r, VOIDmode, &x);
2050 break;
2052 default:
2053 gcc_unreachable ();
2056 /* If R is NaN, return zero and show we have an overflow. */
2057 if (REAL_VALUE_ISNAN (r))
2059 overflow = true;
2060 val = wi::zero (TYPE_PRECISION (type));
2063 /* See if R is less than the lower bound or greater than the
2064 upper bound. */
2066 if (! overflow)
2068 tree lt = TYPE_MIN_VALUE (type);
2069 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2070 if (real_less (&r, &l))
2072 overflow = true;
2073 val = wi::to_wide (lt);
2077 if (! overflow)
2079 tree ut = TYPE_MAX_VALUE (type);
2080 if (ut)
2082 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2083 if (real_less (&u, &r))
2085 overflow = true;
2086 val = wi::to_wide (ut);
2091 if (! overflow)
2092 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2094 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2095 return t;
2098 /* A subroutine of fold_convert_const handling conversions of a
2099 FIXED_CST to an integer type. */
2101 static tree
2102 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2104 tree t;
2105 double_int temp, temp_trunc;
2106 scalar_mode mode;
2108 /* Right shift FIXED_CST to temp by fbit. */
2109 temp = TREE_FIXED_CST (arg1).data;
2110 mode = TREE_FIXED_CST (arg1).mode;
2111 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2113 temp = temp.rshift (GET_MODE_FBIT (mode),
2114 HOST_BITS_PER_DOUBLE_INT,
2115 SIGNED_FIXED_POINT_MODE_P (mode));
2117 /* Left shift temp to temp_trunc by fbit. */
2118 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2119 HOST_BITS_PER_DOUBLE_INT,
2120 SIGNED_FIXED_POINT_MODE_P (mode));
2122 else
2124 temp = double_int_zero;
2125 temp_trunc = double_int_zero;
2128 /* If FIXED_CST is negative, we need to round the value toward 0.
2129 By checking if the fractional bits are not zero to add 1 to temp. */
2130 if (SIGNED_FIXED_POINT_MODE_P (mode)
2131 && temp_trunc.is_negative ()
2132 && TREE_FIXED_CST (arg1).data != temp_trunc)
2133 temp += double_int_one;
2135 /* Given a fixed-point constant, make new constant with new type,
2136 appropriately sign-extended or truncated. */
2137 t = force_fit_type (type, temp, -1,
2138 (temp.is_negative ()
2139 && (TYPE_UNSIGNED (type)
2140 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2141 | TREE_OVERFLOW (arg1));
2143 return t;
2146 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2147 to another floating point type. */
2149 static tree
2150 fold_convert_const_real_from_real (tree type, const_tree arg1)
2152 REAL_VALUE_TYPE value;
2153 tree t;
2155 /* Don't perform the operation if flag_signaling_nans is on
2156 and the operand is a signaling NaN. */
2157 if (HONOR_SNANS (arg1)
2158 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2159 return NULL_TREE;
2161 /* With flag_rounding_math we should respect the current rounding mode
2162 unless the conversion is exact. */
2163 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2164 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2165 return NULL_TREE;
2167 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2168 t = build_real (type, value);
2170 /* If converting an infinity or NAN to a representation that doesn't
2171 have one, set the overflow bit so that we can produce some kind of
2172 error message at the appropriate point if necessary. It's not the
2173 most user-friendly message, but it's better than nothing. */
2174 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2175 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2176 TREE_OVERFLOW (t) = 1;
2177 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2178 && !MODE_HAS_NANS (TYPE_MODE (type)))
2179 TREE_OVERFLOW (t) = 1;
2180 /* Regular overflow, conversion produced an infinity in a mode that
2181 can't represent them. */
2182 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2183 && REAL_VALUE_ISINF (value)
2184 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2185 TREE_OVERFLOW (t) = 1;
2186 else
2187 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2188 return t;
2191 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2192 to a floating point type. */
2194 static tree
2195 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2197 REAL_VALUE_TYPE value;
2198 tree t;
2200 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2201 &TREE_FIXED_CST (arg1));
2202 t = build_real (type, value);
2204 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2205 return t;
2208 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2209 to another fixed-point type. */
2211 static tree
2212 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2214 FIXED_VALUE_TYPE value;
2215 tree t;
2216 bool overflow_p;
2218 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2219 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2220 t = build_fixed (type, value);
2222 /* Propagate overflow flags. */
2223 if (overflow_p | TREE_OVERFLOW (arg1))
2224 TREE_OVERFLOW (t) = 1;
2225 return t;
2228 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2229 to a fixed-point type. */
2231 static tree
2232 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2234 FIXED_VALUE_TYPE value;
2235 tree t;
2236 bool overflow_p;
2237 double_int di;
2239 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2241 di.low = TREE_INT_CST_ELT (arg1, 0);
2242 if (TREE_INT_CST_NUNITS (arg1) == 1)
2243 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2244 else
2245 di.high = TREE_INT_CST_ELT (arg1, 1);
2247 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2248 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2258 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2259 to a fixed-point type. */
2261 static tree
2262 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2264 FIXED_VALUE_TYPE value;
2265 tree t;
2266 bool overflow_p;
2268 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2269 &TREE_REAL_CST (arg1),
2270 TYPE_SATURATING (type));
2271 t = build_fixed (type, value);
2273 /* Propagate overflow flags. */
2274 if (overflow_p | TREE_OVERFLOW (arg1))
2275 TREE_OVERFLOW (t) = 1;
2276 return t;
2279 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2280 type TYPE. If no simplification can be done return NULL_TREE. */
2282 static tree
2283 fold_convert_const (enum tree_code code, tree type, tree arg1)
2285 tree arg_type = TREE_TYPE (arg1);
2286 if (arg_type == type)
2287 return arg1;
2289 /* We can't widen types, since the runtime value could overflow the
2290 original type before being extended to the new type. */
2291 if (POLY_INT_CST_P (arg1)
2292 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2293 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2294 return build_poly_int_cst (type,
2295 poly_wide_int::from (poly_int_cst_value (arg1),
2296 TYPE_PRECISION (type),
2297 TYPE_SIGN (arg_type)));
2299 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2300 || TREE_CODE (type) == OFFSET_TYPE)
2302 if (TREE_CODE (arg1) == INTEGER_CST)
2303 return fold_convert_const_int_from_int (type, arg1);
2304 else if (TREE_CODE (arg1) == REAL_CST)
2305 return fold_convert_const_int_from_real (code, type, arg1);
2306 else if (TREE_CODE (arg1) == FIXED_CST)
2307 return fold_convert_const_int_from_fixed (type, arg1);
2309 else if (TREE_CODE (type) == REAL_TYPE)
2311 if (TREE_CODE (arg1) == INTEGER_CST)
2313 tree res = build_real_from_int_cst (type, arg1);
2314 /* Avoid the folding if flag_rounding_math is on and the
2315 conversion is not exact. */
2316 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2318 bool fail = false;
2319 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2320 TYPE_PRECISION (TREE_TYPE (arg1)));
2321 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2322 return NULL_TREE;
2324 return res;
2326 else if (TREE_CODE (arg1) == REAL_CST)
2327 return fold_convert_const_real_from_real (type, arg1);
2328 else if (TREE_CODE (arg1) == FIXED_CST)
2329 return fold_convert_const_real_from_fixed (type, arg1);
2331 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2333 if (TREE_CODE (arg1) == FIXED_CST)
2334 return fold_convert_const_fixed_from_fixed (type, arg1);
2335 else if (TREE_CODE (arg1) == INTEGER_CST)
2336 return fold_convert_const_fixed_from_int (type, arg1);
2337 else if (TREE_CODE (arg1) == REAL_CST)
2338 return fold_convert_const_fixed_from_real (type, arg1);
2340 else if (TREE_CODE (type) == VECTOR_TYPE)
2342 if (TREE_CODE (arg1) == VECTOR_CST
2343 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2345 tree elttype = TREE_TYPE (type);
2346 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2347 /* We can't handle steps directly when extending, since the
2348 values need to wrap at the original precision first. */
2349 bool step_ok_p
2350 = (INTEGRAL_TYPE_P (elttype)
2351 && INTEGRAL_TYPE_P (arg1_elttype)
2352 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2353 tree_vector_builder v;
2354 if (!v.new_unary_operation (type, arg1, step_ok_p))
2355 return NULL_TREE;
2356 unsigned int len = v.encoded_nelts ();
2357 for (unsigned int i = 0; i < len; ++i)
2359 tree elt = VECTOR_CST_ELT (arg1, i);
2360 tree cvt = fold_convert_const (code, elttype, elt);
2361 if (cvt == NULL_TREE)
2362 return NULL_TREE;
2363 v.quick_push (cvt);
2365 return v.build ();
2368 return NULL_TREE;
2371 /* Construct a vector of zero elements of vector type TYPE. */
2373 static tree
2374 build_zero_vector (tree type)
2376 tree t;
2378 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2379 return build_vector_from_val (type, t);
2382 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2384 bool
2385 fold_convertible_p (const_tree type, const_tree arg)
2387 const_tree orig = TREE_TYPE (arg);
2389 if (type == orig)
2390 return true;
2392 if (TREE_CODE (arg) == ERROR_MARK
2393 || TREE_CODE (type) == ERROR_MARK
2394 || TREE_CODE (orig) == ERROR_MARK)
2395 return false;
2397 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2398 return true;
2400 switch (TREE_CODE (type))
2402 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2403 case POINTER_TYPE: case REFERENCE_TYPE:
2404 case OFFSET_TYPE:
2405 return (INTEGRAL_TYPE_P (orig)
2406 || (POINTER_TYPE_P (orig)
2407 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2408 || TREE_CODE (orig) == OFFSET_TYPE);
2410 case REAL_TYPE:
2411 case FIXED_POINT_TYPE:
2412 case VOID_TYPE:
2413 return TREE_CODE (type) == TREE_CODE (orig);
2415 case VECTOR_TYPE:
2416 return (VECTOR_TYPE_P (orig)
2417 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2418 TYPE_VECTOR_SUBPARTS (orig))
2419 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2421 default:
2422 return false;
2426 /* Convert expression ARG to type TYPE. Used by the middle-end for
2427 simple conversions in preference to calling the front-end's convert. */
2429 tree
2430 fold_convert_loc (location_t loc, tree type, tree arg)
2432 tree orig = TREE_TYPE (arg);
2433 tree tem;
2435 if (type == orig)
2436 return arg;
2438 if (TREE_CODE (arg) == ERROR_MARK
2439 || TREE_CODE (type) == ERROR_MARK
2440 || TREE_CODE (orig) == ERROR_MARK)
2441 return error_mark_node;
2443 switch (TREE_CODE (type))
2445 case POINTER_TYPE:
2446 case REFERENCE_TYPE:
2447 /* Handle conversions between pointers to different address spaces. */
2448 if (POINTER_TYPE_P (orig)
2449 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2450 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2451 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2452 /* fall through */
2454 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2455 case OFFSET_TYPE:
2456 if (TREE_CODE (arg) == INTEGER_CST)
2458 tem = fold_convert_const (NOP_EXPR, type, arg);
2459 if (tem != NULL_TREE)
2460 return tem;
2462 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2463 || TREE_CODE (orig) == OFFSET_TYPE)
2464 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2465 if (TREE_CODE (orig) == COMPLEX_TYPE)
2466 return fold_convert_loc (loc, type,
2467 fold_build1_loc (loc, REALPART_EXPR,
2468 TREE_TYPE (orig), arg));
2469 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2470 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2471 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2473 case REAL_TYPE:
2474 if (TREE_CODE (arg) == INTEGER_CST)
2476 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2477 if (tem != NULL_TREE)
2478 return tem;
2480 else if (TREE_CODE (arg) == REAL_CST)
2482 tem = fold_convert_const (NOP_EXPR, type, arg);
2483 if (tem != NULL_TREE)
2484 return tem;
2486 else if (TREE_CODE (arg) == FIXED_CST)
2488 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2489 if (tem != NULL_TREE)
2490 return tem;
2493 switch (TREE_CODE (orig))
2495 case INTEGER_TYPE:
2496 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2497 case POINTER_TYPE: case REFERENCE_TYPE:
2498 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2500 case REAL_TYPE:
2501 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2503 case FIXED_POINT_TYPE:
2504 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2506 case COMPLEX_TYPE:
2507 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2508 return fold_convert_loc (loc, type, tem);
2510 default:
2511 gcc_unreachable ();
2514 case FIXED_POINT_TYPE:
2515 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2516 || TREE_CODE (arg) == REAL_CST)
2518 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2519 if (tem != NULL_TREE)
2520 goto fold_convert_exit;
2523 switch (TREE_CODE (orig))
2525 case FIXED_POINT_TYPE:
2526 case INTEGER_TYPE:
2527 case ENUMERAL_TYPE:
2528 case BOOLEAN_TYPE:
2529 case REAL_TYPE:
2530 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2532 case COMPLEX_TYPE:
2533 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2534 return fold_convert_loc (loc, type, tem);
2536 default:
2537 gcc_unreachable ();
2540 case COMPLEX_TYPE:
2541 switch (TREE_CODE (orig))
2543 case INTEGER_TYPE:
2544 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2545 case POINTER_TYPE: case REFERENCE_TYPE:
2546 case REAL_TYPE:
2547 case FIXED_POINT_TYPE:
2548 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2549 fold_convert_loc (loc, TREE_TYPE (type), arg),
2550 fold_convert_loc (loc, TREE_TYPE (type),
2551 integer_zero_node));
2552 case COMPLEX_TYPE:
2554 tree rpart, ipart;
2556 if (TREE_CODE (arg) == COMPLEX_EXPR)
2558 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2559 TREE_OPERAND (arg, 0));
2560 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2561 TREE_OPERAND (arg, 1));
2562 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2565 arg = save_expr (arg);
2566 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2567 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2568 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2569 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2570 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2573 default:
2574 gcc_unreachable ();
2577 case VECTOR_TYPE:
2578 if (integer_zerop (arg))
2579 return build_zero_vector (type);
2580 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2581 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2582 || TREE_CODE (orig) == VECTOR_TYPE);
2583 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2585 case VOID_TYPE:
2586 tem = fold_ignored_result (arg);
2587 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2589 default:
2590 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2591 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2592 gcc_unreachable ();
2594 fold_convert_exit:
2595 protected_set_expr_location_unshare (tem, loc);
2596 return tem;
2599 /* Return false if expr can be assumed not to be an lvalue, true
2600 otherwise. */
2602 static bool
2603 maybe_lvalue_p (const_tree x)
2605 /* We only need to wrap lvalue tree codes. */
2606 switch (TREE_CODE (x))
2608 case VAR_DECL:
2609 case PARM_DECL:
2610 case RESULT_DECL:
2611 case LABEL_DECL:
2612 case FUNCTION_DECL:
2613 case SSA_NAME:
2615 case COMPONENT_REF:
2616 case MEM_REF:
2617 case INDIRECT_REF:
2618 case ARRAY_REF:
2619 case ARRAY_RANGE_REF:
2620 case BIT_FIELD_REF:
2621 case OBJ_TYPE_REF:
2623 case REALPART_EXPR:
2624 case IMAGPART_EXPR:
2625 case PREINCREMENT_EXPR:
2626 case PREDECREMENT_EXPR:
2627 case SAVE_EXPR:
2628 case TRY_CATCH_EXPR:
2629 case WITH_CLEANUP_EXPR:
2630 case COMPOUND_EXPR:
2631 case MODIFY_EXPR:
2632 case TARGET_EXPR:
2633 case COND_EXPR:
2634 case BIND_EXPR:
2635 case VIEW_CONVERT_EXPR:
2636 break;
2638 default:
2639 /* Assume the worst for front-end tree codes. */
2640 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2641 break;
2642 return false;
2645 return true;
2648 /* Return an expr equal to X but certainly not valid as an lvalue. */
2650 tree
2651 non_lvalue_loc (location_t loc, tree x)
2653 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2654 us. */
2655 if (in_gimple_form)
2656 return x;
2658 if (! maybe_lvalue_p (x))
2659 return x;
2660 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2663 /* Given a tree comparison code, return the code that is the logical inverse.
2664 It is generally not safe to do this for floating-point comparisons, except
2665 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2666 ERROR_MARK in this case. */
2668 enum tree_code
2669 invert_tree_comparison (enum tree_code code, bool honor_nans)
2671 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2672 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2673 return ERROR_MARK;
2675 switch (code)
2677 case EQ_EXPR:
2678 return NE_EXPR;
2679 case NE_EXPR:
2680 return EQ_EXPR;
2681 case GT_EXPR:
2682 return honor_nans ? UNLE_EXPR : LE_EXPR;
2683 case GE_EXPR:
2684 return honor_nans ? UNLT_EXPR : LT_EXPR;
2685 case LT_EXPR:
2686 return honor_nans ? UNGE_EXPR : GE_EXPR;
2687 case LE_EXPR:
2688 return honor_nans ? UNGT_EXPR : GT_EXPR;
2689 case LTGT_EXPR:
2690 return UNEQ_EXPR;
2691 case UNEQ_EXPR:
2692 return LTGT_EXPR;
2693 case UNGT_EXPR:
2694 return LE_EXPR;
2695 case UNGE_EXPR:
2696 return LT_EXPR;
2697 case UNLT_EXPR:
2698 return GE_EXPR;
2699 case UNLE_EXPR:
2700 return GT_EXPR;
2701 case ORDERED_EXPR:
2702 return UNORDERED_EXPR;
2703 case UNORDERED_EXPR:
2704 return ORDERED_EXPR;
2705 default:
2706 gcc_unreachable ();
2710 /* Similar, but return the comparison that results if the operands are
2711 swapped. This is safe for floating-point. */
2713 enum tree_code
2714 swap_tree_comparison (enum tree_code code)
2716 switch (code)
2718 case EQ_EXPR:
2719 case NE_EXPR:
2720 case ORDERED_EXPR:
2721 case UNORDERED_EXPR:
2722 case LTGT_EXPR:
2723 case UNEQ_EXPR:
2724 return code;
2725 case GT_EXPR:
2726 return LT_EXPR;
2727 case GE_EXPR:
2728 return LE_EXPR;
2729 case LT_EXPR:
2730 return GT_EXPR;
2731 case LE_EXPR:
2732 return GE_EXPR;
2733 case UNGT_EXPR:
2734 return UNLT_EXPR;
2735 case UNGE_EXPR:
2736 return UNLE_EXPR;
2737 case UNLT_EXPR:
2738 return UNGT_EXPR;
2739 case UNLE_EXPR:
2740 return UNGE_EXPR;
2741 default:
2742 gcc_unreachable ();
2747 /* Convert a comparison tree code from an enum tree_code representation
2748 into a compcode bit-based encoding. This function is the inverse of
2749 compcode_to_comparison. */
2751 static enum comparison_code
2752 comparison_to_compcode (enum tree_code code)
2754 switch (code)
2756 case LT_EXPR:
2757 return COMPCODE_LT;
2758 case EQ_EXPR:
2759 return COMPCODE_EQ;
2760 case LE_EXPR:
2761 return COMPCODE_LE;
2762 case GT_EXPR:
2763 return COMPCODE_GT;
2764 case NE_EXPR:
2765 return COMPCODE_NE;
2766 case GE_EXPR:
2767 return COMPCODE_GE;
2768 case ORDERED_EXPR:
2769 return COMPCODE_ORD;
2770 case UNORDERED_EXPR:
2771 return COMPCODE_UNORD;
2772 case UNLT_EXPR:
2773 return COMPCODE_UNLT;
2774 case UNEQ_EXPR:
2775 return COMPCODE_UNEQ;
2776 case UNLE_EXPR:
2777 return COMPCODE_UNLE;
2778 case UNGT_EXPR:
2779 return COMPCODE_UNGT;
2780 case LTGT_EXPR:
2781 return COMPCODE_LTGT;
2782 case UNGE_EXPR:
2783 return COMPCODE_UNGE;
2784 default:
2785 gcc_unreachable ();
2789 /* Convert a compcode bit-based encoding of a comparison operator back
2790 to GCC's enum tree_code representation. This function is the
2791 inverse of comparison_to_compcode. */
2793 static enum tree_code
2794 compcode_to_comparison (enum comparison_code code)
2796 switch (code)
2798 case COMPCODE_LT:
2799 return LT_EXPR;
2800 case COMPCODE_EQ:
2801 return EQ_EXPR;
2802 case COMPCODE_LE:
2803 return LE_EXPR;
2804 case COMPCODE_GT:
2805 return GT_EXPR;
2806 case COMPCODE_NE:
2807 return NE_EXPR;
2808 case COMPCODE_GE:
2809 return GE_EXPR;
2810 case COMPCODE_ORD:
2811 return ORDERED_EXPR;
2812 case COMPCODE_UNORD:
2813 return UNORDERED_EXPR;
2814 case COMPCODE_UNLT:
2815 return UNLT_EXPR;
2816 case COMPCODE_UNEQ:
2817 return UNEQ_EXPR;
2818 case COMPCODE_UNLE:
2819 return UNLE_EXPR;
2820 case COMPCODE_UNGT:
2821 return UNGT_EXPR;
2822 case COMPCODE_LTGT:
2823 return LTGT_EXPR;
2824 case COMPCODE_UNGE:
2825 return UNGE_EXPR;
2826 default:
2827 gcc_unreachable ();
2831 /* Return true if COND1 tests the opposite condition of COND2. */
2833 bool
2834 inverse_conditions_p (const_tree cond1, const_tree cond2)
2836 return (COMPARISON_CLASS_P (cond1)
2837 && COMPARISON_CLASS_P (cond2)
2838 && (invert_tree_comparison
2839 (TREE_CODE (cond1),
2840 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2841 && operand_equal_p (TREE_OPERAND (cond1, 0),
2842 TREE_OPERAND (cond2, 0), 0)
2843 && operand_equal_p (TREE_OPERAND (cond1, 1),
2844 TREE_OPERAND (cond2, 1), 0));
2847 /* Return a tree for the comparison which is the combination of
2848 doing the AND or OR (depending on CODE) of the two operations LCODE
2849 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2850 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2851 if this makes the transformation invalid. */
2853 tree
2854 combine_comparisons (location_t loc,
2855 enum tree_code code, enum tree_code lcode,
2856 enum tree_code rcode, tree truth_type,
2857 tree ll_arg, tree lr_arg)
2859 bool honor_nans = HONOR_NANS (ll_arg);
2860 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2861 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2862 int compcode;
2864 switch (code)
2866 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2867 compcode = lcompcode & rcompcode;
2868 break;
2870 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2871 compcode = lcompcode | rcompcode;
2872 break;
2874 default:
2875 return NULL_TREE;
2878 if (!honor_nans)
2880 /* Eliminate unordered comparisons, as well as LTGT and ORD
2881 which are not used unless the mode has NaNs. */
2882 compcode &= ~COMPCODE_UNORD;
2883 if (compcode == COMPCODE_LTGT)
2884 compcode = COMPCODE_NE;
2885 else if (compcode == COMPCODE_ORD)
2886 compcode = COMPCODE_TRUE;
2888 else if (flag_trapping_math)
2890 /* Check that the original operation and the optimized ones will trap
2891 under the same condition. */
2892 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2893 && (lcompcode != COMPCODE_EQ)
2894 && (lcompcode != COMPCODE_ORD);
2895 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2896 && (rcompcode != COMPCODE_EQ)
2897 && (rcompcode != COMPCODE_ORD);
2898 bool trap = (compcode & COMPCODE_UNORD) == 0
2899 && (compcode != COMPCODE_EQ)
2900 && (compcode != COMPCODE_ORD);
2902 /* In a short-circuited boolean expression the LHS might be
2903 such that the RHS, if evaluated, will never trap. For
2904 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2905 if neither x nor y is NaN. (This is a mixed blessing: for
2906 example, the expression above will never trap, hence
2907 optimizing it to x < y would be invalid). */
2908 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2909 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2910 rtrap = false;
2912 /* If the comparison was short-circuited, and only the RHS
2913 trapped, we may now generate a spurious trap. */
2914 if (rtrap && !ltrap
2915 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2916 return NULL_TREE;
2918 /* If we changed the conditions that cause a trap, we lose. */
2919 if ((ltrap || rtrap) != trap)
2920 return NULL_TREE;
2923 if (compcode == COMPCODE_TRUE)
2924 return constant_boolean_node (true, truth_type);
2925 else if (compcode == COMPCODE_FALSE)
2926 return constant_boolean_node (false, truth_type);
2927 else
2929 enum tree_code tcode;
2931 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2932 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2936 /* Return nonzero if two operands (typically of the same tree node)
2937 are necessarily equal. FLAGS modifies behavior as follows:
2939 If OEP_ONLY_CONST is set, only return nonzero for constants.
2940 This function tests whether the operands are indistinguishable;
2941 it does not test whether they are equal using C's == operation.
2942 The distinction is important for IEEE floating point, because
2943 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2944 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2946 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2947 even though it may hold multiple values during a function.
2948 This is because a GCC tree node guarantees that nothing else is
2949 executed between the evaluation of its "operands" (which may often
2950 be evaluated in arbitrary order). Hence if the operands themselves
2951 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2952 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2953 unset means assuming isochronic (or instantaneous) tree equivalence.
2954 Unless comparing arbitrary expression trees, such as from different
2955 statements, this flag can usually be left unset.
2957 If OEP_PURE_SAME is set, then pure functions with identical arguments
2958 are considered the same. It is used when the caller has other ways
2959 to ensure that global memory is unchanged in between.
2961 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2962 not values of expressions.
2964 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2965 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2967 If OEP_BITWISE is set, then require the values to be bitwise identical
2968 rather than simply numerically equal. Do not take advantage of things
2969 like math-related flags or undefined behavior; only return true for
2970 values that are provably bitwise identical in all circumstances.
2972 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2973 any operand with side effect. This is unnecesarily conservative in the
2974 case we know that arg0 and arg1 are in disjoint code paths (such as in
2975 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2976 addresses with TREE_CONSTANT flag set so we know that &var == &var
2977 even if var is volatile. */
2979 bool
2980 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2981 unsigned int flags)
2983 bool r;
2984 if (verify_hash_value (arg0, arg1, flags, &r))
2985 return r;
2987 STRIP_ANY_LOCATION_WRAPPER (arg0);
2988 STRIP_ANY_LOCATION_WRAPPER (arg1);
2990 /* If either is ERROR_MARK, they aren't equal. */
2991 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2992 || TREE_TYPE (arg0) == error_mark_node
2993 || TREE_TYPE (arg1) == error_mark_node)
2994 return false;
2996 /* Similar, if either does not have a type (like a template id),
2997 they aren't equal. */
2998 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2999 return false;
3001 /* Bitwise identity makes no sense if the values have different layouts. */
3002 if ((flags & OEP_BITWISE)
3003 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3004 return false;
3006 /* We cannot consider pointers to different address space equal. */
3007 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3008 && POINTER_TYPE_P (TREE_TYPE (arg1))
3009 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3010 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3011 return false;
3013 /* Check equality of integer constants before bailing out due to
3014 precision differences. */
3015 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3017 /* Address of INTEGER_CST is not defined; check that we did not forget
3018 to drop the OEP_ADDRESS_OF flags. */
3019 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3020 return tree_int_cst_equal (arg0, arg1);
3023 if (!(flags & OEP_ADDRESS_OF))
3025 /* If both types don't have the same signedness, then we can't consider
3026 them equal. We must check this before the STRIP_NOPS calls
3027 because they may change the signedness of the arguments. As pointers
3028 strictly don't have a signedness, require either two pointers or
3029 two non-pointers as well. */
3030 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3031 || POINTER_TYPE_P (TREE_TYPE (arg0))
3032 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3033 return false;
3035 /* If both types don't have the same precision, then it is not safe
3036 to strip NOPs. */
3037 if (element_precision (TREE_TYPE (arg0))
3038 != element_precision (TREE_TYPE (arg1)))
3039 return false;
3041 STRIP_NOPS (arg0);
3042 STRIP_NOPS (arg1);
3044 #if 0
3045 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3046 sanity check once the issue is solved. */
3047 else
3048 /* Addresses of conversions and SSA_NAMEs (and many other things)
3049 are not defined. Check that we did not forget to drop the
3050 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3051 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3052 && TREE_CODE (arg0) != SSA_NAME);
3053 #endif
3055 /* In case both args are comparisons but with different comparison
3056 code, try to swap the comparison operands of one arg to produce
3057 a match and compare that variant. */
3058 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3059 && COMPARISON_CLASS_P (arg0)
3060 && COMPARISON_CLASS_P (arg1))
3062 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3064 if (TREE_CODE (arg0) == swap_code)
3065 return operand_equal_p (TREE_OPERAND (arg0, 0),
3066 TREE_OPERAND (arg1, 1), flags)
3067 && operand_equal_p (TREE_OPERAND (arg0, 1),
3068 TREE_OPERAND (arg1, 0), flags);
3071 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3073 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3074 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3076 else if (flags & OEP_ADDRESS_OF)
3078 /* If we are interested in comparing addresses ignore
3079 MEM_REF wrappings of the base that can appear just for
3080 TBAA reasons. */
3081 if (TREE_CODE (arg0) == MEM_REF
3082 && DECL_P (arg1)
3083 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3084 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3085 && integer_zerop (TREE_OPERAND (arg0, 1)))
3086 return true;
3087 else if (TREE_CODE (arg1) == MEM_REF
3088 && DECL_P (arg0)
3089 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3090 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3091 && integer_zerop (TREE_OPERAND (arg1, 1)))
3092 return true;
3093 return false;
3095 else
3096 return false;
3099 /* When not checking adddresses, this is needed for conversions and for
3100 COMPONENT_REF. Might as well play it safe and always test this. */
3101 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3102 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3103 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3104 && !(flags & OEP_ADDRESS_OF)))
3105 return false;
3107 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3108 We don't care about side effects in that case because the SAVE_EXPR
3109 takes care of that for us. In all other cases, two expressions are
3110 equal if they have no side effects. If we have two identical
3111 expressions with side effects that should be treated the same due
3112 to the only side effects being identical SAVE_EXPR's, that will
3113 be detected in the recursive calls below.
3114 If we are taking an invariant address of two identical objects
3115 they are necessarily equal as well. */
3116 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3117 && (TREE_CODE (arg0) == SAVE_EXPR
3118 || (flags & OEP_MATCH_SIDE_EFFECTS)
3119 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3120 return true;
3122 /* Next handle constant cases, those for which we can return 1 even
3123 if ONLY_CONST is set. */
3124 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3125 switch (TREE_CODE (arg0))
3127 case INTEGER_CST:
3128 return tree_int_cst_equal (arg0, arg1);
3130 case FIXED_CST:
3131 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3132 TREE_FIXED_CST (arg1));
3134 case REAL_CST:
3135 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3136 return true;
3138 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3140 /* If we do not distinguish between signed and unsigned zero,
3141 consider them equal. */
3142 if (real_zerop (arg0) && real_zerop (arg1))
3143 return true;
3145 return false;
3147 case VECTOR_CST:
3149 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3150 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3151 return false;
3153 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3154 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3155 return false;
3157 unsigned int count = vector_cst_encoded_nelts (arg0);
3158 for (unsigned int i = 0; i < count; ++i)
3159 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3160 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3161 return false;
3162 return true;
3165 case COMPLEX_CST:
3166 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3167 flags)
3168 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3169 flags));
3171 case STRING_CST:
3172 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3173 && ! memcmp (TREE_STRING_POINTER (arg0),
3174 TREE_STRING_POINTER (arg1),
3175 TREE_STRING_LENGTH (arg0)));
3177 case ADDR_EXPR:
3178 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3179 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3180 flags | OEP_ADDRESS_OF
3181 | OEP_MATCH_SIDE_EFFECTS);
3182 case CONSTRUCTOR:
3183 /* In GIMPLE empty constructors are allowed in initializers of
3184 aggregates. */
3185 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3186 default:
3187 break;
3190 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3191 two instances of undefined behavior will give identical results. */
3192 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3193 return false;
3195 /* Define macros to test an operand from arg0 and arg1 for equality and a
3196 variant that allows null and views null as being different from any
3197 non-null value. In the latter case, if either is null, the both
3198 must be; otherwise, do the normal comparison. */
3199 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3200 TREE_OPERAND (arg1, N), flags)
3202 #define OP_SAME_WITH_NULL(N) \
3203 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3204 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3206 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3208 case tcc_unary:
3209 /* Two conversions are equal only if signedness and modes match. */
3210 switch (TREE_CODE (arg0))
3212 CASE_CONVERT:
3213 case FIX_TRUNC_EXPR:
3214 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3215 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3216 return false;
3217 break;
3218 default:
3219 break;
3222 return OP_SAME (0);
3225 case tcc_comparison:
3226 case tcc_binary:
3227 if (OP_SAME (0) && OP_SAME (1))
3228 return true;
3230 /* For commutative ops, allow the other order. */
3231 return (commutative_tree_code (TREE_CODE (arg0))
3232 && operand_equal_p (TREE_OPERAND (arg0, 0),
3233 TREE_OPERAND (arg1, 1), flags)
3234 && operand_equal_p (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 0), flags));
3237 case tcc_reference:
3238 /* If either of the pointer (or reference) expressions we are
3239 dereferencing contain a side effect, these cannot be equal,
3240 but their addresses can be. */
3241 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3242 && (TREE_SIDE_EFFECTS (arg0)
3243 || TREE_SIDE_EFFECTS (arg1)))
3244 return false;
3246 switch (TREE_CODE (arg0))
3248 case INDIRECT_REF:
3249 if (!(flags & OEP_ADDRESS_OF))
3251 if (TYPE_ALIGN (TREE_TYPE (arg0))
3252 != TYPE_ALIGN (TREE_TYPE (arg1)))
3253 return false;
3254 /* Verify that the access types are compatible. */
3255 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3256 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3257 return false;
3259 flags &= ~OEP_ADDRESS_OF;
3260 return OP_SAME (0);
3262 case IMAGPART_EXPR:
3263 /* Require the same offset. */
3264 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3265 TYPE_SIZE (TREE_TYPE (arg1)),
3266 flags & ~OEP_ADDRESS_OF))
3267 return false;
3269 /* Fallthru. */
3270 case REALPART_EXPR:
3271 case VIEW_CONVERT_EXPR:
3272 return OP_SAME (0);
3274 case TARGET_MEM_REF:
3275 case MEM_REF:
3276 if (!(flags & OEP_ADDRESS_OF))
3278 /* Require equal access sizes */
3279 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3280 && (!TYPE_SIZE (TREE_TYPE (arg0))
3281 || !TYPE_SIZE (TREE_TYPE (arg1))
3282 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3283 TYPE_SIZE (TREE_TYPE (arg1)),
3284 flags)))
3285 return false;
3286 /* Verify that access happens in similar types. */
3287 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3288 return false;
3289 /* Verify that accesses are TBAA compatible. */
3290 if (!alias_ptr_types_compatible_p
3291 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3292 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3293 || (MR_DEPENDENCE_CLIQUE (arg0)
3294 != MR_DEPENDENCE_CLIQUE (arg1))
3295 || (MR_DEPENDENCE_BASE (arg0)
3296 != MR_DEPENDENCE_BASE (arg1)))
3297 return false;
3298 /* Verify that alignment is compatible. */
3299 if (TYPE_ALIGN (TREE_TYPE (arg0))
3300 != TYPE_ALIGN (TREE_TYPE (arg1)))
3301 return false;
3303 flags &= ~OEP_ADDRESS_OF;
3304 return (OP_SAME (0) && OP_SAME (1)
3305 /* TARGET_MEM_REF require equal extra operands. */
3306 && (TREE_CODE (arg0) != TARGET_MEM_REF
3307 || (OP_SAME_WITH_NULL (2)
3308 && OP_SAME_WITH_NULL (3)
3309 && OP_SAME_WITH_NULL (4))));
3311 case ARRAY_REF:
3312 case ARRAY_RANGE_REF:
3313 if (!OP_SAME (0))
3314 return false;
3315 flags &= ~OEP_ADDRESS_OF;
3316 /* Compare the array index by value if it is constant first as we
3317 may have different types but same value here. */
3318 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3319 TREE_OPERAND (arg1, 1))
3320 || OP_SAME (1))
3321 && OP_SAME_WITH_NULL (2)
3322 && OP_SAME_WITH_NULL (3)
3323 /* Compare low bound and element size as with OEP_ADDRESS_OF
3324 we have to account for the offset of the ref. */
3325 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3326 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3327 || (operand_equal_p (array_ref_low_bound
3328 (CONST_CAST_TREE (arg0)),
3329 array_ref_low_bound
3330 (CONST_CAST_TREE (arg1)), flags)
3331 && operand_equal_p (array_ref_element_size
3332 (CONST_CAST_TREE (arg0)),
3333 array_ref_element_size
3334 (CONST_CAST_TREE (arg1)),
3335 flags))));
3337 case COMPONENT_REF:
3338 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3339 may be NULL when we're called to compare MEM_EXPRs. */
3340 if (!OP_SAME_WITH_NULL (0))
3341 return false;
3343 bool compare_address = flags & OEP_ADDRESS_OF;
3345 /* Most of time we only need to compare FIELD_DECLs for equality.
3346 However when determining address look into actual offsets.
3347 These may match for unions and unshared record types. */
3348 flags &= ~OEP_ADDRESS_OF;
3349 if (!OP_SAME (1))
3351 if (compare_address
3352 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3354 if (TREE_OPERAND (arg0, 2)
3355 || TREE_OPERAND (arg1, 2))
3356 return OP_SAME_WITH_NULL (2);
3357 tree field0 = TREE_OPERAND (arg0, 1);
3358 tree field1 = TREE_OPERAND (arg1, 1);
3360 /* Non-FIELD_DECL operands can appear in C++ templates. */
3361 if (TREE_CODE (field0) != FIELD_DECL
3362 || TREE_CODE (field1) != FIELD_DECL
3363 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3364 DECL_FIELD_OFFSET (field1), flags)
3365 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3366 DECL_FIELD_BIT_OFFSET (field1),
3367 flags))
3368 return false;
3370 else
3371 return false;
3374 return OP_SAME_WITH_NULL (2);
3376 case BIT_FIELD_REF:
3377 if (!OP_SAME (0))
3378 return false;
3379 flags &= ~OEP_ADDRESS_OF;
3380 return OP_SAME (1) && OP_SAME (2);
3382 default:
3383 return false;
3386 case tcc_expression:
3387 switch (TREE_CODE (arg0))
3389 case ADDR_EXPR:
3390 /* Be sure we pass right ADDRESS_OF flag. */
3391 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3392 return operand_equal_p (TREE_OPERAND (arg0, 0),
3393 TREE_OPERAND (arg1, 0),
3394 flags | OEP_ADDRESS_OF);
3396 case TRUTH_NOT_EXPR:
3397 return OP_SAME (0);
3399 case TRUTH_ANDIF_EXPR:
3400 case TRUTH_ORIF_EXPR:
3401 return OP_SAME (0) && OP_SAME (1);
3403 case WIDEN_MULT_PLUS_EXPR:
3404 case WIDEN_MULT_MINUS_EXPR:
3405 if (!OP_SAME (2))
3406 return false;
3407 /* The multiplcation operands are commutative. */
3408 /* FALLTHRU */
3410 case TRUTH_AND_EXPR:
3411 case TRUTH_OR_EXPR:
3412 case TRUTH_XOR_EXPR:
3413 if (OP_SAME (0) && OP_SAME (1))
3414 return true;
3416 /* Otherwise take into account this is a commutative operation. */
3417 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3418 TREE_OPERAND (arg1, 1), flags)
3419 && operand_equal_p (TREE_OPERAND (arg0, 1),
3420 TREE_OPERAND (arg1, 0), flags));
3422 case COND_EXPR:
3423 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3424 return false;
3425 flags &= ~OEP_ADDRESS_OF;
3426 return OP_SAME (0);
3428 case BIT_INSERT_EXPR:
3429 /* BIT_INSERT_EXPR has an implict operand as the type precision
3430 of op1. Need to check to make sure they are the same. */
3431 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3432 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3433 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3434 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3435 return false;
3436 /* FALLTHRU */
3438 case VEC_COND_EXPR:
3439 case DOT_PROD_EXPR:
3440 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3442 case MODIFY_EXPR:
3443 case INIT_EXPR:
3444 case COMPOUND_EXPR:
3445 case PREDECREMENT_EXPR:
3446 case PREINCREMENT_EXPR:
3447 case POSTDECREMENT_EXPR:
3448 case POSTINCREMENT_EXPR:
3449 if (flags & OEP_LEXICOGRAPHIC)
3450 return OP_SAME (0) && OP_SAME (1);
3451 return false;
3453 case CLEANUP_POINT_EXPR:
3454 case EXPR_STMT:
3455 case SAVE_EXPR:
3456 if (flags & OEP_LEXICOGRAPHIC)
3457 return OP_SAME (0);
3458 return false;
3460 case OBJ_TYPE_REF:
3461 /* Virtual table reference. */
3462 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3463 OBJ_TYPE_REF_EXPR (arg1), flags))
3464 return false;
3465 flags &= ~OEP_ADDRESS_OF;
3466 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3467 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3468 return false;
3469 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3470 OBJ_TYPE_REF_OBJECT (arg1), flags))
3471 return false;
3472 if (virtual_method_call_p (arg0))
3474 if (!virtual_method_call_p (arg1))
3475 return false;
3476 return types_same_for_odr (obj_type_ref_class (arg0),
3477 obj_type_ref_class (arg1));
3479 return false;
3481 default:
3482 return false;
3485 case tcc_vl_exp:
3486 switch (TREE_CODE (arg0))
3488 case CALL_EXPR:
3489 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3490 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3491 /* If not both CALL_EXPRs are either internal or normal function
3492 functions, then they are not equal. */
3493 return false;
3494 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3496 /* If the CALL_EXPRs call different internal functions, then they
3497 are not equal. */
3498 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3499 return false;
3501 else
3503 /* If the CALL_EXPRs call different functions, then they are not
3504 equal. */
3505 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3506 flags))
3507 return false;
3510 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3512 unsigned int cef = call_expr_flags (arg0);
3513 if (flags & OEP_PURE_SAME)
3514 cef &= ECF_CONST | ECF_PURE;
3515 else
3516 cef &= ECF_CONST;
3517 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3518 return false;
3521 /* Now see if all the arguments are the same. */
3523 const_call_expr_arg_iterator iter0, iter1;
3524 const_tree a0, a1;
3525 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3526 a1 = first_const_call_expr_arg (arg1, &iter1);
3527 a0 && a1;
3528 a0 = next_const_call_expr_arg (&iter0),
3529 a1 = next_const_call_expr_arg (&iter1))
3530 if (! operand_equal_p (a0, a1, flags))
3531 return false;
3533 /* If we get here and both argument lists are exhausted
3534 then the CALL_EXPRs are equal. */
3535 return ! (a0 || a1);
3537 default:
3538 return false;
3541 case tcc_declaration:
3542 /* Consider __builtin_sqrt equal to sqrt. */
3543 if (TREE_CODE (arg0) == FUNCTION_DECL)
3544 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3545 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3546 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3547 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3549 if (DECL_P (arg0)
3550 && (flags & OEP_DECL_NAME)
3551 && (flags & OEP_LEXICOGRAPHIC))
3553 /* Consider decls with the same name equal. The caller needs
3554 to make sure they refer to the same entity (such as a function
3555 formal parameter). */
3556 tree a0name = DECL_NAME (arg0);
3557 tree a1name = DECL_NAME (arg1);
3558 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3559 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3560 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3562 return false;
3564 case tcc_exceptional:
3565 if (TREE_CODE (arg0) == CONSTRUCTOR)
3567 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3568 return false;
3570 /* In GIMPLE constructors are used only to build vectors from
3571 elements. Individual elements in the constructor must be
3572 indexed in increasing order and form an initial sequence.
3574 We make no effort to compare constructors in generic.
3575 (see sem_variable::equals in ipa-icf which can do so for
3576 constants). */
3577 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3578 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3579 return false;
3581 /* Be sure that vectors constructed have the same representation.
3582 We only tested element precision and modes to match.
3583 Vectors may be BLKmode and thus also check that the number of
3584 parts match. */
3585 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3586 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3587 return false;
3589 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3590 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3591 unsigned int len = vec_safe_length (v0);
3593 if (len != vec_safe_length (v1))
3594 return false;
3596 for (unsigned int i = 0; i < len; i++)
3598 constructor_elt *c0 = &(*v0)[i];
3599 constructor_elt *c1 = &(*v1)[i];
3601 if (!operand_equal_p (c0->value, c1->value, flags)
3602 /* In GIMPLE the indexes can be either NULL or matching i.
3603 Double check this so we won't get false
3604 positives for GENERIC. */
3605 || (c0->index
3606 && (TREE_CODE (c0->index) != INTEGER_CST
3607 || compare_tree_int (c0->index, i)))
3608 || (c1->index
3609 && (TREE_CODE (c1->index) != INTEGER_CST
3610 || compare_tree_int (c1->index, i))))
3611 return false;
3613 return true;
3615 else if (TREE_CODE (arg0) == STATEMENT_LIST
3616 && (flags & OEP_LEXICOGRAPHIC))
3618 /* Compare the STATEMENT_LISTs. */
3619 tree_stmt_iterator tsi1, tsi2;
3620 tree body1 = CONST_CAST_TREE (arg0);
3621 tree body2 = CONST_CAST_TREE (arg1);
3622 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3623 tsi_next (&tsi1), tsi_next (&tsi2))
3625 /* The lists don't have the same number of statements. */
3626 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3627 return false;
3628 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3629 return true;
3630 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3631 flags & (OEP_LEXICOGRAPHIC
3632 | OEP_NO_HASH_CHECK)))
3633 return false;
3636 return false;
3638 case tcc_statement:
3639 switch (TREE_CODE (arg0))
3641 case RETURN_EXPR:
3642 if (flags & OEP_LEXICOGRAPHIC)
3643 return OP_SAME_WITH_NULL (0);
3644 return false;
3645 case DEBUG_BEGIN_STMT:
3646 if (flags & OEP_LEXICOGRAPHIC)
3647 return true;
3648 return false;
3649 default:
3650 return false;
3653 default:
3654 return false;
3657 #undef OP_SAME
3658 #undef OP_SAME_WITH_NULL
3661 /* Generate a hash value for an expression. This can be used iteratively
3662 by passing a previous result as the HSTATE argument. */
3664 void
3665 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3666 unsigned int flags)
3668 int i;
3669 enum tree_code code;
3670 enum tree_code_class tclass;
3672 if (t == NULL_TREE || t == error_mark_node)
3674 hstate.merge_hash (0);
3675 return;
3678 STRIP_ANY_LOCATION_WRAPPER (t);
3680 if (!(flags & OEP_ADDRESS_OF))
3681 STRIP_NOPS (t);
3683 code = TREE_CODE (t);
3685 switch (code)
3687 /* Alas, constants aren't shared, so we can't rely on pointer
3688 identity. */
3689 case VOID_CST:
3690 hstate.merge_hash (0);
3691 return;
3692 case INTEGER_CST:
3693 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3694 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3695 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3696 return;
3697 case REAL_CST:
3699 unsigned int val2;
3700 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3701 val2 = rvc_zero;
3702 else
3703 val2 = real_hash (TREE_REAL_CST_PTR (t));
3704 hstate.merge_hash (val2);
3705 return;
3707 case FIXED_CST:
3709 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3710 hstate.merge_hash (val2);
3711 return;
3713 case STRING_CST:
3714 hstate.add ((const void *) TREE_STRING_POINTER (t),
3715 TREE_STRING_LENGTH (t));
3716 return;
3717 case COMPLEX_CST:
3718 hash_operand (TREE_REALPART (t), hstate, flags);
3719 hash_operand (TREE_IMAGPART (t), hstate, flags);
3720 return;
3721 case VECTOR_CST:
3723 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3724 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3725 unsigned int count = vector_cst_encoded_nelts (t);
3726 for (unsigned int i = 0; i < count; ++i)
3727 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3728 return;
3730 case SSA_NAME:
3731 /* We can just compare by pointer. */
3732 hstate.add_hwi (SSA_NAME_VERSION (t));
3733 return;
3734 case PLACEHOLDER_EXPR:
3735 /* The node itself doesn't matter. */
3736 return;
3737 case BLOCK:
3738 case OMP_CLAUSE:
3739 /* Ignore. */
3740 return;
3741 case TREE_LIST:
3742 /* A list of expressions, for a CALL_EXPR or as the elements of a
3743 VECTOR_CST. */
3744 for (; t; t = TREE_CHAIN (t))
3745 hash_operand (TREE_VALUE (t), hstate, flags);
3746 return;
3747 case CONSTRUCTOR:
3749 unsigned HOST_WIDE_INT idx;
3750 tree field, value;
3751 flags &= ~OEP_ADDRESS_OF;
3752 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3753 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3755 /* In GIMPLE the indexes can be either NULL or matching i. */
3756 if (field == NULL_TREE)
3757 field = bitsize_int (idx);
3758 hash_operand (field, hstate, flags);
3759 hash_operand (value, hstate, flags);
3761 return;
3763 case STATEMENT_LIST:
3765 tree_stmt_iterator i;
3766 for (i = tsi_start (CONST_CAST_TREE (t));
3767 !tsi_end_p (i); tsi_next (&i))
3768 hash_operand (tsi_stmt (i), hstate, flags);
3769 return;
3771 case TREE_VEC:
3772 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3773 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3774 return;
3775 case IDENTIFIER_NODE:
3776 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3777 return;
3778 case FUNCTION_DECL:
3779 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3780 Otherwise nodes that compare equal according to operand_equal_p might
3781 get different hash codes. However, don't do this for machine specific
3782 or front end builtins, since the function code is overloaded in those
3783 cases. */
3784 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3785 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3787 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3788 code = TREE_CODE (t);
3790 /* FALL THROUGH */
3791 default:
3792 if (POLY_INT_CST_P (t))
3794 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3795 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3796 return;
3798 tclass = TREE_CODE_CLASS (code);
3800 if (tclass == tcc_declaration)
3802 /* DECL's have a unique ID */
3803 hstate.add_hwi (DECL_UID (t));
3805 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3807 /* For comparisons that can be swapped, use the lower
3808 tree code. */
3809 enum tree_code ccode = swap_tree_comparison (code);
3810 if (code < ccode)
3811 ccode = code;
3812 hstate.add_object (ccode);
3813 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3814 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3816 else if (CONVERT_EXPR_CODE_P (code))
3818 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3819 operand_equal_p. */
3820 enum tree_code ccode = NOP_EXPR;
3821 hstate.add_object (ccode);
3823 /* Don't hash the type, that can lead to having nodes which
3824 compare equal according to operand_equal_p, but which
3825 have different hash codes. Make sure to include signedness
3826 in the hash computation. */
3827 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3828 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3830 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3831 else if (code == MEM_REF
3832 && (flags & OEP_ADDRESS_OF) != 0
3833 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3834 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3835 && integer_zerop (TREE_OPERAND (t, 1)))
3836 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3837 hstate, flags);
3838 /* Don't ICE on FE specific trees, or their arguments etc.
3839 during operand_equal_p hash verification. */
3840 else if (!IS_EXPR_CODE_CLASS (tclass))
3841 gcc_assert (flags & OEP_HASH_CHECK);
3842 else
3844 unsigned int sflags = flags;
3846 hstate.add_object (code);
3848 switch (code)
3850 case ADDR_EXPR:
3851 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3852 flags |= OEP_ADDRESS_OF;
3853 sflags = flags;
3854 break;
3856 case INDIRECT_REF:
3857 case MEM_REF:
3858 case TARGET_MEM_REF:
3859 flags &= ~OEP_ADDRESS_OF;
3860 sflags = flags;
3861 break;
3863 case COMPONENT_REF:
3864 if (sflags & OEP_ADDRESS_OF)
3866 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3867 if (TREE_OPERAND (t, 2))
3868 hash_operand (TREE_OPERAND (t, 2), hstate,
3869 flags & ~OEP_ADDRESS_OF);
3870 else
3872 tree field = TREE_OPERAND (t, 1);
3873 hash_operand (DECL_FIELD_OFFSET (field),
3874 hstate, flags & ~OEP_ADDRESS_OF);
3875 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3876 hstate, flags & ~OEP_ADDRESS_OF);
3878 return;
3880 break;
3881 case ARRAY_REF:
3882 case ARRAY_RANGE_REF:
3883 case BIT_FIELD_REF:
3884 sflags &= ~OEP_ADDRESS_OF;
3885 break;
3887 case COND_EXPR:
3888 flags &= ~OEP_ADDRESS_OF;
3889 break;
3891 case WIDEN_MULT_PLUS_EXPR:
3892 case WIDEN_MULT_MINUS_EXPR:
3894 /* The multiplication operands are commutative. */
3895 inchash::hash one, two;
3896 hash_operand (TREE_OPERAND (t, 0), one, flags);
3897 hash_operand (TREE_OPERAND (t, 1), two, flags);
3898 hstate.add_commutative (one, two);
3899 hash_operand (TREE_OPERAND (t, 2), two, flags);
3900 return;
3903 case CALL_EXPR:
3904 if (CALL_EXPR_FN (t) == NULL_TREE)
3905 hstate.add_int (CALL_EXPR_IFN (t));
3906 break;
3908 case TARGET_EXPR:
3909 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3910 Usually different TARGET_EXPRs just should use
3911 different temporaries in their slots. */
3912 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3913 return;
3915 case OBJ_TYPE_REF:
3916 /* Virtual table reference. */
3917 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3918 flags &= ~OEP_ADDRESS_OF;
3919 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3920 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3921 if (!virtual_method_call_p (t))
3922 return;
3923 if (tree c = obj_type_ref_class (t))
3925 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3926 /* We compute mangled names only when free_lang_data is run.
3927 In that case we can hash precisely. */
3928 if (TREE_CODE (c) == TYPE_DECL
3929 && DECL_ASSEMBLER_NAME_SET_P (c))
3930 hstate.add_object
3931 (IDENTIFIER_HASH_VALUE
3932 (DECL_ASSEMBLER_NAME (c)));
3934 return;
3935 default:
3936 break;
3939 /* Don't hash the type, that can lead to having nodes which
3940 compare equal according to operand_equal_p, but which
3941 have different hash codes. */
3942 if (code == NON_LVALUE_EXPR)
3944 /* Make sure to include signness in the hash computation. */
3945 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3946 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3949 else if (commutative_tree_code (code))
3951 /* It's a commutative expression. We want to hash it the same
3952 however it appears. We do this by first hashing both operands
3953 and then rehashing based on the order of their independent
3954 hashes. */
3955 inchash::hash one, two;
3956 hash_operand (TREE_OPERAND (t, 0), one, flags);
3957 hash_operand (TREE_OPERAND (t, 1), two, flags);
3958 hstate.add_commutative (one, two);
3960 else
3961 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3962 hash_operand (TREE_OPERAND (t, i), hstate,
3963 i == 0 ? flags : sflags);
3965 return;
3969 bool
3970 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3971 unsigned int flags, bool *ret)
3973 /* When checking and unless comparing DECL names, verify that if
3974 the outermost operand_equal_p call returns non-zero then ARG0
3975 and ARG1 have the same hash value. */
3976 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3978 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3980 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3982 inchash::hash hstate0 (0), hstate1 (0);
3983 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3984 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3985 hashval_t h0 = hstate0.end ();
3986 hashval_t h1 = hstate1.end ();
3987 gcc_assert (h0 == h1);
3989 *ret = true;
3991 else
3992 *ret = false;
3994 return true;
3997 return false;
4001 static operand_compare default_compare_instance;
4003 /* Conveinece wrapper around operand_compare class because usually we do
4004 not need to play with the valueizer. */
4006 bool
4007 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4009 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4012 namespace inchash
4015 /* Generate a hash value for an expression. This can be used iteratively
4016 by passing a previous result as the HSTATE argument.
4018 This function is intended to produce the same hash for expressions which
4019 would compare equal using operand_equal_p. */
4020 void
4021 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4023 default_compare_instance.hash_operand (t, hstate, flags);
4028 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4029 with a different signedness or a narrower precision. */
4031 static bool
4032 operand_equal_for_comparison_p (tree arg0, tree arg1)
4034 if (operand_equal_p (arg0, arg1, 0))
4035 return true;
4037 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4038 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4039 return false;
4041 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4042 and see if the inner values are the same. This removes any
4043 signedness comparison, which doesn't matter here. */
4044 tree op0 = arg0;
4045 tree op1 = arg1;
4046 STRIP_NOPS (op0);
4047 STRIP_NOPS (op1);
4048 if (operand_equal_p (op0, op1, 0))
4049 return true;
4051 /* Discard a single widening conversion from ARG1 and see if the inner
4052 value is the same as ARG0. */
4053 if (CONVERT_EXPR_P (arg1)
4054 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4055 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4056 < TYPE_PRECISION (TREE_TYPE (arg1))
4057 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4058 return true;
4060 return false;
4063 /* See if ARG is an expression that is either a comparison or is performing
4064 arithmetic on comparisons. The comparisons must only be comparing
4065 two different values, which will be stored in *CVAL1 and *CVAL2; if
4066 they are nonzero it means that some operands have already been found.
4067 No variables may be used anywhere else in the expression except in the
4068 comparisons.
4070 If this is true, return 1. Otherwise, return zero. */
4072 static bool
4073 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4075 enum tree_code code = TREE_CODE (arg);
4076 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4078 /* We can handle some of the tcc_expression cases here. */
4079 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4080 tclass = tcc_unary;
4081 else if (tclass == tcc_expression
4082 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4083 || code == COMPOUND_EXPR))
4084 tclass = tcc_binary;
4086 switch (tclass)
4088 case tcc_unary:
4089 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4091 case tcc_binary:
4092 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4093 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4095 case tcc_constant:
4096 return true;
4098 case tcc_expression:
4099 if (code == COND_EXPR)
4100 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4101 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4102 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4103 return false;
4105 case tcc_comparison:
4106 /* First see if we can handle the first operand, then the second. For
4107 the second operand, we know *CVAL1 can't be zero. It must be that
4108 one side of the comparison is each of the values; test for the
4109 case where this isn't true by failing if the two operands
4110 are the same. */
4112 if (operand_equal_p (TREE_OPERAND (arg, 0),
4113 TREE_OPERAND (arg, 1), 0))
4114 return false;
4116 if (*cval1 == 0)
4117 *cval1 = TREE_OPERAND (arg, 0);
4118 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4120 else if (*cval2 == 0)
4121 *cval2 = TREE_OPERAND (arg, 0);
4122 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4124 else
4125 return false;
4127 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4129 else if (*cval2 == 0)
4130 *cval2 = TREE_OPERAND (arg, 1);
4131 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4133 else
4134 return false;
4136 return true;
4138 default:
4139 return false;
4143 /* ARG is a tree that is known to contain just arithmetic operations and
4144 comparisons. Evaluate the operations in the tree substituting NEW0 for
4145 any occurrence of OLD0 as an operand of a comparison and likewise for
4146 NEW1 and OLD1. */
4148 static tree
4149 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4150 tree old1, tree new1)
4152 tree type = TREE_TYPE (arg);
4153 enum tree_code code = TREE_CODE (arg);
4154 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4156 /* We can handle some of the tcc_expression cases here. */
4157 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4158 tclass = tcc_unary;
4159 else if (tclass == tcc_expression
4160 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4161 tclass = tcc_binary;
4163 switch (tclass)
4165 case tcc_unary:
4166 return fold_build1_loc (loc, code, type,
4167 eval_subst (loc, TREE_OPERAND (arg, 0),
4168 old0, new0, old1, new1));
4170 case tcc_binary:
4171 return fold_build2_loc (loc, code, type,
4172 eval_subst (loc, TREE_OPERAND (arg, 0),
4173 old0, new0, old1, new1),
4174 eval_subst (loc, TREE_OPERAND (arg, 1),
4175 old0, new0, old1, new1));
4177 case tcc_expression:
4178 switch (code)
4180 case SAVE_EXPR:
4181 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4182 old1, new1);
4184 case COMPOUND_EXPR:
4185 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4186 old1, new1);
4188 case COND_EXPR:
4189 return fold_build3_loc (loc, code, type,
4190 eval_subst (loc, TREE_OPERAND (arg, 0),
4191 old0, new0, old1, new1),
4192 eval_subst (loc, TREE_OPERAND (arg, 1),
4193 old0, new0, old1, new1),
4194 eval_subst (loc, TREE_OPERAND (arg, 2),
4195 old0, new0, old1, new1));
4196 default:
4197 break;
4199 /* Fall through - ??? */
4201 case tcc_comparison:
4203 tree arg0 = TREE_OPERAND (arg, 0);
4204 tree arg1 = TREE_OPERAND (arg, 1);
4206 /* We need to check both for exact equality and tree equality. The
4207 former will be true if the operand has a side-effect. In that
4208 case, we know the operand occurred exactly once. */
4210 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4211 arg0 = new0;
4212 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4213 arg0 = new1;
4215 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4216 arg1 = new0;
4217 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4218 arg1 = new1;
4220 return fold_build2_loc (loc, code, type, arg0, arg1);
4223 default:
4224 return arg;
4228 /* Return a tree for the case when the result of an expression is RESULT
4229 converted to TYPE and OMITTED was previously an operand of the expression
4230 but is now not needed (e.g., we folded OMITTED * 0).
4232 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4233 the conversion of RESULT to TYPE. */
4235 tree
4236 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4238 tree t = fold_convert_loc (loc, type, result);
4240 /* If the resulting operand is an empty statement, just return the omitted
4241 statement casted to void. */
4242 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4243 return build1_loc (loc, NOP_EXPR, void_type_node,
4244 fold_ignored_result (omitted));
4246 if (TREE_SIDE_EFFECTS (omitted))
4247 return build2_loc (loc, COMPOUND_EXPR, type,
4248 fold_ignored_result (omitted), t);
4250 return non_lvalue_loc (loc, t);
4253 /* Return a tree for the case when the result of an expression is RESULT
4254 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4255 of the expression but are now not needed.
4257 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4258 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4259 evaluated before OMITTED2. Otherwise, if neither has side effects,
4260 just do the conversion of RESULT to TYPE. */
4262 tree
4263 omit_two_operands_loc (location_t loc, tree type, tree result,
4264 tree omitted1, tree omitted2)
4266 tree t = fold_convert_loc (loc, type, result);
4268 if (TREE_SIDE_EFFECTS (omitted2))
4269 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4270 if (TREE_SIDE_EFFECTS (omitted1))
4271 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4273 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4277 /* Return a simplified tree node for the truth-negation of ARG. This
4278 never alters ARG itself. We assume that ARG is an operation that
4279 returns a truth value (0 or 1).
4281 FIXME: one would think we would fold the result, but it causes
4282 problems with the dominator optimizer. */
4284 static tree
4285 fold_truth_not_expr (location_t loc, tree arg)
4287 tree type = TREE_TYPE (arg);
4288 enum tree_code code = TREE_CODE (arg);
4289 location_t loc1, loc2;
4291 /* If this is a comparison, we can simply invert it, except for
4292 floating-point non-equality comparisons, in which case we just
4293 enclose a TRUTH_NOT_EXPR around what we have. */
4295 if (TREE_CODE_CLASS (code) == tcc_comparison)
4297 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4298 if (FLOAT_TYPE_P (op_type)
4299 && flag_trapping_math
4300 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4301 && code != NE_EXPR && code != EQ_EXPR)
4302 return NULL_TREE;
4304 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4305 if (code == ERROR_MARK)
4306 return NULL_TREE;
4308 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4309 TREE_OPERAND (arg, 1));
4310 copy_warning (ret, arg);
4311 return ret;
4314 switch (code)
4316 case INTEGER_CST:
4317 return constant_boolean_node (integer_zerop (arg), type);
4319 case TRUTH_AND_EXPR:
4320 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4321 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4322 return build2_loc (loc, TRUTH_OR_EXPR, type,
4323 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4324 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4326 case TRUTH_OR_EXPR:
4327 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4328 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4329 return build2_loc (loc, TRUTH_AND_EXPR, type,
4330 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4331 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4333 case TRUTH_XOR_EXPR:
4334 /* Here we can invert either operand. We invert the first operand
4335 unless the second operand is a TRUTH_NOT_EXPR in which case our
4336 result is the XOR of the first operand with the inside of the
4337 negation of the second operand. */
4339 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4340 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4341 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4342 else
4343 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4344 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4345 TREE_OPERAND (arg, 1));
4347 case TRUTH_ANDIF_EXPR:
4348 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4349 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4350 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4351 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4352 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4354 case TRUTH_ORIF_EXPR:
4355 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4356 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4357 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4358 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4359 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4361 case TRUTH_NOT_EXPR:
4362 return TREE_OPERAND (arg, 0);
4364 case COND_EXPR:
4366 tree arg1 = TREE_OPERAND (arg, 1);
4367 tree arg2 = TREE_OPERAND (arg, 2);
4369 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4370 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4372 /* A COND_EXPR may have a throw as one operand, which
4373 then has void type. Just leave void operands
4374 as they are. */
4375 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4376 VOID_TYPE_P (TREE_TYPE (arg1))
4377 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4378 VOID_TYPE_P (TREE_TYPE (arg2))
4379 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4382 case COMPOUND_EXPR:
4383 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4384 return build2_loc (loc, COMPOUND_EXPR, type,
4385 TREE_OPERAND (arg, 0),
4386 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4388 case NON_LVALUE_EXPR:
4389 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4390 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4392 CASE_CONVERT:
4393 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4394 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4396 /* fall through */
4398 case FLOAT_EXPR:
4399 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4400 return build1_loc (loc, TREE_CODE (arg), type,
4401 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4403 case BIT_AND_EXPR:
4404 if (!integer_onep (TREE_OPERAND (arg, 1)))
4405 return NULL_TREE;
4406 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4408 case SAVE_EXPR:
4409 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4411 case CLEANUP_POINT_EXPR:
4412 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4413 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4414 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4416 default:
4417 return NULL_TREE;
4421 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4422 assume that ARG is an operation that returns a truth value (0 or 1
4423 for scalars, 0 or -1 for vectors). Return the folded expression if
4424 folding is successful. Otherwise, return NULL_TREE. */
4426 static tree
4427 fold_invert_truthvalue (location_t loc, tree arg)
4429 tree type = TREE_TYPE (arg);
4430 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4431 ? BIT_NOT_EXPR
4432 : TRUTH_NOT_EXPR,
4433 type, arg);
4436 /* Return a simplified tree node for the truth-negation of ARG. This
4437 never alters ARG itself. We assume that ARG is an operation that
4438 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4440 tree
4441 invert_truthvalue_loc (location_t loc, tree arg)
4443 if (TREE_CODE (arg) == ERROR_MARK)
4444 return arg;
4446 tree type = TREE_TYPE (arg);
4447 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4448 ? BIT_NOT_EXPR
4449 : TRUTH_NOT_EXPR,
4450 type, arg);
4453 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4454 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4455 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4456 is the original memory reference used to preserve the alias set of
4457 the access. */
4459 static tree
4460 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4461 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4462 int unsignedp, int reversep)
4464 tree result, bftype;
4466 /* Attempt not to lose the access path if possible. */
4467 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4469 tree ninner = TREE_OPERAND (orig_inner, 0);
4470 machine_mode nmode;
4471 poly_int64 nbitsize, nbitpos;
4472 tree noffset;
4473 int nunsignedp, nreversep, nvolatilep = 0;
4474 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4475 &noffset, &nmode, &nunsignedp,
4476 &nreversep, &nvolatilep);
4477 if (base == inner
4478 && noffset == NULL_TREE
4479 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4480 && !reversep
4481 && !nreversep
4482 && !nvolatilep)
4484 inner = ninner;
4485 bitpos -= nbitpos;
4489 alias_set_type iset = get_alias_set (orig_inner);
4490 if (iset == 0 && get_alias_set (inner) != iset)
4491 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4492 build_fold_addr_expr (inner),
4493 build_int_cst (ptr_type_node, 0));
4495 if (known_eq (bitpos, 0) && !reversep)
4497 tree size = TYPE_SIZE (TREE_TYPE (inner));
4498 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4499 || POINTER_TYPE_P (TREE_TYPE (inner)))
4500 && tree_fits_shwi_p (size)
4501 && tree_to_shwi (size) == bitsize)
4502 return fold_convert_loc (loc, type, inner);
4505 bftype = type;
4506 if (TYPE_PRECISION (bftype) != bitsize
4507 || TYPE_UNSIGNED (bftype) == !unsignedp)
4508 bftype = build_nonstandard_integer_type (bitsize, 0);
4510 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4511 bitsize_int (bitsize), bitsize_int (bitpos));
4512 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4514 if (bftype != type)
4515 result = fold_convert_loc (loc, type, result);
4517 return result;
4520 /* Optimize a bit-field compare.
4522 There are two cases: First is a compare against a constant and the
4523 second is a comparison of two items where the fields are at the same
4524 bit position relative to the start of a chunk (byte, halfword, word)
4525 large enough to contain it. In these cases we can avoid the shift
4526 implicit in bitfield extractions.
4528 For constants, we emit a compare of the shifted constant with the
4529 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4530 compared. For two fields at the same position, we do the ANDs with the
4531 similar mask and compare the result of the ANDs.
4533 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4534 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4535 are the left and right operands of the comparison, respectively.
4537 If the optimization described above can be done, we return the resulting
4538 tree. Otherwise we return zero. */
4540 static tree
4541 optimize_bit_field_compare (location_t loc, enum tree_code code,
4542 tree compare_type, tree lhs, tree rhs)
4544 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4545 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4546 tree type = TREE_TYPE (lhs);
4547 tree unsigned_type;
4548 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4549 machine_mode lmode, rmode;
4550 scalar_int_mode nmode;
4551 int lunsignedp, runsignedp;
4552 int lreversep, rreversep;
4553 int lvolatilep = 0, rvolatilep = 0;
4554 tree linner, rinner = NULL_TREE;
4555 tree mask;
4556 tree offset;
4558 /* Get all the information about the extractions being done. If the bit size
4559 is the same as the size of the underlying object, we aren't doing an
4560 extraction at all and so can do nothing. We also don't want to
4561 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4562 then will no longer be able to replace it. */
4563 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4564 &lunsignedp, &lreversep, &lvolatilep);
4565 if (linner == lhs
4566 || !known_size_p (plbitsize)
4567 || !plbitsize.is_constant (&lbitsize)
4568 || !plbitpos.is_constant (&lbitpos)
4569 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4570 || offset != 0
4571 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4572 || lvolatilep)
4573 return 0;
4575 if (const_p)
4576 rreversep = lreversep;
4577 else
4579 /* If this is not a constant, we can only do something if bit positions,
4580 sizes, signedness and storage order are the same. */
4581 rinner
4582 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4583 &runsignedp, &rreversep, &rvolatilep);
4585 if (rinner == rhs
4586 || maybe_ne (lbitpos, rbitpos)
4587 || maybe_ne (lbitsize, rbitsize)
4588 || lunsignedp != runsignedp
4589 || lreversep != rreversep
4590 || offset != 0
4591 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4592 || rvolatilep)
4593 return 0;
4596 /* Honor the C++ memory model and mimic what RTL expansion does. */
4597 poly_uint64 bitstart = 0;
4598 poly_uint64 bitend = 0;
4599 if (TREE_CODE (lhs) == COMPONENT_REF)
4601 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4602 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4603 return 0;
4606 /* See if we can find a mode to refer to this field. We should be able to,
4607 but fail if we can't. */
4608 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4609 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4610 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4611 TYPE_ALIGN (TREE_TYPE (rinner))),
4612 BITS_PER_WORD, false, &nmode))
4613 return 0;
4615 /* Set signed and unsigned types of the precision of this mode for the
4616 shifts below. */
4617 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4619 /* Compute the bit position and size for the new reference and our offset
4620 within it. If the new reference is the same size as the original, we
4621 won't optimize anything, so return zero. */
4622 nbitsize = GET_MODE_BITSIZE (nmode);
4623 nbitpos = lbitpos & ~ (nbitsize - 1);
4624 lbitpos -= nbitpos;
4625 if (nbitsize == lbitsize)
4626 return 0;
4628 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4629 lbitpos = nbitsize - lbitsize - lbitpos;
4631 /* Make the mask to be used against the extracted field. */
4632 mask = build_int_cst_type (unsigned_type, -1);
4633 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4634 mask = const_binop (RSHIFT_EXPR, mask,
4635 size_int (nbitsize - lbitsize - lbitpos));
4637 if (! const_p)
4639 if (nbitpos < 0)
4640 return 0;
4642 /* If not comparing with constant, just rework the comparison
4643 and return. */
4644 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4645 nbitsize, nbitpos, 1, lreversep);
4646 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4647 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4648 nbitsize, nbitpos, 1, rreversep);
4649 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4650 return fold_build2_loc (loc, code, compare_type, t1, t2);
4653 /* Otherwise, we are handling the constant case. See if the constant is too
4654 big for the field. Warn and return a tree for 0 (false) if so. We do
4655 this not only for its own sake, but to avoid having to test for this
4656 error case below. If we didn't, we might generate wrong code.
4658 For unsigned fields, the constant shifted right by the field length should
4659 be all zero. For signed fields, the high-order bits should agree with
4660 the sign bit. */
4662 if (lunsignedp)
4664 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4666 warning (0, "comparison is always %d due to width of bit-field",
4667 code == NE_EXPR);
4668 return constant_boolean_node (code == NE_EXPR, compare_type);
4671 else
4673 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4674 if (tem != 0 && tem != -1)
4676 warning (0, "comparison is always %d due to width of bit-field",
4677 code == NE_EXPR);
4678 return constant_boolean_node (code == NE_EXPR, compare_type);
4682 if (nbitpos < 0)
4683 return 0;
4685 /* Single-bit compares should always be against zero. */
4686 if (lbitsize == 1 && ! integer_zerop (rhs))
4688 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4689 rhs = build_int_cst (type, 0);
4692 /* Make a new bitfield reference, shift the constant over the
4693 appropriate number of bits and mask it with the computed mask
4694 (in case this was a signed field). If we changed it, make a new one. */
4695 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4696 nbitsize, nbitpos, 1, lreversep);
4698 rhs = const_binop (BIT_AND_EXPR,
4699 const_binop (LSHIFT_EXPR,
4700 fold_convert_loc (loc, unsigned_type, rhs),
4701 size_int (lbitpos)),
4702 mask);
4704 lhs = build2_loc (loc, code, compare_type,
4705 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4706 return lhs;
4709 /* Subroutine for fold_truth_andor_1: decode a field reference.
4711 If EXP is a comparison reference, we return the innermost reference.
4713 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4714 set to the starting bit number.
4716 If the innermost field can be completely contained in a mode-sized
4717 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4719 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4720 otherwise it is not changed.
4722 *PUNSIGNEDP is set to the signedness of the field.
4724 *PREVERSEP is set to the storage order of the field.
4726 *PMASK is set to the mask used. This is either contained in a
4727 BIT_AND_EXPR or derived from the width of the field.
4729 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4731 Return 0 if this is not a component reference or is one that we can't
4732 do anything with. */
4734 static tree
4735 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4736 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4737 int *punsignedp, int *preversep, int *pvolatilep,
4738 tree *pmask, tree *pand_mask)
4740 tree exp = *exp_;
4741 tree outer_type = 0;
4742 tree and_mask = 0;
4743 tree mask, inner, offset;
4744 tree unsigned_type;
4745 unsigned int precision;
4747 /* All the optimizations using this function assume integer fields.
4748 There are problems with FP fields since the type_for_size call
4749 below can fail for, e.g., XFmode. */
4750 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4751 return NULL_TREE;
4753 /* We are interested in the bare arrangement of bits, so strip everything
4754 that doesn't affect the machine mode. However, record the type of the
4755 outermost expression if it may matter below. */
4756 if (CONVERT_EXPR_P (exp)
4757 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4758 outer_type = TREE_TYPE (exp);
4759 STRIP_NOPS (exp);
4761 if (TREE_CODE (exp) == BIT_AND_EXPR)
4763 and_mask = TREE_OPERAND (exp, 1);
4764 exp = TREE_OPERAND (exp, 0);
4765 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4766 if (TREE_CODE (and_mask) != INTEGER_CST)
4767 return NULL_TREE;
4770 poly_int64 poly_bitsize, poly_bitpos;
4771 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4772 pmode, punsignedp, preversep, pvolatilep);
4773 if ((inner == exp && and_mask == 0)
4774 || !poly_bitsize.is_constant (pbitsize)
4775 || !poly_bitpos.is_constant (pbitpos)
4776 || *pbitsize < 0
4777 || offset != 0
4778 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4779 /* Reject out-of-bound accesses (PR79731). */
4780 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4781 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4782 *pbitpos + *pbitsize) < 0))
4783 return NULL_TREE;
4785 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4786 if (unsigned_type == NULL_TREE)
4787 return NULL_TREE;
4789 *exp_ = exp;
4791 /* If the number of bits in the reference is the same as the bitsize of
4792 the outer type, then the outer type gives the signedness. Otherwise
4793 (in case of a small bitfield) the signedness is unchanged. */
4794 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4795 *punsignedp = TYPE_UNSIGNED (outer_type);
4797 /* Compute the mask to access the bitfield. */
4798 precision = TYPE_PRECISION (unsigned_type);
4800 mask = build_int_cst_type (unsigned_type, -1);
4802 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4803 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4805 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4806 if (and_mask != 0)
4807 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4808 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4810 *pmask = mask;
4811 *pand_mask = and_mask;
4812 return inner;
4815 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4816 bit positions and MASK is SIGNED. */
4818 static bool
4819 all_ones_mask_p (const_tree mask, unsigned int size)
4821 tree type = TREE_TYPE (mask);
4822 unsigned int precision = TYPE_PRECISION (type);
4824 /* If this function returns true when the type of the mask is
4825 UNSIGNED, then there will be errors. In particular see
4826 gcc.c-torture/execute/990326-1.c. There does not appear to be
4827 any documentation paper trail as to why this is so. But the pre
4828 wide-int worked with that restriction and it has been preserved
4829 here. */
4830 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4831 return false;
4833 return wi::mask (size, false, precision) == wi::to_wide (mask);
4836 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4837 represents the sign bit of EXP's type. If EXP represents a sign
4838 or zero extension, also test VAL against the unextended type.
4839 The return value is the (sub)expression whose sign bit is VAL,
4840 or NULL_TREE otherwise. */
4842 tree
4843 sign_bit_p (tree exp, const_tree val)
4845 int width;
4846 tree t;
4848 /* Tree EXP must have an integral type. */
4849 t = TREE_TYPE (exp);
4850 if (! INTEGRAL_TYPE_P (t))
4851 return NULL_TREE;
4853 /* Tree VAL must be an integer constant. */
4854 if (TREE_CODE (val) != INTEGER_CST
4855 || TREE_OVERFLOW (val))
4856 return NULL_TREE;
4858 width = TYPE_PRECISION (t);
4859 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4860 return exp;
4862 /* Handle extension from a narrower type. */
4863 if (TREE_CODE (exp) == NOP_EXPR
4864 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4865 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4867 return NULL_TREE;
4870 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4871 operand is simple enough to be evaluated unconditionally. */
4873 static bool
4874 simple_operand_p (const_tree exp)
4876 /* Strip any conversions that don't change the machine mode. */
4877 STRIP_NOPS (exp);
4879 return (CONSTANT_CLASS_P (exp)
4880 || TREE_CODE (exp) == SSA_NAME
4881 || (DECL_P (exp)
4882 && ! TREE_ADDRESSABLE (exp)
4883 && ! TREE_THIS_VOLATILE (exp)
4884 && ! DECL_NONLOCAL (exp)
4885 /* Don't regard global variables as simple. They may be
4886 allocated in ways unknown to the compiler (shared memory,
4887 #pragma weak, etc). */
4888 && ! TREE_PUBLIC (exp)
4889 && ! DECL_EXTERNAL (exp)
4890 /* Weakrefs are not safe to be read, since they can be NULL.
4891 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4892 have DECL_WEAK flag set. */
4893 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4894 /* Loading a static variable is unduly expensive, but global
4895 registers aren't expensive. */
4896 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4899 /* Determine if an operand is simple enough to be evaluated unconditionally.
4900 In addition to simple_operand_p, we assume that comparisons, conversions,
4901 and logic-not operations are simple, if their operands are simple, too. */
4903 bool
4904 simple_condition_p (tree exp)
4906 enum tree_code code;
4908 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4909 return false;
4911 while (CONVERT_EXPR_P (exp))
4912 exp = TREE_OPERAND (exp, 0);
4914 code = TREE_CODE (exp);
4916 if (TREE_CODE_CLASS (code) == tcc_comparison)
4917 return (simple_operand_p (TREE_OPERAND (exp, 0))
4918 && simple_operand_p (TREE_OPERAND (exp, 1)));
4920 if (code == TRUTH_NOT_EXPR)
4921 return simple_condition_p (TREE_OPERAND (exp, 0));
4923 return simple_operand_p (exp);
4927 /* The following functions are subroutines to fold_range_test and allow it to
4928 try to change a logical combination of comparisons into a range test.
4930 For example, both
4931 X == 2 || X == 3 || X == 4 || X == 5
4933 X >= 2 && X <= 5
4934 are converted to
4935 (unsigned) (X - 2) <= 3
4937 We describe each set of comparisons as being either inside or outside
4938 a range, using a variable named like IN_P, and then describe the
4939 range with a lower and upper bound. If one of the bounds is omitted,
4940 it represents either the highest or lowest value of the type.
4942 In the comments below, we represent a range by two numbers in brackets
4943 preceded by a "+" to designate being inside that range, or a "-" to
4944 designate being outside that range, so the condition can be inverted by
4945 flipping the prefix. An omitted bound is represented by a "-". For
4946 example, "- [-, 10]" means being outside the range starting at the lowest
4947 possible value and ending at 10, in other words, being greater than 10.
4948 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4949 always false.
4951 We set up things so that the missing bounds are handled in a consistent
4952 manner so neither a missing bound nor "true" and "false" need to be
4953 handled using a special case. */
4955 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4956 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4957 and UPPER1_P are nonzero if the respective argument is an upper bound
4958 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4959 must be specified for a comparison. ARG1 will be converted to ARG0's
4960 type if both are specified. */
4962 static tree
4963 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4964 tree arg1, int upper1_p)
4966 tree tem;
4967 int result;
4968 int sgn0, sgn1;
4970 /* If neither arg represents infinity, do the normal operation.
4971 Else, if not a comparison, return infinity. Else handle the special
4972 comparison rules. Note that most of the cases below won't occur, but
4973 are handled for consistency. */
4975 if (arg0 != 0 && arg1 != 0)
4977 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4978 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4979 STRIP_NOPS (tem);
4980 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4983 if (TREE_CODE_CLASS (code) != tcc_comparison)
4984 return 0;
4986 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4987 for neither. In real maths, we cannot assume open ended ranges are
4988 the same. But, this is computer arithmetic, where numbers are finite.
4989 We can therefore make the transformation of any unbounded range with
4990 the value Z, Z being greater than any representable number. This permits
4991 us to treat unbounded ranges as equal. */
4992 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4993 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4994 switch (code)
4996 case EQ_EXPR:
4997 result = sgn0 == sgn1;
4998 break;
4999 case NE_EXPR:
5000 result = sgn0 != sgn1;
5001 break;
5002 case LT_EXPR:
5003 result = sgn0 < sgn1;
5004 break;
5005 case LE_EXPR:
5006 result = sgn0 <= sgn1;
5007 break;
5008 case GT_EXPR:
5009 result = sgn0 > sgn1;
5010 break;
5011 case GE_EXPR:
5012 result = sgn0 >= sgn1;
5013 break;
5014 default:
5015 gcc_unreachable ();
5018 return constant_boolean_node (result, type);
5021 /* Helper routine for make_range. Perform one step for it, return
5022 new expression if the loop should continue or NULL_TREE if it should
5023 stop. */
5025 tree
5026 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5027 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5028 bool *strict_overflow_p)
5030 tree arg0_type = TREE_TYPE (arg0);
5031 tree n_low, n_high, low = *p_low, high = *p_high;
5032 int in_p = *p_in_p, n_in_p;
5034 switch (code)
5036 case TRUTH_NOT_EXPR:
5037 /* We can only do something if the range is testing for zero. */
5038 if (low == NULL_TREE || high == NULL_TREE
5039 || ! integer_zerop (low) || ! integer_zerop (high))
5040 return NULL_TREE;
5041 *p_in_p = ! in_p;
5042 return arg0;
5044 case EQ_EXPR: case NE_EXPR:
5045 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5046 /* We can only do something if the range is testing for zero
5047 and if the second operand is an integer constant. Note that
5048 saying something is "in" the range we make is done by
5049 complementing IN_P since it will set in the initial case of
5050 being not equal to zero; "out" is leaving it alone. */
5051 if (low == NULL_TREE || high == NULL_TREE
5052 || ! integer_zerop (low) || ! integer_zerop (high)
5053 || TREE_CODE (arg1) != INTEGER_CST)
5054 return NULL_TREE;
5056 switch (code)
5058 case NE_EXPR: /* - [c, c] */
5059 low = high = arg1;
5060 break;
5061 case EQ_EXPR: /* + [c, c] */
5062 in_p = ! in_p, low = high = arg1;
5063 break;
5064 case GT_EXPR: /* - [-, c] */
5065 low = 0, high = arg1;
5066 break;
5067 case GE_EXPR: /* + [c, -] */
5068 in_p = ! in_p, low = arg1, high = 0;
5069 break;
5070 case LT_EXPR: /* - [c, -] */
5071 low = arg1, high = 0;
5072 break;
5073 case LE_EXPR: /* + [-, c] */
5074 in_p = ! in_p, low = 0, high = arg1;
5075 break;
5076 default:
5077 gcc_unreachable ();
5080 /* If this is an unsigned comparison, we also know that EXP is
5081 greater than or equal to zero. We base the range tests we make
5082 on that fact, so we record it here so we can parse existing
5083 range tests. We test arg0_type since often the return type
5084 of, e.g. EQ_EXPR, is boolean. */
5085 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5087 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5088 in_p, low, high, 1,
5089 build_int_cst (arg0_type, 0),
5090 NULL_TREE))
5091 return NULL_TREE;
5093 in_p = n_in_p, low = n_low, high = n_high;
5095 /* If the high bound is missing, but we have a nonzero low
5096 bound, reverse the range so it goes from zero to the low bound
5097 minus 1. */
5098 if (high == 0 && low && ! integer_zerop (low))
5100 in_p = ! in_p;
5101 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5102 build_int_cst (TREE_TYPE (low), 1), 0);
5103 low = build_int_cst (arg0_type, 0);
5107 *p_low = low;
5108 *p_high = high;
5109 *p_in_p = in_p;
5110 return arg0;
5112 case NEGATE_EXPR:
5113 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5114 low and high are non-NULL, then normalize will DTRT. */
5115 if (!TYPE_UNSIGNED (arg0_type)
5116 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5118 if (low == NULL_TREE)
5119 low = TYPE_MIN_VALUE (arg0_type);
5120 if (high == NULL_TREE)
5121 high = TYPE_MAX_VALUE (arg0_type);
5124 /* (-x) IN [a,b] -> x in [-b, -a] */
5125 n_low = range_binop (MINUS_EXPR, exp_type,
5126 build_int_cst (exp_type, 0),
5127 0, high, 1);
5128 n_high = range_binop (MINUS_EXPR, exp_type,
5129 build_int_cst (exp_type, 0),
5130 0, low, 0);
5131 if (n_high != 0 && TREE_OVERFLOW (n_high))
5132 return NULL_TREE;
5133 goto normalize;
5135 case BIT_NOT_EXPR:
5136 /* ~ X -> -X - 1 */
5137 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5138 build_int_cst (exp_type, 1));
5140 case PLUS_EXPR:
5141 case MINUS_EXPR:
5142 if (TREE_CODE (arg1) != INTEGER_CST)
5143 return NULL_TREE;
5145 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5146 move a constant to the other side. */
5147 if (!TYPE_UNSIGNED (arg0_type)
5148 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5149 return NULL_TREE;
5151 /* If EXP is signed, any overflow in the computation is undefined,
5152 so we don't worry about it so long as our computations on
5153 the bounds don't overflow. For unsigned, overflow is defined
5154 and this is exactly the right thing. */
5155 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5156 arg0_type, low, 0, arg1, 0);
5157 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5158 arg0_type, high, 1, arg1, 0);
5159 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5160 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5161 return NULL_TREE;
5163 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5164 *strict_overflow_p = true;
5166 normalize:
5167 /* Check for an unsigned range which has wrapped around the maximum
5168 value thus making n_high < n_low, and normalize it. */
5169 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5171 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5172 build_int_cst (TREE_TYPE (n_high), 1), 0);
5173 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5174 build_int_cst (TREE_TYPE (n_low), 1), 0);
5176 /* If the range is of the form +/- [ x+1, x ], we won't
5177 be able to normalize it. But then, it represents the
5178 whole range or the empty set, so make it
5179 +/- [ -, - ]. */
5180 if (tree_int_cst_equal (n_low, low)
5181 && tree_int_cst_equal (n_high, high))
5182 low = high = 0;
5183 else
5184 in_p = ! in_p;
5186 else
5187 low = n_low, high = n_high;
5189 *p_low = low;
5190 *p_high = high;
5191 *p_in_p = in_p;
5192 return arg0;
5194 CASE_CONVERT:
5195 case NON_LVALUE_EXPR:
5196 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5197 return NULL_TREE;
5199 if (! INTEGRAL_TYPE_P (arg0_type)
5200 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5201 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5202 return NULL_TREE;
5204 n_low = low, n_high = high;
5206 if (n_low != 0)
5207 n_low = fold_convert_loc (loc, arg0_type, n_low);
5209 if (n_high != 0)
5210 n_high = fold_convert_loc (loc, arg0_type, n_high);
5212 /* If we're converting arg0 from an unsigned type, to exp,
5213 a signed type, we will be doing the comparison as unsigned.
5214 The tests above have already verified that LOW and HIGH
5215 are both positive.
5217 So we have to ensure that we will handle large unsigned
5218 values the same way that the current signed bounds treat
5219 negative values. */
5221 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5223 tree high_positive;
5224 tree equiv_type;
5225 /* For fixed-point modes, we need to pass the saturating flag
5226 as the 2nd parameter. */
5227 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5228 equiv_type
5229 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5230 TYPE_SATURATING (arg0_type));
5231 else
5232 equiv_type
5233 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5235 /* A range without an upper bound is, naturally, unbounded.
5236 Since convert would have cropped a very large value, use
5237 the max value for the destination type. */
5238 high_positive
5239 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5240 : TYPE_MAX_VALUE (arg0_type);
5242 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5243 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5244 fold_convert_loc (loc, arg0_type,
5245 high_positive),
5246 build_int_cst (arg0_type, 1));
5248 /* If the low bound is specified, "and" the range with the
5249 range for which the original unsigned value will be
5250 positive. */
5251 if (low != 0)
5253 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5254 1, fold_convert_loc (loc, arg0_type,
5255 integer_zero_node),
5256 high_positive))
5257 return NULL_TREE;
5259 in_p = (n_in_p == in_p);
5261 else
5263 /* Otherwise, "or" the range with the range of the input
5264 that will be interpreted as negative. */
5265 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5266 1, fold_convert_loc (loc, arg0_type,
5267 integer_zero_node),
5268 high_positive))
5269 return NULL_TREE;
5271 in_p = (in_p != n_in_p);
5275 /* Otherwise, if we are converting arg0 from signed type, to exp,
5276 an unsigned type, we will do the comparison as signed. If
5277 high is non-NULL, we punt above if it doesn't fit in the signed
5278 type, so if we get through here, +[-, high] or +[low, high] are
5279 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5280 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5281 high is not, the +[low, -] range is equivalent to union of
5282 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5283 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5284 low being 0, which should be treated as [-, -]. */
5285 else if (TYPE_UNSIGNED (exp_type)
5286 && !TYPE_UNSIGNED (arg0_type)
5287 && low
5288 && !high)
5290 if (integer_zerop (low))
5291 n_low = NULL_TREE;
5292 else
5294 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5295 n_low, build_int_cst (arg0_type, -1));
5296 n_low = build_zero_cst (arg0_type);
5297 in_p = !in_p;
5301 *p_low = n_low;
5302 *p_high = n_high;
5303 *p_in_p = in_p;
5304 return arg0;
5306 default:
5307 return NULL_TREE;
5311 /* Given EXP, a logical expression, set the range it is testing into
5312 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5313 actually being tested. *PLOW and *PHIGH will be made of the same
5314 type as the returned expression. If EXP is not a comparison, we
5315 will most likely not be returning a useful value and range. Set
5316 *STRICT_OVERFLOW_P to true if the return value is only valid
5317 because signed overflow is undefined; otherwise, do not change
5318 *STRICT_OVERFLOW_P. */
5320 tree
5321 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5322 bool *strict_overflow_p)
5324 enum tree_code code;
5325 tree arg0, arg1 = NULL_TREE;
5326 tree exp_type, nexp;
5327 int in_p;
5328 tree low, high;
5329 location_t loc = EXPR_LOCATION (exp);
5331 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5332 and see if we can refine the range. Some of the cases below may not
5333 happen, but it doesn't seem worth worrying about this. We "continue"
5334 the outer loop when we've changed something; otherwise we "break"
5335 the switch, which will "break" the while. */
5337 in_p = 0;
5338 low = high = build_int_cst (TREE_TYPE (exp), 0);
5340 while (1)
5342 code = TREE_CODE (exp);
5343 exp_type = TREE_TYPE (exp);
5344 arg0 = NULL_TREE;
5346 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5348 if (TREE_OPERAND_LENGTH (exp) > 0)
5349 arg0 = TREE_OPERAND (exp, 0);
5350 if (TREE_CODE_CLASS (code) == tcc_binary
5351 || TREE_CODE_CLASS (code) == tcc_comparison
5352 || (TREE_CODE_CLASS (code) == tcc_expression
5353 && TREE_OPERAND_LENGTH (exp) > 1))
5354 arg1 = TREE_OPERAND (exp, 1);
5356 if (arg0 == NULL_TREE)
5357 break;
5359 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5360 &high, &in_p, strict_overflow_p);
5361 if (nexp == NULL_TREE)
5362 break;
5363 exp = nexp;
5366 /* If EXP is a constant, we can evaluate whether this is true or false. */
5367 if (TREE_CODE (exp) == INTEGER_CST)
5369 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5370 exp, 0, low, 0))
5371 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5372 exp, 1, high, 1)));
5373 low = high = 0;
5374 exp = 0;
5377 *pin_p = in_p, *plow = low, *phigh = high;
5378 return exp;
5381 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5382 a bitwise check i.e. when
5383 LOW == 0xXX...X00...0
5384 HIGH == 0xXX...X11...1
5385 Return corresponding mask in MASK and stem in VALUE. */
5387 static bool
5388 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5389 tree *value)
5391 if (TREE_CODE (low) != INTEGER_CST
5392 || TREE_CODE (high) != INTEGER_CST)
5393 return false;
5395 unsigned prec = TYPE_PRECISION (type);
5396 wide_int lo = wi::to_wide (low, prec);
5397 wide_int hi = wi::to_wide (high, prec);
5399 wide_int end_mask = lo ^ hi;
5400 if ((end_mask & (end_mask + 1)) != 0
5401 || (lo & end_mask) != 0)
5402 return false;
5404 wide_int stem_mask = ~end_mask;
5405 wide_int stem = lo & stem_mask;
5406 if (stem != (hi & stem_mask))
5407 return false;
5409 *mask = wide_int_to_tree (type, stem_mask);
5410 *value = wide_int_to_tree (type, stem);
5412 return true;
5415 /* Helper routine for build_range_check and match.pd. Return the type to
5416 perform the check or NULL if it shouldn't be optimized. */
5418 tree
5419 range_check_type (tree etype)
5421 /* First make sure that arithmetics in this type is valid, then make sure
5422 that it wraps around. */
5423 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5424 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5426 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5428 tree utype, minv, maxv;
5430 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5431 for the type in question, as we rely on this here. */
5432 utype = unsigned_type_for (etype);
5433 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5434 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5435 build_int_cst (TREE_TYPE (maxv), 1), 1);
5436 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5438 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5439 minv, 1, maxv, 1)))
5440 etype = utype;
5441 else
5442 return NULL_TREE;
5444 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5445 etype = unsigned_type_for (etype);
5446 return etype;
5449 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5450 type, TYPE, return an expression to test if EXP is in (or out of, depending
5451 on IN_P) the range. Return 0 if the test couldn't be created. */
5453 tree
5454 build_range_check (location_t loc, tree type, tree exp, int in_p,
5455 tree low, tree high)
5457 tree etype = TREE_TYPE (exp), mask, value;
5459 /* Disable this optimization for function pointer expressions
5460 on targets that require function pointer canonicalization. */
5461 if (targetm.have_canonicalize_funcptr_for_compare ()
5462 && POINTER_TYPE_P (etype)
5463 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5464 return NULL_TREE;
5466 if (! in_p)
5468 value = build_range_check (loc, type, exp, 1, low, high);
5469 if (value != 0)
5470 return invert_truthvalue_loc (loc, value);
5472 return 0;
5475 if (low == 0 && high == 0)
5476 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5478 if (low == 0)
5479 return fold_build2_loc (loc, LE_EXPR, type, exp,
5480 fold_convert_loc (loc, etype, high));
5482 if (high == 0)
5483 return fold_build2_loc (loc, GE_EXPR, type, exp,
5484 fold_convert_loc (loc, etype, low));
5486 if (operand_equal_p (low, high, 0))
5487 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5488 fold_convert_loc (loc, etype, low));
5490 if (TREE_CODE (exp) == BIT_AND_EXPR
5491 && maskable_range_p (low, high, etype, &mask, &value))
5492 return fold_build2_loc (loc, EQ_EXPR, type,
5493 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5494 exp, mask),
5495 value);
5497 if (integer_zerop (low))
5499 if (! TYPE_UNSIGNED (etype))
5501 etype = unsigned_type_for (etype);
5502 high = fold_convert_loc (loc, etype, high);
5503 exp = fold_convert_loc (loc, etype, exp);
5505 return build_range_check (loc, type, exp, 1, 0, high);
5508 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5509 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5511 int prec = TYPE_PRECISION (etype);
5513 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5515 if (TYPE_UNSIGNED (etype))
5517 tree signed_etype = signed_type_for (etype);
5518 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5519 etype
5520 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5521 else
5522 etype = signed_etype;
5523 exp = fold_convert_loc (loc, etype, exp);
5525 return fold_build2_loc (loc, GT_EXPR, type, exp,
5526 build_int_cst (etype, 0));
5530 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5531 This requires wrap-around arithmetics for the type of the expression. */
5532 etype = range_check_type (etype);
5533 if (etype == NULL_TREE)
5534 return NULL_TREE;
5536 high = fold_convert_loc (loc, etype, high);
5537 low = fold_convert_loc (loc, etype, low);
5538 exp = fold_convert_loc (loc, etype, exp);
5540 value = const_binop (MINUS_EXPR, high, low);
5542 if (value != 0 && !TREE_OVERFLOW (value))
5543 return build_range_check (loc, type,
5544 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5545 1, build_int_cst (etype, 0), value);
5547 return 0;
5550 /* Return the predecessor of VAL in its type, handling the infinite case. */
5552 static tree
5553 range_predecessor (tree val)
5555 tree type = TREE_TYPE (val);
5557 if (INTEGRAL_TYPE_P (type)
5558 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5559 return 0;
5560 else
5561 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5562 build_int_cst (TREE_TYPE (val), 1), 0);
5565 /* Return the successor of VAL in its type, handling the infinite case. */
5567 static tree
5568 range_successor (tree val)
5570 tree type = TREE_TYPE (val);
5572 if (INTEGRAL_TYPE_P (type)
5573 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5574 return 0;
5575 else
5576 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5577 build_int_cst (TREE_TYPE (val), 1), 0);
5580 /* Given two ranges, see if we can merge them into one. Return 1 if we
5581 can, 0 if we can't. Set the output range into the specified parameters. */
5583 bool
5584 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5585 tree high0, int in1_p, tree low1, tree high1)
5587 int no_overlap;
5588 int subset;
5589 int temp;
5590 tree tem;
5591 int in_p;
5592 tree low, high;
5593 int lowequal = ((low0 == 0 && low1 == 0)
5594 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5595 low0, 0, low1, 0)));
5596 int highequal = ((high0 == 0 && high1 == 0)
5597 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5598 high0, 1, high1, 1)));
5600 /* Make range 0 be the range that starts first, or ends last if they
5601 start at the same value. Swap them if it isn't. */
5602 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5603 low0, 0, low1, 0))
5604 || (lowequal
5605 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5606 high1, 1, high0, 1))))
5608 temp = in0_p, in0_p = in1_p, in1_p = temp;
5609 tem = low0, low0 = low1, low1 = tem;
5610 tem = high0, high0 = high1, high1 = tem;
5613 /* If the second range is != high1 where high1 is the type maximum of
5614 the type, try first merging with < high1 range. */
5615 if (low1
5616 && high1
5617 && TREE_CODE (low1) == INTEGER_CST
5618 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5619 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5620 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5621 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5622 && operand_equal_p (low1, high1, 0))
5624 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5625 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5626 !in1_p, NULL_TREE, range_predecessor (low1)))
5627 return true;
5628 /* Similarly for the second range != low1 where low1 is the type minimum
5629 of the type, try first merging with > low1 range. */
5630 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5631 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5632 !in1_p, range_successor (low1), NULL_TREE))
5633 return true;
5636 /* Now flag two cases, whether the ranges are disjoint or whether the
5637 second range is totally subsumed in the first. Note that the tests
5638 below are simplified by the ones above. */
5639 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5640 high0, 1, low1, 0));
5641 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5642 high1, 1, high0, 1));
5644 /* We now have four cases, depending on whether we are including or
5645 excluding the two ranges. */
5646 if (in0_p && in1_p)
5648 /* If they don't overlap, the result is false. If the second range
5649 is a subset it is the result. Otherwise, the range is from the start
5650 of the second to the end of the first. */
5651 if (no_overlap)
5652 in_p = 0, low = high = 0;
5653 else if (subset)
5654 in_p = 1, low = low1, high = high1;
5655 else
5656 in_p = 1, low = low1, high = high0;
5659 else if (in0_p && ! in1_p)
5661 /* If they don't overlap, the result is the first range. If they are
5662 equal, the result is false. If the second range is a subset of the
5663 first, and the ranges begin at the same place, we go from just after
5664 the end of the second range to the end of the first. If the second
5665 range is not a subset of the first, or if it is a subset and both
5666 ranges end at the same place, the range starts at the start of the
5667 first range and ends just before the second range.
5668 Otherwise, we can't describe this as a single range. */
5669 if (no_overlap)
5670 in_p = 1, low = low0, high = high0;
5671 else if (lowequal && highequal)
5672 in_p = 0, low = high = 0;
5673 else if (subset && lowequal)
5675 low = range_successor (high1);
5676 high = high0;
5677 in_p = 1;
5678 if (low == 0)
5680 /* We are in the weird situation where high0 > high1 but
5681 high1 has no successor. Punt. */
5682 return 0;
5685 else if (! subset || highequal)
5687 low = low0;
5688 high = range_predecessor (low1);
5689 in_p = 1;
5690 if (high == 0)
5692 /* low0 < low1 but low1 has no predecessor. Punt. */
5693 return 0;
5696 else
5697 return 0;
5700 else if (! in0_p && in1_p)
5702 /* If they don't overlap, the result is the second range. If the second
5703 is a subset of the first, the result is false. Otherwise,
5704 the range starts just after the first range and ends at the
5705 end of the second. */
5706 if (no_overlap)
5707 in_p = 1, low = low1, high = high1;
5708 else if (subset || highequal)
5709 in_p = 0, low = high = 0;
5710 else
5712 low = range_successor (high0);
5713 high = high1;
5714 in_p = 1;
5715 if (low == 0)
5717 /* high1 > high0 but high0 has no successor. Punt. */
5718 return 0;
5723 else
5725 /* The case where we are excluding both ranges. Here the complex case
5726 is if they don't overlap. In that case, the only time we have a
5727 range is if they are adjacent. If the second is a subset of the
5728 first, the result is the first. Otherwise, the range to exclude
5729 starts at the beginning of the first range and ends at the end of the
5730 second. */
5731 if (no_overlap)
5733 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5734 range_successor (high0),
5735 1, low1, 0)))
5736 in_p = 0, low = low0, high = high1;
5737 else
5739 /* Canonicalize - [min, x] into - [-, x]. */
5740 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5741 switch (TREE_CODE (TREE_TYPE (low0)))
5743 case ENUMERAL_TYPE:
5744 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5745 GET_MODE_BITSIZE
5746 (TYPE_MODE (TREE_TYPE (low0)))))
5747 break;
5748 /* FALLTHROUGH */
5749 case INTEGER_TYPE:
5750 if (tree_int_cst_equal (low0,
5751 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5752 low0 = 0;
5753 break;
5754 case POINTER_TYPE:
5755 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5756 && integer_zerop (low0))
5757 low0 = 0;
5758 break;
5759 default:
5760 break;
5763 /* Canonicalize - [x, max] into - [x, -]. */
5764 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5765 switch (TREE_CODE (TREE_TYPE (high1)))
5767 case ENUMERAL_TYPE:
5768 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5769 GET_MODE_BITSIZE
5770 (TYPE_MODE (TREE_TYPE (high1)))))
5771 break;
5772 /* FALLTHROUGH */
5773 case INTEGER_TYPE:
5774 if (tree_int_cst_equal (high1,
5775 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5776 high1 = 0;
5777 break;
5778 case POINTER_TYPE:
5779 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5780 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5781 high1, 1,
5782 build_int_cst (TREE_TYPE (high1), 1),
5783 1)))
5784 high1 = 0;
5785 break;
5786 default:
5787 break;
5790 /* The ranges might be also adjacent between the maximum and
5791 minimum values of the given type. For
5792 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5793 return + [x + 1, y - 1]. */
5794 if (low0 == 0 && high1 == 0)
5796 low = range_successor (high0);
5797 high = range_predecessor (low1);
5798 if (low == 0 || high == 0)
5799 return 0;
5801 in_p = 1;
5803 else
5804 return 0;
5807 else if (subset)
5808 in_p = 0, low = low0, high = high0;
5809 else
5810 in_p = 0, low = low0, high = high1;
5813 *pin_p = in_p, *plow = low, *phigh = high;
5814 return 1;
5818 /* Subroutine of fold, looking inside expressions of the form
5819 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5820 are the three operands of the COND_EXPR. This function is
5821 being used also to optimize A op B ? C : A, by reversing the
5822 comparison first.
5824 Return a folded expression whose code is not a COND_EXPR
5825 anymore, or NULL_TREE if no folding opportunity is found. */
5827 static tree
5828 fold_cond_expr_with_comparison (location_t loc, tree type,
5829 enum tree_code comp_code,
5830 tree arg00, tree arg01, tree arg1, tree arg2)
5832 tree arg1_type = TREE_TYPE (arg1);
5833 tree tem;
5835 STRIP_NOPS (arg1);
5836 STRIP_NOPS (arg2);
5838 /* If we have A op 0 ? A : -A, consider applying the following
5839 transformations:
5841 A == 0? A : -A same as -A
5842 A != 0? A : -A same as A
5843 A >= 0? A : -A same as abs (A)
5844 A > 0? A : -A same as abs (A)
5845 A <= 0? A : -A same as -abs (A)
5846 A < 0? A : -A same as -abs (A)
5848 None of these transformations work for modes with signed
5849 zeros. If A is +/-0, the first two transformations will
5850 change the sign of the result (from +0 to -0, or vice
5851 versa). The last four will fix the sign of the result,
5852 even though the original expressions could be positive or
5853 negative, depending on the sign of A.
5855 Note that all these transformations are correct if A is
5856 NaN, since the two alternatives (A and -A) are also NaNs. */
5857 if (!HONOR_SIGNED_ZEROS (type)
5858 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5859 ? real_zerop (arg01)
5860 : integer_zerop (arg01))
5861 && ((TREE_CODE (arg2) == NEGATE_EXPR
5862 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5863 /* In the case that A is of the form X-Y, '-A' (arg2) may
5864 have already been folded to Y-X, check for that. */
5865 || (TREE_CODE (arg1) == MINUS_EXPR
5866 && TREE_CODE (arg2) == MINUS_EXPR
5867 && operand_equal_p (TREE_OPERAND (arg1, 0),
5868 TREE_OPERAND (arg2, 1), 0)
5869 && operand_equal_p (TREE_OPERAND (arg1, 1),
5870 TREE_OPERAND (arg2, 0), 0))))
5871 switch (comp_code)
5873 case EQ_EXPR:
5874 case UNEQ_EXPR:
5875 tem = fold_convert_loc (loc, arg1_type, arg1);
5876 return fold_convert_loc (loc, type, negate_expr (tem));
5877 case NE_EXPR:
5878 case LTGT_EXPR:
5879 return fold_convert_loc (loc, type, arg1);
5880 case UNGE_EXPR:
5881 case UNGT_EXPR:
5882 if (flag_trapping_math)
5883 break;
5884 /* Fall through. */
5885 case GE_EXPR:
5886 case GT_EXPR:
5887 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5888 break;
5889 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5890 return fold_convert_loc (loc, type, tem);
5891 case UNLE_EXPR:
5892 case UNLT_EXPR:
5893 if (flag_trapping_math)
5894 break;
5895 /* FALLTHRU */
5896 case LE_EXPR:
5897 case LT_EXPR:
5898 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5899 break;
5900 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5901 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5903 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5904 is not, invokes UB both in abs and in the negation of it.
5905 So, use ABSU_EXPR instead. */
5906 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5907 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5908 tem = negate_expr (tem);
5909 return fold_convert_loc (loc, type, tem);
5911 else
5913 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5914 return negate_expr (fold_convert_loc (loc, type, tem));
5916 default:
5917 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5918 break;
5921 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5922 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5923 both transformations are correct when A is NaN: A != 0
5924 is then true, and A == 0 is false. */
5926 if (!HONOR_SIGNED_ZEROS (type)
5927 && integer_zerop (arg01) && integer_zerop (arg2))
5929 if (comp_code == NE_EXPR)
5930 return fold_convert_loc (loc, type, arg1);
5931 else if (comp_code == EQ_EXPR)
5932 return build_zero_cst (type);
5935 /* Try some transformations of A op B ? A : B.
5937 A == B? A : B same as B
5938 A != B? A : B same as A
5939 A >= B? A : B same as max (A, B)
5940 A > B? A : B same as max (B, A)
5941 A <= B? A : B same as min (A, B)
5942 A < B? A : B same as min (B, A)
5944 As above, these transformations don't work in the presence
5945 of signed zeros. For example, if A and B are zeros of
5946 opposite sign, the first two transformations will change
5947 the sign of the result. In the last four, the original
5948 expressions give different results for (A=+0, B=-0) and
5949 (A=-0, B=+0), but the transformed expressions do not.
5951 The first two transformations are correct if either A or B
5952 is a NaN. In the first transformation, the condition will
5953 be false, and B will indeed be chosen. In the case of the
5954 second transformation, the condition A != B will be true,
5955 and A will be chosen.
5957 The conversions to max() and min() are not correct if B is
5958 a number and A is not. The conditions in the original
5959 expressions will be false, so all four give B. The min()
5960 and max() versions would give a NaN instead. */
5961 if (!HONOR_SIGNED_ZEROS (type)
5962 && operand_equal_for_comparison_p (arg01, arg2)
5963 /* Avoid these transformations if the COND_EXPR may be used
5964 as an lvalue in the C++ front-end. PR c++/19199. */
5965 && (in_gimple_form
5966 || VECTOR_TYPE_P (type)
5967 || (! lang_GNU_CXX ()
5968 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5969 || ! maybe_lvalue_p (arg1)
5970 || ! maybe_lvalue_p (arg2)))
5972 tree comp_op0 = arg00;
5973 tree comp_op1 = arg01;
5974 tree comp_type = TREE_TYPE (comp_op0);
5976 switch (comp_code)
5978 case EQ_EXPR:
5979 return fold_convert_loc (loc, type, arg2);
5980 case NE_EXPR:
5981 return fold_convert_loc (loc, type, arg1);
5982 case LE_EXPR:
5983 case LT_EXPR:
5984 case UNLE_EXPR:
5985 case UNLT_EXPR:
5986 /* In C++ a ?: expression can be an lvalue, so put the
5987 operand which will be used if they are equal first
5988 so that we can convert this back to the
5989 corresponding COND_EXPR. */
5990 if (!HONOR_NANS (arg1))
5992 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5993 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5994 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5995 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5996 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5997 comp_op1, comp_op0);
5998 return fold_convert_loc (loc, type, tem);
6000 break;
6001 case GE_EXPR:
6002 case GT_EXPR:
6003 case UNGE_EXPR:
6004 case UNGT_EXPR:
6005 if (!HONOR_NANS (arg1))
6007 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6008 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6009 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6010 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6011 : fold_build2_loc (loc, MAX_EXPR, comp_type,
6012 comp_op1, comp_op0);
6013 return fold_convert_loc (loc, type, tem);
6015 break;
6016 case UNEQ_EXPR:
6017 if (!HONOR_NANS (arg1))
6018 return fold_convert_loc (loc, type, arg2);
6019 break;
6020 case LTGT_EXPR:
6021 if (!HONOR_NANS (arg1))
6022 return fold_convert_loc (loc, type, arg1);
6023 break;
6024 default:
6025 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6026 break;
6030 return NULL_TREE;
6035 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6036 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6037 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6038 false) >= 2)
6039 #endif
6041 /* EXP is some logical combination of boolean tests. See if we can
6042 merge it into some range test. Return the new tree if so. */
6044 static tree
6045 fold_range_test (location_t loc, enum tree_code code, tree type,
6046 tree op0, tree op1)
6048 int or_op = (code == TRUTH_ORIF_EXPR
6049 || code == TRUTH_OR_EXPR);
6050 int in0_p, in1_p, in_p;
6051 tree low0, low1, low, high0, high1, high;
6052 bool strict_overflow_p = false;
6053 tree tem, lhs, rhs;
6054 const char * const warnmsg = G_("assuming signed overflow does not occur "
6055 "when simplifying range test");
6057 if (!INTEGRAL_TYPE_P (type))
6058 return 0;
6060 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6061 /* If op0 is known true or false and this is a short-circuiting
6062 operation we must not merge with op1 since that makes side-effects
6063 unconditional. So special-case this. */
6064 if (!lhs
6065 && ((code == TRUTH_ORIF_EXPR && in0_p)
6066 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6067 return op0;
6068 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6070 /* If this is an OR operation, invert both sides; we will invert
6071 again at the end. */
6072 if (or_op)
6073 in0_p = ! in0_p, in1_p = ! in1_p;
6075 /* If both expressions are the same, if we can merge the ranges, and we
6076 can build the range test, return it or it inverted. If one of the
6077 ranges is always true or always false, consider it to be the same
6078 expression as the other. */
6079 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6080 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6081 in1_p, low1, high1)
6082 && (tem = (build_range_check (loc, type,
6083 lhs != 0 ? lhs
6084 : rhs != 0 ? rhs : integer_zero_node,
6085 in_p, low, high))) != 0)
6087 if (strict_overflow_p)
6088 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6089 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6092 /* On machines where the branch cost is expensive, if this is a
6093 short-circuited branch and the underlying object on both sides
6094 is the same, make a non-short-circuit operation. */
6095 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6096 if (param_logical_op_non_short_circuit != -1)
6097 logical_op_non_short_circuit
6098 = param_logical_op_non_short_circuit;
6099 if (logical_op_non_short_circuit
6100 && !sanitize_coverage_p ()
6101 && lhs != 0 && rhs != 0
6102 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6103 && operand_equal_p (lhs, rhs, 0))
6105 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6106 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6107 which cases we can't do this. */
6108 if (simple_operand_p (lhs))
6109 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6110 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6111 type, op0, op1);
6113 else if (!lang_hooks.decls.global_bindings_p ()
6114 && !CONTAINS_PLACEHOLDER_P (lhs))
6116 tree common = save_expr (lhs);
6118 if ((lhs = build_range_check (loc, type, common,
6119 or_op ? ! in0_p : in0_p,
6120 low0, high0)) != 0
6121 && (rhs = build_range_check (loc, type, common,
6122 or_op ? ! in1_p : in1_p,
6123 low1, high1)) != 0)
6125 if (strict_overflow_p)
6126 fold_overflow_warning (warnmsg,
6127 WARN_STRICT_OVERFLOW_COMPARISON);
6128 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6129 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6130 type, lhs, rhs);
6135 return 0;
6138 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6139 bit value. Arrange things so the extra bits will be set to zero if and
6140 only if C is signed-extended to its full width. If MASK is nonzero,
6141 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6143 static tree
6144 unextend (tree c, int p, int unsignedp, tree mask)
6146 tree type = TREE_TYPE (c);
6147 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6148 tree temp;
6150 if (p == modesize || unsignedp)
6151 return c;
6153 /* We work by getting just the sign bit into the low-order bit, then
6154 into the high-order bit, then sign-extend. We then XOR that value
6155 with C. */
6156 temp = build_int_cst (TREE_TYPE (c),
6157 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6159 /* We must use a signed type in order to get an arithmetic right shift.
6160 However, we must also avoid introducing accidental overflows, so that
6161 a subsequent call to integer_zerop will work. Hence we must
6162 do the type conversion here. At this point, the constant is either
6163 zero or one, and the conversion to a signed type can never overflow.
6164 We could get an overflow if this conversion is done anywhere else. */
6165 if (TYPE_UNSIGNED (type))
6166 temp = fold_convert (signed_type_for (type), temp);
6168 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6169 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6170 if (mask != 0)
6171 temp = const_binop (BIT_AND_EXPR, temp,
6172 fold_convert (TREE_TYPE (c), mask));
6173 /* If necessary, convert the type back to match the type of C. */
6174 if (TYPE_UNSIGNED (type))
6175 temp = fold_convert (type, temp);
6177 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6180 /* For an expression that has the form
6181 (A && B) || ~B
6183 (A || B) && ~B,
6184 we can drop one of the inner expressions and simplify to
6185 A || ~B
6187 A && ~B
6188 LOC is the location of the resulting expression. OP is the inner
6189 logical operation; the left-hand side in the examples above, while CMPOP
6190 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6191 removing a condition that guards another, as in
6192 (A != NULL && A->...) || A == NULL
6193 which we must not transform. If RHS_ONLY is true, only eliminate the
6194 right-most operand of the inner logical operation. */
6196 static tree
6197 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6198 bool rhs_only)
6200 tree type = TREE_TYPE (cmpop);
6201 enum tree_code code = TREE_CODE (cmpop);
6202 enum tree_code truthop_code = TREE_CODE (op);
6203 tree lhs = TREE_OPERAND (op, 0);
6204 tree rhs = TREE_OPERAND (op, 1);
6205 tree orig_lhs = lhs, orig_rhs = rhs;
6206 enum tree_code rhs_code = TREE_CODE (rhs);
6207 enum tree_code lhs_code = TREE_CODE (lhs);
6208 enum tree_code inv_code;
6210 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6211 return NULL_TREE;
6213 if (TREE_CODE_CLASS (code) != tcc_comparison)
6214 return NULL_TREE;
6216 if (rhs_code == truthop_code)
6218 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6219 if (newrhs != NULL_TREE)
6221 rhs = newrhs;
6222 rhs_code = TREE_CODE (rhs);
6225 if (lhs_code == truthop_code && !rhs_only)
6227 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6228 if (newlhs != NULL_TREE)
6230 lhs = newlhs;
6231 lhs_code = TREE_CODE (lhs);
6235 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6236 if (inv_code == rhs_code
6237 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6238 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6239 return lhs;
6240 if (!rhs_only && inv_code == lhs_code
6241 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6242 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6243 return rhs;
6244 if (rhs != orig_rhs || lhs != orig_lhs)
6245 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6246 lhs, rhs);
6247 return NULL_TREE;
6250 /* Find ways of folding logical expressions of LHS and RHS:
6251 Try to merge two comparisons to the same innermost item.
6252 Look for range tests like "ch >= '0' && ch <= '9'".
6253 Look for combinations of simple terms on machines with expensive branches
6254 and evaluate the RHS unconditionally.
6256 For example, if we have p->a == 2 && p->b == 4 and we can make an
6257 object large enough to span both A and B, we can do this with a comparison
6258 against the object ANDed with the a mask.
6260 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6261 operations to do this with one comparison.
6263 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6264 function and the one above.
6266 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6267 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6269 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6270 two operands.
6272 We return the simplified tree or 0 if no optimization is possible. */
6274 static tree
6275 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6276 tree lhs, tree rhs)
6278 /* If this is the "or" of two comparisons, we can do something if
6279 the comparisons are NE_EXPR. If this is the "and", we can do something
6280 if the comparisons are EQ_EXPR. I.e.,
6281 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6283 WANTED_CODE is this operation code. For single bit fields, we can
6284 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6285 comparison for one-bit fields. */
6287 enum tree_code wanted_code;
6288 enum tree_code lcode, rcode;
6289 tree ll_arg, lr_arg, rl_arg, rr_arg;
6290 tree ll_inner, lr_inner, rl_inner, rr_inner;
6291 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6292 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6293 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6294 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6295 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6296 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6297 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6298 scalar_int_mode lnmode, rnmode;
6299 tree ll_mask, lr_mask, rl_mask, rr_mask;
6300 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6301 tree l_const, r_const;
6302 tree lntype, rntype, result;
6303 HOST_WIDE_INT first_bit, end_bit;
6304 int volatilep;
6306 /* Start by getting the comparison codes. Fail if anything is volatile.
6307 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6308 it were surrounded with a NE_EXPR. */
6310 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6311 return 0;
6313 lcode = TREE_CODE (lhs);
6314 rcode = TREE_CODE (rhs);
6316 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6318 lhs = build2 (NE_EXPR, truth_type, lhs,
6319 build_int_cst (TREE_TYPE (lhs), 0));
6320 lcode = NE_EXPR;
6323 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6325 rhs = build2 (NE_EXPR, truth_type, rhs,
6326 build_int_cst (TREE_TYPE (rhs), 0));
6327 rcode = NE_EXPR;
6330 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6331 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6332 return 0;
6334 ll_arg = TREE_OPERAND (lhs, 0);
6335 lr_arg = TREE_OPERAND (lhs, 1);
6336 rl_arg = TREE_OPERAND (rhs, 0);
6337 rr_arg = TREE_OPERAND (rhs, 1);
6339 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6340 if (simple_operand_p (ll_arg)
6341 && simple_operand_p (lr_arg))
6343 if (operand_equal_p (ll_arg, rl_arg, 0)
6344 && operand_equal_p (lr_arg, rr_arg, 0))
6346 result = combine_comparisons (loc, code, lcode, rcode,
6347 truth_type, ll_arg, lr_arg);
6348 if (result)
6349 return result;
6351 else if (operand_equal_p (ll_arg, rr_arg, 0)
6352 && operand_equal_p (lr_arg, rl_arg, 0))
6354 result = combine_comparisons (loc, code, lcode,
6355 swap_tree_comparison (rcode),
6356 truth_type, ll_arg, lr_arg);
6357 if (result)
6358 return result;
6362 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6363 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6365 /* If the RHS can be evaluated unconditionally and its operands are
6366 simple, it wins to evaluate the RHS unconditionally on machines
6367 with expensive branches. In this case, this isn't a comparison
6368 that can be merged. */
6370 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6371 false) >= 2
6372 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6373 && simple_operand_p (rl_arg)
6374 && simple_operand_p (rr_arg))
6376 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6377 if (code == TRUTH_OR_EXPR
6378 && lcode == NE_EXPR && integer_zerop (lr_arg)
6379 && rcode == NE_EXPR && integer_zerop (rr_arg)
6380 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6381 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6382 return build2_loc (loc, NE_EXPR, truth_type,
6383 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6384 ll_arg, rl_arg),
6385 build_int_cst (TREE_TYPE (ll_arg), 0));
6387 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6388 if (code == TRUTH_AND_EXPR
6389 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6390 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6391 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6392 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6393 return build2_loc (loc, EQ_EXPR, truth_type,
6394 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6395 ll_arg, rl_arg),
6396 build_int_cst (TREE_TYPE (ll_arg), 0));
6399 /* See if the comparisons can be merged. Then get all the parameters for
6400 each side. */
6402 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6403 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6404 return 0;
6406 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6407 volatilep = 0;
6408 ll_inner = decode_field_reference (loc, &ll_arg,
6409 &ll_bitsize, &ll_bitpos, &ll_mode,
6410 &ll_unsignedp, &ll_reversep, &volatilep,
6411 &ll_mask, &ll_and_mask);
6412 lr_inner = decode_field_reference (loc, &lr_arg,
6413 &lr_bitsize, &lr_bitpos, &lr_mode,
6414 &lr_unsignedp, &lr_reversep, &volatilep,
6415 &lr_mask, &lr_and_mask);
6416 rl_inner = decode_field_reference (loc, &rl_arg,
6417 &rl_bitsize, &rl_bitpos, &rl_mode,
6418 &rl_unsignedp, &rl_reversep, &volatilep,
6419 &rl_mask, &rl_and_mask);
6420 rr_inner = decode_field_reference (loc, &rr_arg,
6421 &rr_bitsize, &rr_bitpos, &rr_mode,
6422 &rr_unsignedp, &rr_reversep, &volatilep,
6423 &rr_mask, &rr_and_mask);
6425 /* It must be true that the inner operation on the lhs of each
6426 comparison must be the same if we are to be able to do anything.
6427 Then see if we have constants. If not, the same must be true for
6428 the rhs's. */
6429 if (volatilep
6430 || ll_reversep != rl_reversep
6431 || ll_inner == 0 || rl_inner == 0
6432 || ! operand_equal_p (ll_inner, rl_inner, 0))
6433 return 0;
6435 if (TREE_CODE (lr_arg) == INTEGER_CST
6436 && TREE_CODE (rr_arg) == INTEGER_CST)
6438 l_const = lr_arg, r_const = rr_arg;
6439 lr_reversep = ll_reversep;
6441 else if (lr_reversep != rr_reversep
6442 || lr_inner == 0 || rr_inner == 0
6443 || ! operand_equal_p (lr_inner, rr_inner, 0))
6444 return 0;
6445 else
6446 l_const = r_const = 0;
6448 /* If either comparison code is not correct for our logical operation,
6449 fail. However, we can convert a one-bit comparison against zero into
6450 the opposite comparison against that bit being set in the field. */
6452 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6453 if (lcode != wanted_code)
6455 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6457 /* Make the left operand unsigned, since we are only interested
6458 in the value of one bit. Otherwise we are doing the wrong
6459 thing below. */
6460 ll_unsignedp = 1;
6461 l_const = ll_mask;
6463 else
6464 return 0;
6467 /* This is analogous to the code for l_const above. */
6468 if (rcode != wanted_code)
6470 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6472 rl_unsignedp = 1;
6473 r_const = rl_mask;
6475 else
6476 return 0;
6479 /* See if we can find a mode that contains both fields being compared on
6480 the left. If we can't, fail. Otherwise, update all constants and masks
6481 to be relative to a field of that size. */
6482 first_bit = MIN (ll_bitpos, rl_bitpos);
6483 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6484 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6485 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6486 volatilep, &lnmode))
6487 return 0;
6489 lnbitsize = GET_MODE_BITSIZE (lnmode);
6490 lnbitpos = first_bit & ~ (lnbitsize - 1);
6491 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6492 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6494 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6496 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6497 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6500 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6501 size_int (xll_bitpos));
6502 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6503 size_int (xrl_bitpos));
6504 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6505 return 0;
6507 if (l_const)
6509 l_const = fold_convert_loc (loc, lntype, l_const);
6510 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6511 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6512 if (l_const == NULL_TREE)
6513 return 0;
6514 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6515 fold_build1_loc (loc, BIT_NOT_EXPR,
6516 lntype, ll_mask))))
6518 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6520 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6523 if (r_const)
6525 r_const = fold_convert_loc (loc, lntype, r_const);
6526 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6527 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6528 if (r_const == NULL_TREE)
6529 return 0;
6530 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6531 fold_build1_loc (loc, BIT_NOT_EXPR,
6532 lntype, rl_mask))))
6534 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6536 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6540 /* If the right sides are not constant, do the same for it. Also,
6541 disallow this optimization if a size, signedness or storage order
6542 mismatch occurs between the left and right sides. */
6543 if (l_const == 0)
6545 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6546 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6547 || ll_reversep != lr_reversep
6548 /* Make sure the two fields on the right
6549 correspond to the left without being swapped. */
6550 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6551 return 0;
6553 first_bit = MIN (lr_bitpos, rr_bitpos);
6554 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6555 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6556 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6557 volatilep, &rnmode))
6558 return 0;
6560 rnbitsize = GET_MODE_BITSIZE (rnmode);
6561 rnbitpos = first_bit & ~ (rnbitsize - 1);
6562 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6563 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6565 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6567 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6568 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6571 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6572 rntype, lr_mask),
6573 size_int (xlr_bitpos));
6574 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6575 rntype, rr_mask),
6576 size_int (xrr_bitpos));
6577 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6578 return 0;
6580 /* Make a mask that corresponds to both fields being compared.
6581 Do this for both items being compared. If the operands are the
6582 same size and the bits being compared are in the same position
6583 then we can do this by masking both and comparing the masked
6584 results. */
6585 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6586 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6587 if (lnbitsize == rnbitsize
6588 && xll_bitpos == xlr_bitpos
6589 && lnbitpos >= 0
6590 && rnbitpos >= 0)
6592 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6593 lntype, lnbitsize, lnbitpos,
6594 ll_unsignedp || rl_unsignedp, ll_reversep);
6595 if (! all_ones_mask_p (ll_mask, lnbitsize))
6596 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6598 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6599 rntype, rnbitsize, rnbitpos,
6600 lr_unsignedp || rr_unsignedp, lr_reversep);
6601 if (! all_ones_mask_p (lr_mask, rnbitsize))
6602 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6604 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6607 /* There is still another way we can do something: If both pairs of
6608 fields being compared are adjacent, we may be able to make a wider
6609 field containing them both.
6611 Note that we still must mask the lhs/rhs expressions. Furthermore,
6612 the mask must be shifted to account for the shift done by
6613 make_bit_field_ref. */
6614 if (((ll_bitsize + ll_bitpos == rl_bitpos
6615 && lr_bitsize + lr_bitpos == rr_bitpos)
6616 || (ll_bitpos == rl_bitpos + rl_bitsize
6617 && lr_bitpos == rr_bitpos + rr_bitsize))
6618 && ll_bitpos >= 0
6619 && rl_bitpos >= 0
6620 && lr_bitpos >= 0
6621 && rr_bitpos >= 0)
6623 tree type;
6625 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6626 ll_bitsize + rl_bitsize,
6627 MIN (ll_bitpos, rl_bitpos),
6628 ll_unsignedp, ll_reversep);
6629 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6630 lr_bitsize + rr_bitsize,
6631 MIN (lr_bitpos, rr_bitpos),
6632 lr_unsignedp, lr_reversep);
6634 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6635 size_int (MIN (xll_bitpos, xrl_bitpos)));
6636 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6637 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6638 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6639 return 0;
6641 /* Convert to the smaller type before masking out unwanted bits. */
6642 type = lntype;
6643 if (lntype != rntype)
6645 if (lnbitsize > rnbitsize)
6647 lhs = fold_convert_loc (loc, rntype, lhs);
6648 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6649 type = rntype;
6651 else if (lnbitsize < rnbitsize)
6653 rhs = fold_convert_loc (loc, lntype, rhs);
6654 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6655 type = lntype;
6659 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6660 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6662 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6663 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6665 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6668 return 0;
6671 /* Handle the case of comparisons with constants. If there is something in
6672 common between the masks, those bits of the constants must be the same.
6673 If not, the condition is always false. Test for this to avoid generating
6674 incorrect code below. */
6675 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6676 if (! integer_zerop (result)
6677 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6678 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6680 if (wanted_code == NE_EXPR)
6682 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6683 return constant_boolean_node (true, truth_type);
6685 else
6687 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6688 return constant_boolean_node (false, truth_type);
6692 if (lnbitpos < 0)
6693 return 0;
6695 /* Construct the expression we will return. First get the component
6696 reference we will make. Unless the mask is all ones the width of
6697 that field, perform the mask operation. Then compare with the
6698 merged constant. */
6699 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6700 lntype, lnbitsize, lnbitpos,
6701 ll_unsignedp || rl_unsignedp, ll_reversep);
6703 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6704 if (! all_ones_mask_p (ll_mask, lnbitsize))
6705 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6707 return build2_loc (loc, wanted_code, truth_type, result,
6708 const_binop (BIT_IOR_EXPR, l_const, r_const));
6711 /* T is an integer expression that is being multiplied, divided, or taken a
6712 modulus (CODE says which and what kind of divide or modulus) by a
6713 constant C. See if we can eliminate that operation by folding it with
6714 other operations already in T. WIDE_TYPE, if non-null, is a type that
6715 should be used for the computation if wider than our type.
6717 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6718 (X * 2) + (Y * 4). We must, however, be assured that either the original
6719 expression would not overflow or that overflow is undefined for the type
6720 in the language in question.
6722 If we return a non-null expression, it is an equivalent form of the
6723 original computation, but need not be in the original type.
6725 We set *STRICT_OVERFLOW_P to true if the return values depends on
6726 signed overflow being undefined. Otherwise we do not change
6727 *STRICT_OVERFLOW_P. */
6729 static tree
6730 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6731 bool *strict_overflow_p)
6733 /* To avoid exponential search depth, refuse to allow recursion past
6734 three levels. Beyond that (1) it's highly unlikely that we'll find
6735 something interesting and (2) we've probably processed it before
6736 when we built the inner expression. */
6738 static int depth;
6739 tree ret;
6741 if (depth > 3)
6742 return NULL;
6744 depth++;
6745 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6746 depth--;
6748 return ret;
6751 static tree
6752 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6753 bool *strict_overflow_p)
6755 tree type = TREE_TYPE (t);
6756 enum tree_code tcode = TREE_CODE (t);
6757 tree ctype = (wide_type != 0
6758 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6759 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6760 ? wide_type : type);
6761 tree t1, t2;
6762 int same_p = tcode == code;
6763 tree op0 = NULL_TREE, op1 = NULL_TREE;
6764 bool sub_strict_overflow_p;
6766 /* Don't deal with constants of zero here; they confuse the code below. */
6767 if (integer_zerop (c))
6768 return NULL_TREE;
6770 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6771 op0 = TREE_OPERAND (t, 0);
6773 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6774 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6776 /* Note that we need not handle conditional operations here since fold
6777 already handles those cases. So just do arithmetic here. */
6778 switch (tcode)
6780 case INTEGER_CST:
6781 /* For a constant, we can always simplify if we are a multiply
6782 or (for divide and modulus) if it is a multiple of our constant. */
6783 if (code == MULT_EXPR
6784 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6785 TYPE_SIGN (type)))
6787 tree tem = const_binop (code, fold_convert (ctype, t),
6788 fold_convert (ctype, c));
6789 /* If the multiplication overflowed, we lost information on it.
6790 See PR68142 and PR69845. */
6791 if (TREE_OVERFLOW (tem))
6792 return NULL_TREE;
6793 return tem;
6795 break;
6797 CASE_CONVERT: case NON_LVALUE_EXPR:
6798 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6799 break;
6800 /* If op0 is an expression ... */
6801 if ((COMPARISON_CLASS_P (op0)
6802 || UNARY_CLASS_P (op0)
6803 || BINARY_CLASS_P (op0)
6804 || VL_EXP_CLASS_P (op0)
6805 || EXPRESSION_CLASS_P (op0))
6806 /* ... and has wrapping overflow, and its type is smaller
6807 than ctype, then we cannot pass through as widening. */
6808 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6809 && (TYPE_PRECISION (ctype)
6810 > TYPE_PRECISION (TREE_TYPE (op0))))
6811 /* ... or this is a truncation (t is narrower than op0),
6812 then we cannot pass through this narrowing. */
6813 || (TYPE_PRECISION (type)
6814 < TYPE_PRECISION (TREE_TYPE (op0)))
6815 /* ... or signedness changes for division or modulus,
6816 then we cannot pass through this conversion. */
6817 || (code != MULT_EXPR
6818 && (TYPE_UNSIGNED (ctype)
6819 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6820 /* ... or has undefined overflow while the converted to
6821 type has not, we cannot do the operation in the inner type
6822 as that would introduce undefined overflow. */
6823 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6824 && !TYPE_OVERFLOW_UNDEFINED (type))))
6825 break;
6827 /* Pass the constant down and see if we can make a simplification. If
6828 we can, replace this expression with the inner simplification for
6829 possible later conversion to our or some other type. */
6830 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6831 && TREE_CODE (t2) == INTEGER_CST
6832 && !TREE_OVERFLOW (t2)
6833 && (t1 = extract_muldiv (op0, t2, code,
6834 code == MULT_EXPR ? ctype : NULL_TREE,
6835 strict_overflow_p)) != 0)
6836 return t1;
6837 break;
6839 case ABS_EXPR:
6840 /* If widening the type changes it from signed to unsigned, then we
6841 must avoid building ABS_EXPR itself as unsigned. */
6842 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6844 tree cstype = (*signed_type_for) (ctype);
6845 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6846 != 0)
6848 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6849 return fold_convert (ctype, t1);
6851 break;
6853 /* If the constant is negative, we cannot simplify this. */
6854 if (tree_int_cst_sgn (c) == -1)
6855 break;
6856 /* FALLTHROUGH */
6857 case NEGATE_EXPR:
6858 /* For division and modulus, type can't be unsigned, as e.g.
6859 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6860 For signed types, even with wrapping overflow, this is fine. */
6861 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6862 break;
6863 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6864 != 0)
6865 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6866 break;
6868 case MIN_EXPR: case MAX_EXPR:
6869 /* If widening the type changes the signedness, then we can't perform
6870 this optimization as that changes the result. */
6871 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6872 break;
6874 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6875 sub_strict_overflow_p = false;
6876 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6877 &sub_strict_overflow_p)) != 0
6878 && (t2 = extract_muldiv (op1, c, code, wide_type,
6879 &sub_strict_overflow_p)) != 0)
6881 if (tree_int_cst_sgn (c) < 0)
6882 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6883 if (sub_strict_overflow_p)
6884 *strict_overflow_p = true;
6885 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6886 fold_convert (ctype, t2));
6888 break;
6890 case LSHIFT_EXPR: case RSHIFT_EXPR:
6891 /* If the second operand is constant, this is a multiplication
6892 or floor division, by a power of two, so we can treat it that
6893 way unless the multiplier or divisor overflows. Signed
6894 left-shift overflow is implementation-defined rather than
6895 undefined in C90, so do not convert signed left shift into
6896 multiplication. */
6897 if (TREE_CODE (op1) == INTEGER_CST
6898 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6899 /* const_binop may not detect overflow correctly,
6900 so check for it explicitly here. */
6901 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6902 wi::to_wide (op1))
6903 && (t1 = fold_convert (ctype,
6904 const_binop (LSHIFT_EXPR, size_one_node,
6905 op1))) != 0
6906 && !TREE_OVERFLOW (t1))
6907 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6908 ? MULT_EXPR : FLOOR_DIV_EXPR,
6909 ctype,
6910 fold_convert (ctype, op0),
6911 t1),
6912 c, code, wide_type, strict_overflow_p);
6913 break;
6915 case PLUS_EXPR: case MINUS_EXPR:
6916 /* See if we can eliminate the operation on both sides. If we can, we
6917 can return a new PLUS or MINUS. If we can't, the only remaining
6918 cases where we can do anything are if the second operand is a
6919 constant. */
6920 sub_strict_overflow_p = false;
6921 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6922 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6923 if (t1 != 0 && t2 != 0
6924 && TYPE_OVERFLOW_WRAPS (ctype)
6925 && (code == MULT_EXPR
6926 /* If not multiplication, we can only do this if both operands
6927 are divisible by c. */
6928 || (multiple_of_p (ctype, op0, c)
6929 && multiple_of_p (ctype, op1, c))))
6931 if (sub_strict_overflow_p)
6932 *strict_overflow_p = true;
6933 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6934 fold_convert (ctype, t2));
6937 /* If this was a subtraction, negate OP1 and set it to be an addition.
6938 This simplifies the logic below. */
6939 if (tcode == MINUS_EXPR)
6941 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6942 /* If OP1 was not easily negatable, the constant may be OP0. */
6943 if (TREE_CODE (op0) == INTEGER_CST)
6945 std::swap (op0, op1);
6946 std::swap (t1, t2);
6950 if (TREE_CODE (op1) != INTEGER_CST)
6951 break;
6953 /* If either OP1 or C are negative, this optimization is not safe for
6954 some of the division and remainder types while for others we need
6955 to change the code. */
6956 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6958 if (code == CEIL_DIV_EXPR)
6959 code = FLOOR_DIV_EXPR;
6960 else if (code == FLOOR_DIV_EXPR)
6961 code = CEIL_DIV_EXPR;
6962 else if (code != MULT_EXPR
6963 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6964 break;
6967 /* If it's a multiply or a division/modulus operation of a multiple
6968 of our constant, do the operation and verify it doesn't overflow. */
6969 if (code == MULT_EXPR
6970 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6971 TYPE_SIGN (type)))
6973 op1 = const_binop (code, fold_convert (ctype, op1),
6974 fold_convert (ctype, c));
6975 /* We allow the constant to overflow with wrapping semantics. */
6976 if (op1 == 0
6977 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6978 break;
6980 else
6981 break;
6983 /* If we have an unsigned type, we cannot widen the operation since it
6984 will change the result if the original computation overflowed. */
6985 if (TYPE_UNSIGNED (ctype) && ctype != type)
6986 break;
6988 /* The last case is if we are a multiply. In that case, we can
6989 apply the distributive law to commute the multiply and addition
6990 if the multiplication of the constants doesn't overflow
6991 and overflow is defined. With undefined overflow
6992 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6993 But fold_plusminus_mult_expr would factor back any power-of-two
6994 value so do not distribute in the first place in this case. */
6995 if (code == MULT_EXPR
6996 && TYPE_OVERFLOW_WRAPS (ctype)
6997 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6998 return fold_build2 (tcode, ctype,
6999 fold_build2 (code, ctype,
7000 fold_convert (ctype, op0),
7001 fold_convert (ctype, c)),
7002 op1);
7004 break;
7006 case MULT_EXPR:
7007 /* We have a special case here if we are doing something like
7008 (C * 8) % 4 since we know that's zero. */
7009 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7010 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7011 /* If the multiplication can overflow we cannot optimize this. */
7012 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7013 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7014 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7015 TYPE_SIGN (type)))
7017 *strict_overflow_p = true;
7018 return omit_one_operand (type, integer_zero_node, op0);
7021 /* ... fall through ... */
7023 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7024 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7025 /* If we can extract our operation from the LHS, do so and return a
7026 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7027 do something only if the second operand is a constant. */
7028 if (same_p
7029 && TYPE_OVERFLOW_WRAPS (ctype)
7030 && (t1 = extract_muldiv (op0, c, code, wide_type,
7031 strict_overflow_p)) != 0)
7032 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7033 fold_convert (ctype, op1));
7034 else if (tcode == MULT_EXPR && code == MULT_EXPR
7035 && TYPE_OVERFLOW_WRAPS (ctype)
7036 && (t1 = extract_muldiv (op1, c, code, wide_type,
7037 strict_overflow_p)) != 0)
7038 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7039 fold_convert (ctype, t1));
7040 else if (TREE_CODE (op1) != INTEGER_CST)
7041 return 0;
7043 /* If these are the same operation types, we can associate them
7044 assuming no overflow. */
7045 if (tcode == code)
7047 bool overflow_p = false;
7048 wi::overflow_type overflow_mul;
7049 signop sign = TYPE_SIGN (ctype);
7050 unsigned prec = TYPE_PRECISION (ctype);
7051 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7052 wi::to_wide (c, prec),
7053 sign, &overflow_mul);
7054 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7055 if (overflow_mul
7056 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7057 overflow_p = true;
7058 if (!overflow_p)
7059 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7060 wide_int_to_tree (ctype, mul));
7063 /* If these operations "cancel" each other, we have the main
7064 optimizations of this pass, which occur when either constant is a
7065 multiple of the other, in which case we replace this with either an
7066 operation or CODE or TCODE.
7068 If we have an unsigned type, we cannot do this since it will change
7069 the result if the original computation overflowed. */
7070 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7071 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7072 || (tcode == MULT_EXPR
7073 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7074 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7075 && code != MULT_EXPR)))
7077 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7078 TYPE_SIGN (type)))
7080 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7081 *strict_overflow_p = true;
7082 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7083 fold_convert (ctype,
7084 const_binop (TRUNC_DIV_EXPR,
7085 op1, c)));
7087 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7088 TYPE_SIGN (type)))
7090 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7091 *strict_overflow_p = true;
7092 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7093 fold_convert (ctype,
7094 const_binop (TRUNC_DIV_EXPR,
7095 c, op1)));
7098 break;
7100 default:
7101 break;
7104 return 0;
7107 /* Return a node which has the indicated constant VALUE (either 0 or
7108 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7109 and is of the indicated TYPE. */
7111 tree
7112 constant_boolean_node (bool value, tree type)
7114 if (type == integer_type_node)
7115 return value ? integer_one_node : integer_zero_node;
7116 else if (type == boolean_type_node)
7117 return value ? boolean_true_node : boolean_false_node;
7118 else if (TREE_CODE (type) == VECTOR_TYPE)
7119 return build_vector_from_val (type,
7120 build_int_cst (TREE_TYPE (type),
7121 value ? -1 : 0));
7122 else
7123 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7127 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7128 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7129 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7130 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7131 COND is the first argument to CODE; otherwise (as in the example
7132 given here), it is the second argument. TYPE is the type of the
7133 original expression. Return NULL_TREE if no simplification is
7134 possible. */
7136 static tree
7137 fold_binary_op_with_conditional_arg (location_t loc,
7138 enum tree_code code,
7139 tree type, tree op0, tree op1,
7140 tree cond, tree arg, int cond_first_p)
7142 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7143 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7144 tree test, true_value, false_value;
7145 tree lhs = NULL_TREE;
7146 tree rhs = NULL_TREE;
7147 enum tree_code cond_code = COND_EXPR;
7149 /* Do not move possibly trapping operations into the conditional as this
7150 pessimizes code and causes gimplification issues when applied late. */
7151 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7152 ANY_INTEGRAL_TYPE_P (type)
7153 && TYPE_OVERFLOW_TRAPS (type), op1))
7154 return NULL_TREE;
7156 if (TREE_CODE (cond) == COND_EXPR
7157 || TREE_CODE (cond) == VEC_COND_EXPR)
7159 test = TREE_OPERAND (cond, 0);
7160 true_value = TREE_OPERAND (cond, 1);
7161 false_value = TREE_OPERAND (cond, 2);
7162 /* If this operand throws an expression, then it does not make
7163 sense to try to perform a logical or arithmetic operation
7164 involving it. */
7165 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7166 lhs = true_value;
7167 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7168 rhs = false_value;
7170 else if (!(TREE_CODE (type) != VECTOR_TYPE
7171 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7173 tree testtype = TREE_TYPE (cond);
7174 test = cond;
7175 true_value = constant_boolean_node (true, testtype);
7176 false_value = constant_boolean_node (false, testtype);
7178 else
7179 /* Detect the case of mixing vector and scalar types - bail out. */
7180 return NULL_TREE;
7182 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7183 cond_code = VEC_COND_EXPR;
7185 /* This transformation is only worthwhile if we don't have to wrap ARG
7186 in a SAVE_EXPR and the operation can be simplified without recursing
7187 on at least one of the branches once its pushed inside the COND_EXPR. */
7188 if (!TREE_CONSTANT (arg)
7189 && (TREE_SIDE_EFFECTS (arg)
7190 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7191 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7192 return NULL_TREE;
7194 arg = fold_convert_loc (loc, arg_type, arg);
7195 if (lhs == 0)
7197 true_value = fold_convert_loc (loc, cond_type, true_value);
7198 if (cond_first_p)
7199 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7200 else
7201 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7203 if (rhs == 0)
7205 false_value = fold_convert_loc (loc, cond_type, false_value);
7206 if (cond_first_p)
7207 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7208 else
7209 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7212 /* Check that we have simplified at least one of the branches. */
7213 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7214 return NULL_TREE;
7216 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7220 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7222 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7223 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7224 if ARG - ZERO_ARG is the same as X.
7226 If ARG is NULL, check for any value of type TYPE.
7228 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7229 and finite. The problematic cases are when X is zero, and its mode
7230 has signed zeros. In the case of rounding towards -infinity,
7231 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7232 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7234 bool
7235 fold_real_zero_addition_p (const_tree type, const_tree arg,
7236 const_tree zero_arg, int negate)
7238 if (!real_zerop (zero_arg))
7239 return false;
7241 /* Don't allow the fold with -fsignaling-nans. */
7242 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7243 return false;
7245 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7246 if (!HONOR_SIGNED_ZEROS (type))
7247 return true;
7249 /* There is no case that is safe for all rounding modes. */
7250 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7251 return false;
7253 /* In a vector or complex, we would need to check the sign of all zeros. */
7254 if (TREE_CODE (zero_arg) == VECTOR_CST)
7255 zero_arg = uniform_vector_p (zero_arg);
7256 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7257 return false;
7259 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7260 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7261 negate = !negate;
7263 /* The mode has signed zeros, and we have to honor their sign.
7264 In this situation, there are only two cases we can return true for.
7265 (i) X - 0 is the same as X with default rounding.
7266 (ii) X + 0 is X when X can't possibly be -0.0. */
7267 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7270 /* Subroutine of match.pd that optimizes comparisons of a division by
7271 a nonzero integer constant against an integer constant, i.e.
7272 X/C1 op C2.
7274 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7275 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7277 enum tree_code
7278 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7279 tree *hi, bool *neg_overflow)
7281 tree prod, tmp, type = TREE_TYPE (c1);
7282 signop sign = TYPE_SIGN (type);
7283 wi::overflow_type overflow;
7285 /* We have to do this the hard way to detect unsigned overflow.
7286 prod = int_const_binop (MULT_EXPR, c1, c2); */
7287 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7288 prod = force_fit_type (type, val, -1, overflow);
7289 *neg_overflow = false;
7291 if (sign == UNSIGNED)
7293 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7294 *lo = prod;
7296 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7297 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7298 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7300 else if (tree_int_cst_sgn (c1) >= 0)
7302 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7303 switch (tree_int_cst_sgn (c2))
7305 case -1:
7306 *neg_overflow = true;
7307 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7308 *hi = prod;
7309 break;
7311 case 0:
7312 *lo = fold_negate_const (tmp, type);
7313 *hi = tmp;
7314 break;
7316 case 1:
7317 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7318 *lo = prod;
7319 break;
7321 default:
7322 gcc_unreachable ();
7325 else
7327 /* A negative divisor reverses the relational operators. */
7328 code = swap_tree_comparison (code);
7330 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7331 switch (tree_int_cst_sgn (c2))
7333 case -1:
7334 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7335 *lo = prod;
7336 break;
7338 case 0:
7339 *hi = fold_negate_const (tmp, type);
7340 *lo = tmp;
7341 break;
7343 case 1:
7344 *neg_overflow = true;
7345 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7346 *hi = prod;
7347 break;
7349 default:
7350 gcc_unreachable ();
7354 if (code != EQ_EXPR && code != NE_EXPR)
7355 return code;
7357 if (TREE_OVERFLOW (*lo)
7358 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7359 *lo = NULL_TREE;
7360 if (TREE_OVERFLOW (*hi)
7361 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7362 *hi = NULL_TREE;
7364 return code;
7368 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7369 equality/inequality test, then return a simplified form of the test
7370 using a sign testing. Otherwise return NULL. TYPE is the desired
7371 result type. */
7373 static tree
7374 fold_single_bit_test_into_sign_test (location_t loc,
7375 enum tree_code code, tree arg0, tree arg1,
7376 tree result_type)
7378 /* If this is testing a single bit, we can optimize the test. */
7379 if ((code == NE_EXPR || code == EQ_EXPR)
7380 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7381 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7383 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7384 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7385 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7387 if (arg00 != NULL_TREE
7388 /* This is only a win if casting to a signed type is cheap,
7389 i.e. when arg00's type is not a partial mode. */
7390 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7392 tree stype = signed_type_for (TREE_TYPE (arg00));
7393 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7394 result_type,
7395 fold_convert_loc (loc, stype, arg00),
7396 build_int_cst (stype, 0));
7400 return NULL_TREE;
7403 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7404 equality/inequality test, then return a simplified form of
7405 the test using shifts and logical operations. Otherwise return
7406 NULL. TYPE is the desired result type. */
7408 tree
7409 fold_single_bit_test (location_t loc, enum tree_code code,
7410 tree arg0, tree arg1, tree result_type)
7412 /* If this is testing a single bit, we can optimize the test. */
7413 if ((code == NE_EXPR || code == EQ_EXPR)
7414 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7415 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7417 tree inner = TREE_OPERAND (arg0, 0);
7418 tree type = TREE_TYPE (arg0);
7419 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7420 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7421 int ops_unsigned;
7422 tree signed_type, unsigned_type, intermediate_type;
7423 tree tem, one;
7425 /* First, see if we can fold the single bit test into a sign-bit
7426 test. */
7427 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7428 result_type);
7429 if (tem)
7430 return tem;
7432 /* Otherwise we have (A & C) != 0 where C is a single bit,
7433 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7434 Similarly for (A & C) == 0. */
7436 /* If INNER is a right shift of a constant and it plus BITNUM does
7437 not overflow, adjust BITNUM and INNER. */
7438 if (TREE_CODE (inner) == RSHIFT_EXPR
7439 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7440 && bitnum < TYPE_PRECISION (type)
7441 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7442 TYPE_PRECISION (type) - bitnum))
7444 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7445 inner = TREE_OPERAND (inner, 0);
7448 /* If we are going to be able to omit the AND below, we must do our
7449 operations as unsigned. If we must use the AND, we have a choice.
7450 Normally unsigned is faster, but for some machines signed is. */
7451 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7452 && !flag_syntax_only) ? 0 : 1;
7454 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7455 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7456 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7457 inner = fold_convert_loc (loc, intermediate_type, inner);
7459 if (bitnum != 0)
7460 inner = build2 (RSHIFT_EXPR, intermediate_type,
7461 inner, size_int (bitnum));
7463 one = build_int_cst (intermediate_type, 1);
7465 if (code == EQ_EXPR)
7466 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7468 /* Put the AND last so it can combine with more things. */
7469 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7471 /* Make sure to return the proper type. */
7472 inner = fold_convert_loc (loc, result_type, inner);
7474 return inner;
7476 return NULL_TREE;
7479 /* Test whether it is preferable to swap two operands, ARG0 and
7480 ARG1, for example because ARG0 is an integer constant and ARG1
7481 isn't. */
7483 bool
7484 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7486 if (CONSTANT_CLASS_P (arg1))
7487 return 0;
7488 if (CONSTANT_CLASS_P (arg0))
7489 return 1;
7491 STRIP_NOPS (arg0);
7492 STRIP_NOPS (arg1);
7494 if (TREE_CONSTANT (arg1))
7495 return 0;
7496 if (TREE_CONSTANT (arg0))
7497 return 1;
7499 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7500 for commutative and comparison operators. Ensuring a canonical
7501 form allows the optimizers to find additional redundancies without
7502 having to explicitly check for both orderings. */
7503 if (TREE_CODE (arg0) == SSA_NAME
7504 && TREE_CODE (arg1) == SSA_NAME
7505 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7506 return 1;
7508 /* Put SSA_NAMEs last. */
7509 if (TREE_CODE (arg1) == SSA_NAME)
7510 return 0;
7511 if (TREE_CODE (arg0) == SSA_NAME)
7512 return 1;
7514 /* Put variables last. */
7515 if (DECL_P (arg1))
7516 return 0;
7517 if (DECL_P (arg0))
7518 return 1;
7520 return 0;
7524 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7525 means A >= Y && A != MAX, but in this case we know that
7526 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7528 static tree
7529 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7531 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7533 if (TREE_CODE (bound) == LT_EXPR)
7534 a = TREE_OPERAND (bound, 0);
7535 else if (TREE_CODE (bound) == GT_EXPR)
7536 a = TREE_OPERAND (bound, 1);
7537 else
7538 return NULL_TREE;
7540 typea = TREE_TYPE (a);
7541 if (!INTEGRAL_TYPE_P (typea)
7542 && !POINTER_TYPE_P (typea))
7543 return NULL_TREE;
7545 if (TREE_CODE (ineq) == LT_EXPR)
7547 a1 = TREE_OPERAND (ineq, 1);
7548 y = TREE_OPERAND (ineq, 0);
7550 else if (TREE_CODE (ineq) == GT_EXPR)
7552 a1 = TREE_OPERAND (ineq, 0);
7553 y = TREE_OPERAND (ineq, 1);
7555 else
7556 return NULL_TREE;
7558 if (TREE_TYPE (a1) != typea)
7559 return NULL_TREE;
7561 if (POINTER_TYPE_P (typea))
7563 /* Convert the pointer types into integer before taking the difference. */
7564 tree ta = fold_convert_loc (loc, ssizetype, a);
7565 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7566 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7568 else
7569 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7571 if (!diff || !integer_onep (diff))
7572 return NULL_TREE;
7574 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7577 /* Fold a sum or difference of at least one multiplication.
7578 Returns the folded tree or NULL if no simplification could be made. */
7580 static tree
7581 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7582 tree arg0, tree arg1)
7584 tree arg00, arg01, arg10, arg11;
7585 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7587 /* (A * C) +- (B * C) -> (A+-B) * C.
7588 (A * C) +- A -> A * (C+-1).
7589 We are most concerned about the case where C is a constant,
7590 but other combinations show up during loop reduction. Since
7591 it is not difficult, try all four possibilities. */
7593 if (TREE_CODE (arg0) == MULT_EXPR)
7595 arg00 = TREE_OPERAND (arg0, 0);
7596 arg01 = TREE_OPERAND (arg0, 1);
7598 else if (TREE_CODE (arg0) == INTEGER_CST)
7600 arg00 = build_one_cst (type);
7601 arg01 = arg0;
7603 else
7605 /* We cannot generate constant 1 for fract. */
7606 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7607 return NULL_TREE;
7608 arg00 = arg0;
7609 arg01 = build_one_cst (type);
7611 if (TREE_CODE (arg1) == MULT_EXPR)
7613 arg10 = TREE_OPERAND (arg1, 0);
7614 arg11 = TREE_OPERAND (arg1, 1);
7616 else if (TREE_CODE (arg1) == INTEGER_CST)
7618 arg10 = build_one_cst (type);
7619 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7620 the purpose of this canonicalization. */
7621 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7622 && negate_expr_p (arg1)
7623 && code == PLUS_EXPR)
7625 arg11 = negate_expr (arg1);
7626 code = MINUS_EXPR;
7628 else
7629 arg11 = arg1;
7631 else
7633 /* We cannot generate constant 1 for fract. */
7634 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7635 return NULL_TREE;
7636 arg10 = arg1;
7637 arg11 = build_one_cst (type);
7639 same = NULL_TREE;
7641 /* Prefer factoring a common non-constant. */
7642 if (operand_equal_p (arg00, arg10, 0))
7643 same = arg00, alt0 = arg01, alt1 = arg11;
7644 else if (operand_equal_p (arg01, arg11, 0))
7645 same = arg01, alt0 = arg00, alt1 = arg10;
7646 else if (operand_equal_p (arg00, arg11, 0))
7647 same = arg00, alt0 = arg01, alt1 = arg10;
7648 else if (operand_equal_p (arg01, arg10, 0))
7649 same = arg01, alt0 = arg00, alt1 = arg11;
7651 /* No identical multiplicands; see if we can find a common
7652 power-of-two factor in non-power-of-two multiplies. This
7653 can help in multi-dimensional array access. */
7654 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7656 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7657 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7658 HOST_WIDE_INT tmp;
7659 bool swap = false;
7660 tree maybe_same;
7662 /* Move min of absolute values to int11. */
7663 if (absu_hwi (int01) < absu_hwi (int11))
7665 tmp = int01, int01 = int11, int11 = tmp;
7666 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7667 maybe_same = arg01;
7668 swap = true;
7670 else
7671 maybe_same = arg11;
7673 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7674 if (factor > 1
7675 && pow2p_hwi (factor)
7676 && (int01 & (factor - 1)) == 0
7677 /* The remainder should not be a constant, otherwise we
7678 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7679 increased the number of multiplications necessary. */
7680 && TREE_CODE (arg10) != INTEGER_CST)
7682 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7683 build_int_cst (TREE_TYPE (arg00),
7684 int01 / int11));
7685 alt1 = arg10;
7686 same = maybe_same;
7687 if (swap)
7688 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7692 if (!same)
7693 return NULL_TREE;
7695 if (! ANY_INTEGRAL_TYPE_P (type)
7696 || TYPE_OVERFLOW_WRAPS (type)
7697 /* We are neither factoring zero nor minus one. */
7698 || TREE_CODE (same) == INTEGER_CST)
7699 return fold_build2_loc (loc, MULT_EXPR, type,
7700 fold_build2_loc (loc, code, type,
7701 fold_convert_loc (loc, type, alt0),
7702 fold_convert_loc (loc, type, alt1)),
7703 fold_convert_loc (loc, type, same));
7705 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7706 same may be minus one and thus the multiplication may overflow. Perform
7707 the sum operation in an unsigned type. */
7708 tree utype = unsigned_type_for (type);
7709 tree tem = fold_build2_loc (loc, code, utype,
7710 fold_convert_loc (loc, utype, alt0),
7711 fold_convert_loc (loc, utype, alt1));
7712 /* If the sum evaluated to a constant that is not -INF the multiplication
7713 cannot overflow. */
7714 if (TREE_CODE (tem) == INTEGER_CST
7715 && (wi::to_wide (tem)
7716 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7717 return fold_build2_loc (loc, MULT_EXPR, type,
7718 fold_convert (type, tem), same);
7720 /* Do not resort to unsigned multiplication because
7721 we lose the no-overflow property of the expression. */
7722 return NULL_TREE;
7725 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7726 specified by EXPR into the buffer PTR of length LEN bytes.
7727 Return the number of bytes placed in the buffer, or zero
7728 upon failure. */
7730 static int
7731 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7733 tree type = TREE_TYPE (expr);
7734 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7735 int byte, offset, word, words;
7736 unsigned char value;
7738 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7739 return 0;
7740 if (off == -1)
7741 off = 0;
7743 if (ptr == NULL)
7744 /* Dry run. */
7745 return MIN (len, total_bytes - off);
7747 words = total_bytes / UNITS_PER_WORD;
7749 for (byte = 0; byte < total_bytes; byte++)
7751 int bitpos = byte * BITS_PER_UNIT;
7752 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7753 number of bytes. */
7754 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7756 if (total_bytes > UNITS_PER_WORD)
7758 word = byte / UNITS_PER_WORD;
7759 if (WORDS_BIG_ENDIAN)
7760 word = (words - 1) - word;
7761 offset = word * UNITS_PER_WORD;
7762 if (BYTES_BIG_ENDIAN)
7763 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7764 else
7765 offset += byte % UNITS_PER_WORD;
7767 else
7768 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7769 if (offset >= off && offset - off < len)
7770 ptr[offset - off] = value;
7772 return MIN (len, total_bytes - off);
7776 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7777 specified by EXPR into the buffer PTR of length LEN bytes.
7778 Return the number of bytes placed in the buffer, or zero
7779 upon failure. */
7781 static int
7782 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7784 tree type = TREE_TYPE (expr);
7785 scalar_mode mode = SCALAR_TYPE_MODE (type);
7786 int total_bytes = GET_MODE_SIZE (mode);
7787 FIXED_VALUE_TYPE value;
7788 tree i_value, i_type;
7790 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7791 return 0;
7793 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7795 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7796 return 0;
7798 value = TREE_FIXED_CST (expr);
7799 i_value = double_int_to_tree (i_type, value.data);
7801 return native_encode_int (i_value, ptr, len, off);
7805 /* Subroutine of native_encode_expr. Encode the REAL_CST
7806 specified by EXPR into the buffer PTR of length LEN bytes.
7807 Return the number of bytes placed in the buffer, or zero
7808 upon failure. */
7810 static int
7811 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7813 tree type = TREE_TYPE (expr);
7814 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7815 int byte, offset, word, words, bitpos;
7816 unsigned char value;
7818 /* There are always 32 bits in each long, no matter the size of
7819 the hosts long. We handle floating point representations with
7820 up to 192 bits. */
7821 long tmp[6];
7823 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7824 return 0;
7825 if (off == -1)
7826 off = 0;
7828 if (ptr == NULL)
7829 /* Dry run. */
7830 return MIN (len, total_bytes - off);
7832 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7834 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7836 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7837 bitpos += BITS_PER_UNIT)
7839 byte = (bitpos / BITS_PER_UNIT) & 3;
7840 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7842 if (UNITS_PER_WORD < 4)
7844 word = byte / UNITS_PER_WORD;
7845 if (WORDS_BIG_ENDIAN)
7846 word = (words - 1) - word;
7847 offset = word * UNITS_PER_WORD;
7848 if (BYTES_BIG_ENDIAN)
7849 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7850 else
7851 offset += byte % UNITS_PER_WORD;
7853 else
7855 offset = byte;
7856 if (BYTES_BIG_ENDIAN)
7858 /* Reverse bytes within each long, or within the entire float
7859 if it's smaller than a long (for HFmode). */
7860 offset = MIN (3, total_bytes - 1) - offset;
7861 gcc_assert (offset >= 0);
7864 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7865 if (offset >= off
7866 && offset - off < len)
7867 ptr[offset - off] = value;
7869 return MIN (len, total_bytes - off);
7872 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7873 specified by EXPR into the buffer PTR of length LEN bytes.
7874 Return the number of bytes placed in the buffer, or zero
7875 upon failure. */
7877 static int
7878 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7880 int rsize, isize;
7881 tree part;
7883 part = TREE_REALPART (expr);
7884 rsize = native_encode_expr (part, ptr, len, off);
7885 if (off == -1 && rsize == 0)
7886 return 0;
7887 part = TREE_IMAGPART (expr);
7888 if (off != -1)
7889 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7890 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7891 len - rsize, off);
7892 if (off == -1 && isize != rsize)
7893 return 0;
7894 return rsize + isize;
7897 /* Like native_encode_vector, but only encode the first COUNT elements.
7898 The other arguments are as for native_encode_vector. */
7900 static int
7901 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7902 int off, unsigned HOST_WIDE_INT count)
7904 tree itype = TREE_TYPE (TREE_TYPE (expr));
7905 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7906 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7908 /* This is the only case in which elements can be smaller than a byte.
7909 Element 0 is always in the lsb of the containing byte. */
7910 unsigned int elt_bits = TYPE_PRECISION (itype);
7911 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7912 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7913 return 0;
7915 if (off == -1)
7916 off = 0;
7918 /* Zero the buffer and then set bits later where necessary. */
7919 int extract_bytes = MIN (len, total_bytes - off);
7920 if (ptr)
7921 memset (ptr, 0, extract_bytes);
7923 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7924 unsigned int first_elt = off * elts_per_byte;
7925 unsigned int extract_elts = extract_bytes * elts_per_byte;
7926 for (unsigned int i = 0; i < extract_elts; ++i)
7928 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7929 if (TREE_CODE (elt) != INTEGER_CST)
7930 return 0;
7932 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7934 unsigned int bit = i * elt_bits;
7935 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7938 return extract_bytes;
7941 int offset = 0;
7942 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7943 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7945 if (off >= size)
7947 off -= size;
7948 continue;
7950 tree elem = VECTOR_CST_ELT (expr, i);
7951 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7952 len - offset, off);
7953 if ((off == -1 && res != size) || res == 0)
7954 return 0;
7955 offset += res;
7956 if (offset >= len)
7957 return (off == -1 && i < count - 1) ? 0 : offset;
7958 if (off != -1)
7959 off = 0;
7961 return offset;
7964 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7965 specified by EXPR into the buffer PTR of length LEN bytes.
7966 Return the number of bytes placed in the buffer, or zero
7967 upon failure. */
7969 static int
7970 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7972 unsigned HOST_WIDE_INT count;
7973 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7974 return 0;
7975 return native_encode_vector_part (expr, ptr, len, off, count);
7979 /* Subroutine of native_encode_expr. Encode the STRING_CST
7980 specified by EXPR into the buffer PTR of length LEN bytes.
7981 Return the number of bytes placed in the buffer, or zero
7982 upon failure. */
7984 static int
7985 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7987 tree type = TREE_TYPE (expr);
7989 /* Wide-char strings are encoded in target byte-order so native
7990 encoding them is trivial. */
7991 if (BITS_PER_UNIT != CHAR_BIT
7992 || TREE_CODE (type) != ARRAY_TYPE
7993 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7994 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7995 return 0;
7997 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7998 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7999 return 0;
8000 if (off == -1)
8001 off = 0;
8002 len = MIN (total_bytes - off, len);
8003 if (ptr == NULL)
8004 /* Dry run. */;
8005 else
8007 int written = 0;
8008 if (off < TREE_STRING_LENGTH (expr))
8010 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8011 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8013 memset (ptr + written, 0, len - written);
8015 return len;
8019 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8020 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8021 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8022 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8023 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8024 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8025 Return the number of bytes placed in the buffer, or zero upon failure. */
8028 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8030 /* We don't support starting at negative offset and -1 is special. */
8031 if (off < -1)
8032 return 0;
8034 switch (TREE_CODE (expr))
8036 case INTEGER_CST:
8037 return native_encode_int (expr, ptr, len, off);
8039 case REAL_CST:
8040 return native_encode_real (expr, ptr, len, off);
8042 case FIXED_CST:
8043 return native_encode_fixed (expr, ptr, len, off);
8045 case COMPLEX_CST:
8046 return native_encode_complex (expr, ptr, len, off);
8048 case VECTOR_CST:
8049 return native_encode_vector (expr, ptr, len, off);
8051 case STRING_CST:
8052 return native_encode_string (expr, ptr, len, off);
8054 default:
8055 return 0;
8059 /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
8060 or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
8061 of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
8062 machine modes, we can't just use build_nonstandard_integer_type. */
8064 tree
8065 find_bitfield_repr_type (int fieldsize, int len)
8067 machine_mode mode;
8068 for (int pass = 0; pass < 2; pass++)
8070 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8071 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8072 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8073 && known_eq (GET_MODE_PRECISION (mode),
8074 GET_MODE_BITSIZE (mode))
8075 && known_le (GET_MODE_SIZE (mode), len))
8077 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8078 if (ret && TYPE_MODE (ret) == mode)
8079 return ret;
8083 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8084 if (int_n_enabled_p[i]
8085 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8086 && int_n_trees[i].unsigned_type)
8088 tree ret = int_n_trees[i].unsigned_type;
8089 mode = TYPE_MODE (ret);
8090 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8091 && known_eq (GET_MODE_PRECISION (mode),
8092 GET_MODE_BITSIZE (mode))
8093 && known_le (GET_MODE_SIZE (mode), len))
8094 return ret;
8097 return NULL_TREE;
8100 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8101 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8102 to be non-NULL and OFF zero), then in addition to filling the
8103 bytes pointed by PTR with the value also clear any bits pointed
8104 by MASK that are known to be initialized, keep them as is for
8105 e.g. uninitialized padding bits or uninitialized fields. */
8108 native_encode_initializer (tree init, unsigned char *ptr, int len,
8109 int off, unsigned char *mask)
8111 int r;
8113 /* We don't support starting at negative offset and -1 is special. */
8114 if (off < -1 || init == NULL_TREE)
8115 return 0;
8117 gcc_assert (mask == NULL || (off == 0 && ptr));
8119 STRIP_NOPS (init);
8120 switch (TREE_CODE (init))
8122 case VIEW_CONVERT_EXPR:
8123 case NON_LVALUE_EXPR:
8124 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8125 mask);
8126 default:
8127 r = native_encode_expr (init, ptr, len, off);
8128 if (mask)
8129 memset (mask, 0, r);
8130 return r;
8131 case CONSTRUCTOR:
8132 tree type = TREE_TYPE (init);
8133 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8134 if (total_bytes < 0)
8135 return 0;
8136 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8137 return 0;
8138 int o = off == -1 ? 0 : off;
8139 if (TREE_CODE (type) == ARRAY_TYPE)
8141 tree min_index;
8142 unsigned HOST_WIDE_INT cnt;
8143 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8144 constructor_elt *ce;
8146 if (!TYPE_DOMAIN (type)
8147 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8148 return 0;
8150 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8151 if (fieldsize <= 0)
8152 return 0;
8154 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8155 if (ptr)
8156 memset (ptr, '\0', MIN (total_bytes - off, len));
8158 for (cnt = 0; ; cnt++)
8160 tree val = NULL_TREE, index = NULL_TREE;
8161 HOST_WIDE_INT pos = curpos, count = 0;
8162 bool full = false;
8163 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8165 val = ce->value;
8166 index = ce->index;
8168 else if (mask == NULL
8169 || CONSTRUCTOR_NO_CLEARING (init)
8170 || curpos >= total_bytes)
8171 break;
8172 else
8173 pos = total_bytes;
8175 if (index && TREE_CODE (index) == RANGE_EXPR)
8177 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8178 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8179 return 0;
8180 offset_int wpos
8181 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8182 - wi::to_offset (min_index),
8183 TYPE_PRECISION (sizetype));
8184 wpos *= fieldsize;
8185 if (!wi::fits_shwi_p (pos))
8186 return 0;
8187 pos = wpos.to_shwi ();
8188 offset_int wcount
8189 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8190 - wi::to_offset (TREE_OPERAND (index, 0)),
8191 TYPE_PRECISION (sizetype));
8192 if (!wi::fits_shwi_p (wcount))
8193 return 0;
8194 count = wcount.to_shwi ();
8196 else if (index)
8198 if (TREE_CODE (index) != INTEGER_CST)
8199 return 0;
8200 offset_int wpos
8201 = wi::sext (wi::to_offset (index)
8202 - wi::to_offset (min_index),
8203 TYPE_PRECISION (sizetype));
8204 wpos *= fieldsize;
8205 if (!wi::fits_shwi_p (wpos))
8206 return 0;
8207 pos = wpos.to_shwi ();
8210 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8212 if (valueinit == -1)
8214 tree zero = build_zero_cst (TREE_TYPE (type));
8215 r = native_encode_initializer (zero, ptr + curpos,
8216 fieldsize, 0,
8217 mask + curpos);
8218 if (TREE_CODE (zero) == CONSTRUCTOR)
8219 ggc_free (zero);
8220 if (!r)
8221 return 0;
8222 valueinit = curpos;
8223 curpos += fieldsize;
8225 while (curpos != pos)
8227 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8228 memcpy (mask + curpos, mask + valueinit, fieldsize);
8229 curpos += fieldsize;
8233 curpos = pos;
8234 if (val)
8237 if (off == -1
8238 || (curpos >= off
8239 && (curpos + fieldsize
8240 <= (HOST_WIDE_INT) off + len)))
8242 if (full)
8244 if (ptr)
8245 memcpy (ptr + (curpos - o), ptr + (pos - o),
8246 fieldsize);
8247 if (mask)
8248 memcpy (mask + curpos, mask + pos, fieldsize);
8250 else if (!native_encode_initializer (val,
8252 ? ptr + curpos - o
8253 : NULL,
8254 fieldsize,
8255 off == -1 ? -1
8256 : 0,
8257 mask
8258 ? mask + curpos
8259 : NULL))
8260 return 0;
8261 else
8263 full = true;
8264 pos = curpos;
8267 else if (curpos + fieldsize > off
8268 && curpos < (HOST_WIDE_INT) off + len)
8270 /* Partial overlap. */
8271 unsigned char *p = NULL;
8272 int no = 0;
8273 int l;
8274 gcc_assert (mask == NULL);
8275 if (curpos >= off)
8277 if (ptr)
8278 p = ptr + curpos - off;
8279 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8280 fieldsize);
8282 else
8284 p = ptr;
8285 no = off - curpos;
8286 l = len;
8288 if (!native_encode_initializer (val, p, l, no, NULL))
8289 return 0;
8291 curpos += fieldsize;
8293 while (count-- != 0);
8295 return MIN (total_bytes - off, len);
8297 else if (TREE_CODE (type) == RECORD_TYPE
8298 || TREE_CODE (type) == UNION_TYPE)
8300 unsigned HOST_WIDE_INT cnt;
8301 constructor_elt *ce;
8302 tree fld_base = TYPE_FIELDS (type);
8303 tree to_free = NULL_TREE;
8305 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8306 if (ptr != NULL)
8307 memset (ptr, '\0', MIN (total_bytes - o, len));
8308 for (cnt = 0; ; cnt++)
8310 tree val = NULL_TREE, field = NULL_TREE;
8311 HOST_WIDE_INT pos = 0, fieldsize;
8312 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8314 if (to_free)
8316 ggc_free (to_free);
8317 to_free = NULL_TREE;
8320 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8322 val = ce->value;
8323 field = ce->index;
8324 if (field == NULL_TREE)
8325 return 0;
8327 pos = int_byte_position (field);
8328 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8329 continue;
8331 else if (mask == NULL
8332 || CONSTRUCTOR_NO_CLEARING (init))
8333 break;
8334 else
8335 pos = total_bytes;
8337 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8339 tree fld;
8340 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8342 if (TREE_CODE (fld) != FIELD_DECL)
8343 continue;
8344 if (fld == field)
8345 break;
8346 if (DECL_PADDING_P (fld))
8347 continue;
8348 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8349 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8350 return 0;
8351 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8352 continue;
8353 break;
8355 if (fld == NULL_TREE)
8357 if (ce == NULL)
8358 break;
8359 return 0;
8361 fld_base = DECL_CHAIN (fld);
8362 if (fld != field)
8364 cnt--;
8365 field = fld;
8366 pos = int_byte_position (field);
8367 val = build_zero_cst (TREE_TYPE (fld));
8368 if (TREE_CODE (val) == CONSTRUCTOR)
8369 to_free = val;
8373 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8374 && TYPE_DOMAIN (TREE_TYPE (field))
8375 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8377 if (mask || off != -1)
8378 return 0;
8379 if (val == NULL_TREE)
8380 continue;
8381 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8382 return 0;
8383 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8384 if (fieldsize < 0
8385 || (int) fieldsize != fieldsize
8386 || (pos + fieldsize) > INT_MAX)
8387 return 0;
8388 if (pos + fieldsize > total_bytes)
8390 if (ptr != NULL && total_bytes < len)
8391 memset (ptr + total_bytes, '\0',
8392 MIN (pos + fieldsize, len) - total_bytes);
8393 total_bytes = pos + fieldsize;
8396 else
8398 if (DECL_SIZE_UNIT (field) == NULL_TREE
8399 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8400 return 0;
8401 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8403 if (fieldsize == 0)
8404 continue;
8406 if (DECL_BIT_FIELD (field))
8408 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8409 return 0;
8410 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8411 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8412 if (bpos % BITS_PER_UNIT)
8413 bpos %= BITS_PER_UNIT;
8414 else
8415 bpos = 0;
8416 fieldsize += bpos;
8417 epos = fieldsize % BITS_PER_UNIT;
8418 fieldsize += BITS_PER_UNIT - 1;
8419 fieldsize /= BITS_PER_UNIT;
8422 if (off != -1 && pos + fieldsize <= off)
8423 continue;
8425 if (val == NULL_TREE)
8426 continue;
8428 if (DECL_BIT_FIELD (field))
8430 /* FIXME: Handle PDP endian. */
8431 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8432 return 0;
8434 if (TREE_CODE (val) != INTEGER_CST)
8435 return 0;
8437 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8438 tree repr_type = NULL_TREE;
8439 HOST_WIDE_INT rpos = 0;
8440 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8442 rpos = int_byte_position (repr);
8443 repr_type = TREE_TYPE (repr);
8445 else
8447 repr_type = find_bitfield_repr_type (fieldsize, len);
8448 if (repr_type == NULL_TREE)
8449 return 0;
8450 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8451 gcc_assert (repr_size > 0 && repr_size <= len);
8452 if (pos + repr_size <= o + len)
8453 rpos = pos;
8454 else
8456 rpos = o + len - repr_size;
8457 gcc_assert (rpos <= pos);
8461 if (rpos > pos)
8462 return 0;
8463 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8464 int diff = (TYPE_PRECISION (repr_type)
8465 - TYPE_PRECISION (TREE_TYPE (field)));
8466 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8467 if (!BYTES_BIG_ENDIAN)
8468 w = wi::lshift (w, bitoff);
8469 else
8470 w = wi::lshift (w, diff - bitoff);
8471 val = wide_int_to_tree (repr_type, w);
8473 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8474 / BITS_PER_UNIT + 1];
8475 int l = native_encode_int (val, buf, sizeof buf, 0);
8476 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8477 return 0;
8479 if (ptr == NULL)
8480 continue;
8482 /* If the bitfield does not start at byte boundary, handle
8483 the partial byte at the start. */
8484 if (bpos
8485 && (off == -1 || (pos >= off && len >= 1)))
8487 if (!BYTES_BIG_ENDIAN)
8489 int msk = (1 << bpos) - 1;
8490 buf[pos - rpos] &= ~msk;
8491 buf[pos - rpos] |= ptr[pos - o] & msk;
8492 if (mask)
8494 if (fieldsize > 1 || epos == 0)
8495 mask[pos] &= msk;
8496 else
8497 mask[pos] &= (msk | ~((1 << epos) - 1));
8500 else
8502 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8503 buf[pos - rpos] &= msk;
8504 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8505 if (mask)
8507 if (fieldsize > 1 || epos == 0)
8508 mask[pos] &= ~msk;
8509 else
8510 mask[pos] &= (~msk
8511 | ((1 << (BITS_PER_UNIT - epos))
8512 - 1));
8516 /* If the bitfield does not end at byte boundary, handle
8517 the partial byte at the end. */
8518 if (epos
8519 && (off == -1
8520 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8522 if (!BYTES_BIG_ENDIAN)
8524 int msk = (1 << epos) - 1;
8525 buf[pos - rpos + fieldsize - 1] &= msk;
8526 buf[pos - rpos + fieldsize - 1]
8527 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8528 if (mask && (fieldsize > 1 || bpos == 0))
8529 mask[pos + fieldsize - 1] &= ~msk;
8531 else
8533 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8534 buf[pos - rpos + fieldsize - 1] &= ~msk;
8535 buf[pos - rpos + fieldsize - 1]
8536 |= ptr[pos + fieldsize - 1 - o] & msk;
8537 if (mask && (fieldsize > 1 || bpos == 0))
8538 mask[pos + fieldsize - 1] &= msk;
8541 if (off == -1
8542 || (pos >= off
8543 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8545 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8546 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8547 memset (mask + pos + (bpos != 0), 0,
8548 fieldsize - (bpos != 0) - (epos != 0));
8550 else
8552 /* Partial overlap. */
8553 HOST_WIDE_INT fsz = fieldsize;
8554 gcc_assert (mask == NULL);
8555 if (pos < off)
8557 fsz -= (off - pos);
8558 pos = off;
8560 if (pos + fsz > (HOST_WIDE_INT) off + len)
8561 fsz = (HOST_WIDE_INT) off + len - pos;
8562 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8564 continue;
8567 if (off == -1
8568 || (pos >= off
8569 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8571 int fldsize = fieldsize;
8572 if (off == -1)
8574 tree fld = DECL_CHAIN (field);
8575 while (fld)
8577 if (TREE_CODE (fld) == FIELD_DECL)
8578 break;
8579 fld = DECL_CHAIN (fld);
8581 if (fld == NULL_TREE)
8582 fldsize = len - pos;
8584 r = native_encode_initializer (val, ptr ? ptr + pos - o
8585 : NULL,
8586 fldsize,
8587 off == -1 ? -1 : 0,
8588 mask ? mask + pos : NULL);
8589 if (!r)
8590 return 0;
8591 if (off == -1
8592 && fldsize != fieldsize
8593 && r > fieldsize
8594 && pos + r > total_bytes)
8595 total_bytes = pos + r;
8597 else
8599 /* Partial overlap. */
8600 unsigned char *p = NULL;
8601 int no = 0;
8602 int l;
8603 gcc_assert (mask == NULL);
8604 if (pos >= off)
8606 if (ptr)
8607 p = ptr + pos - off;
8608 l = MIN ((HOST_WIDE_INT) off + len - pos,
8609 fieldsize);
8611 else
8613 p = ptr;
8614 no = off - pos;
8615 l = len;
8617 if (!native_encode_initializer (val, p, l, no, NULL))
8618 return 0;
8621 return MIN (total_bytes - off, len);
8623 return 0;
8628 /* Subroutine of native_interpret_expr. Interpret the contents of
8629 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8630 If the buffer cannot be interpreted, return NULL_TREE. */
8632 static tree
8633 native_interpret_int (tree type, const unsigned char *ptr, int len)
8635 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8637 if (total_bytes > len
8638 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8639 return NULL_TREE;
8641 wide_int result = wi::from_buffer (ptr, total_bytes);
8643 return wide_int_to_tree (type, result);
8647 /* Subroutine of native_interpret_expr. Interpret the contents of
8648 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8649 If the buffer cannot be interpreted, return NULL_TREE. */
8651 static tree
8652 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8654 scalar_mode mode = SCALAR_TYPE_MODE (type);
8655 int total_bytes = GET_MODE_SIZE (mode);
8656 double_int result;
8657 FIXED_VALUE_TYPE fixed_value;
8659 if (total_bytes > len
8660 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8661 return NULL_TREE;
8663 result = double_int::from_buffer (ptr, total_bytes);
8664 fixed_value = fixed_from_double_int (result, mode);
8666 return build_fixed (type, fixed_value);
8670 /* Subroutine of native_interpret_expr. Interpret the contents of
8671 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8672 If the buffer cannot be interpreted, return NULL_TREE. */
8674 tree
8675 native_interpret_real (tree type, const unsigned char *ptr, int len)
8677 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8678 int total_bytes = GET_MODE_SIZE (mode);
8679 unsigned char value;
8680 /* There are always 32 bits in each long, no matter the size of
8681 the hosts long. We handle floating point representations with
8682 up to 192 bits. */
8683 REAL_VALUE_TYPE r;
8684 long tmp[6];
8686 if (total_bytes > len || total_bytes > 24)
8687 return NULL_TREE;
8688 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8690 memset (tmp, 0, sizeof (tmp));
8691 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8692 bitpos += BITS_PER_UNIT)
8694 /* Both OFFSET and BYTE index within a long;
8695 bitpos indexes the whole float. */
8696 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8697 if (UNITS_PER_WORD < 4)
8699 int word = byte / UNITS_PER_WORD;
8700 if (WORDS_BIG_ENDIAN)
8701 word = (words - 1) - word;
8702 offset = word * UNITS_PER_WORD;
8703 if (BYTES_BIG_ENDIAN)
8704 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8705 else
8706 offset += byte % UNITS_PER_WORD;
8708 else
8710 offset = byte;
8711 if (BYTES_BIG_ENDIAN)
8713 /* Reverse bytes within each long, or within the entire float
8714 if it's smaller than a long (for HFmode). */
8715 offset = MIN (3, total_bytes - 1) - offset;
8716 gcc_assert (offset >= 0);
8719 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8721 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8724 real_from_target (&r, tmp, mode);
8725 return build_real (type, r);
8729 /* Subroutine of native_interpret_expr. Interpret the contents of
8730 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8731 If the buffer cannot be interpreted, return NULL_TREE. */
8733 static tree
8734 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8736 tree etype, rpart, ipart;
8737 int size;
8739 etype = TREE_TYPE (type);
8740 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8741 if (size * 2 > len)
8742 return NULL_TREE;
8743 rpart = native_interpret_expr (etype, ptr, size);
8744 if (!rpart)
8745 return NULL_TREE;
8746 ipart = native_interpret_expr (etype, ptr+size, size);
8747 if (!ipart)
8748 return NULL_TREE;
8749 return build_complex (type, rpart, ipart);
8752 /* Read a vector of type TYPE from the target memory image given by BYTES,
8753 which contains LEN bytes. The vector is known to be encodable using
8754 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8756 Return the vector on success, otherwise return null. */
8758 static tree
8759 native_interpret_vector_part (tree type, const unsigned char *bytes,
8760 unsigned int len, unsigned int npatterns,
8761 unsigned int nelts_per_pattern)
8763 tree elt_type = TREE_TYPE (type);
8764 if (VECTOR_BOOLEAN_TYPE_P (type)
8765 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8767 /* This is the only case in which elements can be smaller than a byte.
8768 Element 0 is always in the lsb of the containing byte. */
8769 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8770 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8771 return NULL_TREE;
8773 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8774 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8776 unsigned int bit_index = i * elt_bits;
8777 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8778 unsigned int lsb = bit_index % BITS_PER_UNIT;
8779 builder.quick_push (bytes[byte_index] & (1 << lsb)
8780 ? build_all_ones_cst (elt_type)
8781 : build_zero_cst (elt_type));
8783 return builder.build ();
8786 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8787 if (elt_bytes * npatterns * nelts_per_pattern > len)
8788 return NULL_TREE;
8790 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8791 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8793 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8794 if (!elt)
8795 return NULL_TREE;
8796 builder.quick_push (elt);
8797 bytes += elt_bytes;
8799 return builder.build ();
8802 /* Subroutine of native_interpret_expr. Interpret the contents of
8803 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8804 If the buffer cannot be interpreted, return NULL_TREE. */
8806 static tree
8807 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8809 tree etype;
8810 unsigned int size;
8811 unsigned HOST_WIDE_INT count;
8813 etype = TREE_TYPE (type);
8814 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8815 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8816 || size * count > len)
8817 return NULL_TREE;
8819 return native_interpret_vector_part (type, ptr, len, count, 1);
8823 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8824 the buffer PTR of length LEN as a constant of type TYPE. For
8825 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8826 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8827 return NULL_TREE. */
8829 tree
8830 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8832 switch (TREE_CODE (type))
8834 case INTEGER_TYPE:
8835 case ENUMERAL_TYPE:
8836 case BOOLEAN_TYPE:
8837 case POINTER_TYPE:
8838 case REFERENCE_TYPE:
8839 case OFFSET_TYPE:
8840 return native_interpret_int (type, ptr, len);
8842 case REAL_TYPE:
8843 if (tree ret = native_interpret_real (type, ptr, len))
8845 /* For floating point values in composite modes, punt if this
8846 folding doesn't preserve bit representation. As the mode doesn't
8847 have fixed precision while GCC pretends it does, there could be
8848 valid values that GCC can't really represent accurately.
8849 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8850 bit combinationations which GCC doesn't preserve. */
8851 unsigned char buf[24];
8852 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8853 int total_bytes = GET_MODE_SIZE (mode);
8854 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8855 || memcmp (ptr, buf, total_bytes) != 0)
8856 return NULL_TREE;
8857 return ret;
8859 return NULL_TREE;
8861 case FIXED_POINT_TYPE:
8862 return native_interpret_fixed (type, ptr, len);
8864 case COMPLEX_TYPE:
8865 return native_interpret_complex (type, ptr, len);
8867 case VECTOR_TYPE:
8868 return native_interpret_vector (type, ptr, len);
8870 default:
8871 return NULL_TREE;
8875 /* Returns true if we can interpret the contents of a native encoding
8876 as TYPE. */
8878 bool
8879 can_native_interpret_type_p (tree type)
8881 switch (TREE_CODE (type))
8883 case INTEGER_TYPE:
8884 case ENUMERAL_TYPE:
8885 case BOOLEAN_TYPE:
8886 case POINTER_TYPE:
8887 case REFERENCE_TYPE:
8888 case FIXED_POINT_TYPE:
8889 case REAL_TYPE:
8890 case COMPLEX_TYPE:
8891 case VECTOR_TYPE:
8892 case OFFSET_TYPE:
8893 return true;
8894 default:
8895 return false;
8899 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8900 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8902 tree
8903 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8904 int len)
8906 vec<constructor_elt, va_gc> *elts = NULL;
8907 if (TREE_CODE (type) == ARRAY_TYPE)
8909 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8910 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8911 return NULL_TREE;
8913 HOST_WIDE_INT cnt = 0;
8914 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8916 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8917 return NULL_TREE;
8918 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8920 if (eltsz == 0)
8921 cnt = 0;
8922 HOST_WIDE_INT pos = 0;
8923 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8925 tree v = NULL_TREE;
8926 if (pos >= len || pos + eltsz > len)
8927 return NULL_TREE;
8928 if (can_native_interpret_type_p (TREE_TYPE (type)))
8930 v = native_interpret_expr (TREE_TYPE (type),
8931 ptr + off + pos, eltsz);
8932 if (v == NULL_TREE)
8933 return NULL_TREE;
8935 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8936 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8937 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8938 eltsz);
8939 if (v == NULL_TREE)
8940 return NULL_TREE;
8941 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8943 return build_constructor (type, elts);
8945 if (TREE_CODE (type) != RECORD_TYPE)
8946 return NULL_TREE;
8947 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8949 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8950 continue;
8951 tree fld = field;
8952 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8953 int diff = 0;
8954 tree v = NULL_TREE;
8955 if (DECL_BIT_FIELD (field))
8957 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8958 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8960 poly_int64 bitoffset;
8961 poly_uint64 field_offset, fld_offset;
8962 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8963 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8964 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8965 else
8966 bitoffset = 0;
8967 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8968 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8969 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8970 - TYPE_PRECISION (TREE_TYPE (field)));
8971 if (!bitoffset.is_constant (&bitoff)
8972 || bitoff < 0
8973 || bitoff > diff)
8974 return NULL_TREE;
8976 else
8978 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8979 return NULL_TREE;
8980 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8981 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8982 bpos %= BITS_PER_UNIT;
8983 fieldsize += bpos;
8984 fieldsize += BITS_PER_UNIT - 1;
8985 fieldsize /= BITS_PER_UNIT;
8986 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8987 if (repr_type == NULL_TREE)
8988 return NULL_TREE;
8989 sz = int_size_in_bytes (repr_type);
8990 if (sz < 0 || sz > len)
8991 return NULL_TREE;
8992 pos = int_byte_position (field);
8993 if (pos < 0 || pos > len || pos + fieldsize > len)
8994 return NULL_TREE;
8995 HOST_WIDE_INT rpos;
8996 if (pos + sz <= len)
8997 rpos = pos;
8998 else
9000 rpos = len - sz;
9001 gcc_assert (rpos <= pos);
9003 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9004 pos = rpos;
9005 diff = (TYPE_PRECISION (repr_type)
9006 - TYPE_PRECISION (TREE_TYPE (field)));
9007 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9008 if (v == NULL_TREE)
9009 return NULL_TREE;
9010 fld = NULL_TREE;
9014 if (fld)
9016 sz = int_size_in_bytes (TREE_TYPE (fld));
9017 if (sz < 0 || sz > len)
9018 return NULL_TREE;
9019 tree byte_pos = byte_position (fld);
9020 if (!tree_fits_shwi_p (byte_pos))
9021 return NULL_TREE;
9022 pos = tree_to_shwi (byte_pos);
9023 if (pos < 0 || pos > len || pos + sz > len)
9024 return NULL_TREE;
9026 if (fld == NULL_TREE)
9027 /* Already handled above. */;
9028 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9030 v = native_interpret_expr (TREE_TYPE (fld),
9031 ptr + off + pos, sz);
9032 if (v == NULL_TREE)
9033 return NULL_TREE;
9035 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9036 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9037 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9038 if (v == NULL_TREE)
9039 return NULL_TREE;
9040 if (fld != field)
9042 if (TREE_CODE (v) != INTEGER_CST)
9043 return NULL_TREE;
9045 /* FIXME: Figure out how to handle PDP endian bitfields. */
9046 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9047 return NULL_TREE;
9048 if (!BYTES_BIG_ENDIAN)
9049 v = wide_int_to_tree (TREE_TYPE (field),
9050 wi::lrshift (wi::to_wide (v), bitoff));
9051 else
9052 v = wide_int_to_tree (TREE_TYPE (field),
9053 wi::lrshift (wi::to_wide (v),
9054 diff - bitoff));
9056 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9058 return build_constructor (type, elts);
9061 /* Routines for manipulation of native_encode_expr encoded data if the encoded
9062 or extracted constant positions and/or sizes aren't byte aligned. */
9064 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9065 bits between adjacent elements. AMNT should be within
9066 [0, BITS_PER_UNIT).
9067 Example, AMNT = 2:
9068 00011111|11100000 << 2 = 01111111|10000000
9069 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9071 void
9072 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9073 unsigned int amnt)
9075 if (amnt == 0)
9076 return;
9078 unsigned char carry_over = 0U;
9079 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9080 unsigned char clear_mask = (~0U) << amnt;
9082 for (unsigned int i = 0; i < sz; i++)
9084 unsigned prev_carry_over = carry_over;
9085 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9087 ptr[i] <<= amnt;
9088 if (i != 0)
9090 ptr[i] &= clear_mask;
9091 ptr[i] |= prev_carry_over;
9096 /* Like shift_bytes_in_array_left but for big-endian.
9097 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9098 bits between adjacent elements. AMNT should be within
9099 [0, BITS_PER_UNIT).
9100 Example, AMNT = 2:
9101 00011111|11100000 >> 2 = 00000111|11111000
9102 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9104 void
9105 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9106 unsigned int amnt)
9108 if (amnt == 0)
9109 return;
9111 unsigned char carry_over = 0U;
9112 unsigned char carry_mask = ~(~0U << amnt);
9114 for (unsigned int i = 0; i < sz; i++)
9116 unsigned prev_carry_over = carry_over;
9117 carry_over = ptr[i] & carry_mask;
9119 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9120 ptr[i] >>= amnt;
9121 ptr[i] |= prev_carry_over;
9125 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9126 directly on the VECTOR_CST encoding, in a way that works for variable-
9127 length vectors. Return the resulting VECTOR_CST on success or null
9128 on failure. */
9130 static tree
9131 fold_view_convert_vector_encoding (tree type, tree expr)
9133 tree expr_type = TREE_TYPE (expr);
9134 poly_uint64 type_bits, expr_bits;
9135 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9136 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9137 return NULL_TREE;
9139 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9140 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9141 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9142 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9144 /* We can only preserve the semantics of a stepped pattern if the new
9145 vector element is an integer of the same size. */
9146 if (VECTOR_CST_STEPPED_P (expr)
9147 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9148 return NULL_TREE;
9150 /* The number of bits needed to encode one element from every pattern
9151 of the original vector. */
9152 unsigned int expr_sequence_bits
9153 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9155 /* The number of bits needed to encode one element from every pattern
9156 of the result. */
9157 unsigned int type_sequence_bits
9158 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9160 /* Don't try to read more bytes than are available, which can happen
9161 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9162 The general VIEW_CONVERT handling can cope with that case, so there's
9163 no point complicating things here. */
9164 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9165 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9166 BITS_PER_UNIT);
9167 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9168 if (known_gt (buffer_bits, expr_bits))
9169 return NULL_TREE;
9171 /* Get enough bytes of EXPR to form the new encoding. */
9172 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9173 buffer.quick_grow (buffer_bytes);
9174 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9175 buffer_bits / expr_elt_bits)
9176 != (int) buffer_bytes)
9177 return NULL_TREE;
9179 /* Reencode the bytes as TYPE. */
9180 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9181 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9182 type_npatterns, nelts_per_pattern);
9185 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9186 TYPE at compile-time. If we're unable to perform the conversion
9187 return NULL_TREE. */
9189 static tree
9190 fold_view_convert_expr (tree type, tree expr)
9192 /* We support up to 512-bit values (for V8DFmode). */
9193 unsigned char buffer[64];
9194 int len;
9196 /* Check that the host and target are sane. */
9197 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9198 return NULL_TREE;
9200 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9201 if (tree res = fold_view_convert_vector_encoding (type, expr))
9202 return res;
9204 len = native_encode_expr (expr, buffer, sizeof (buffer));
9205 if (len == 0)
9206 return NULL_TREE;
9208 return native_interpret_expr (type, buffer, len);
9211 /* Build an expression for the address of T. Folds away INDIRECT_REF
9212 to avoid confusing the gimplify process. */
9214 tree
9215 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9217 /* The size of the object is not relevant when talking about its address. */
9218 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9219 t = TREE_OPERAND (t, 0);
9221 if (TREE_CODE (t) == INDIRECT_REF)
9223 t = TREE_OPERAND (t, 0);
9225 if (TREE_TYPE (t) != ptrtype)
9226 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9228 else if (TREE_CODE (t) == MEM_REF
9229 && integer_zerop (TREE_OPERAND (t, 1)))
9231 t = TREE_OPERAND (t, 0);
9233 if (TREE_TYPE (t) != ptrtype)
9234 t = fold_convert_loc (loc, ptrtype, t);
9236 else if (TREE_CODE (t) == MEM_REF
9237 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9238 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9239 TREE_OPERAND (t, 0),
9240 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9241 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9243 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9245 if (TREE_TYPE (t) != ptrtype)
9246 t = fold_convert_loc (loc, ptrtype, t);
9248 else
9249 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9251 return t;
9254 /* Build an expression for the address of T. */
9256 tree
9257 build_fold_addr_expr_loc (location_t loc, tree t)
9259 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9261 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9264 /* Fold a unary expression of code CODE and type TYPE with operand
9265 OP0. Return the folded expression if folding is successful.
9266 Otherwise, return NULL_TREE. */
9268 tree
9269 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9271 tree tem;
9272 tree arg0;
9273 enum tree_code_class kind = TREE_CODE_CLASS (code);
9275 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9276 && TREE_CODE_LENGTH (code) == 1);
9278 arg0 = op0;
9279 if (arg0)
9281 if (CONVERT_EXPR_CODE_P (code)
9282 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9284 /* Don't use STRIP_NOPS, because signedness of argument type
9285 matters. */
9286 STRIP_SIGN_NOPS (arg0);
9288 else
9290 /* Strip any conversions that don't change the mode. This
9291 is safe for every expression, except for a comparison
9292 expression because its signedness is derived from its
9293 operands.
9295 Note that this is done as an internal manipulation within
9296 the constant folder, in order to find the simplest
9297 representation of the arguments so that their form can be
9298 studied. In any cases, the appropriate type conversions
9299 should be put back in the tree that will get out of the
9300 constant folder. */
9301 STRIP_NOPS (arg0);
9304 if (CONSTANT_CLASS_P (arg0))
9306 tree tem = const_unop (code, type, arg0);
9307 if (tem)
9309 if (TREE_TYPE (tem) != type)
9310 tem = fold_convert_loc (loc, type, tem);
9311 return tem;
9316 tem = generic_simplify (loc, code, type, op0);
9317 if (tem)
9318 return tem;
9320 if (TREE_CODE_CLASS (code) == tcc_unary)
9322 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9323 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9324 fold_build1_loc (loc, code, type,
9325 fold_convert_loc (loc, TREE_TYPE (op0),
9326 TREE_OPERAND (arg0, 1))));
9327 else if (TREE_CODE (arg0) == COND_EXPR)
9329 tree arg01 = TREE_OPERAND (arg0, 1);
9330 tree arg02 = TREE_OPERAND (arg0, 2);
9331 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9332 arg01 = fold_build1_loc (loc, code, type,
9333 fold_convert_loc (loc,
9334 TREE_TYPE (op0), arg01));
9335 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9336 arg02 = fold_build1_loc (loc, code, type,
9337 fold_convert_loc (loc,
9338 TREE_TYPE (op0), arg02));
9339 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9340 arg01, arg02);
9342 /* If this was a conversion, and all we did was to move into
9343 inside the COND_EXPR, bring it back out. But leave it if
9344 it is a conversion from integer to integer and the
9345 result precision is no wider than a word since such a
9346 conversion is cheap and may be optimized away by combine,
9347 while it couldn't if it were outside the COND_EXPR. Then return
9348 so we don't get into an infinite recursion loop taking the
9349 conversion out and then back in. */
9351 if ((CONVERT_EXPR_CODE_P (code)
9352 || code == NON_LVALUE_EXPR)
9353 && TREE_CODE (tem) == COND_EXPR
9354 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9355 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9356 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9357 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9358 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9359 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9360 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9361 && (INTEGRAL_TYPE_P
9362 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9363 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9364 || flag_syntax_only))
9365 tem = build1_loc (loc, code, type,
9366 build3 (COND_EXPR,
9367 TREE_TYPE (TREE_OPERAND
9368 (TREE_OPERAND (tem, 1), 0)),
9369 TREE_OPERAND (tem, 0),
9370 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9371 TREE_OPERAND (TREE_OPERAND (tem, 2),
9372 0)));
9373 return tem;
9377 switch (code)
9379 case NON_LVALUE_EXPR:
9380 if (!maybe_lvalue_p (op0))
9381 return fold_convert_loc (loc, type, op0);
9382 return NULL_TREE;
9384 CASE_CONVERT:
9385 case FLOAT_EXPR:
9386 case FIX_TRUNC_EXPR:
9387 if (COMPARISON_CLASS_P (op0))
9389 /* If we have (type) (a CMP b) and type is an integral type, return
9390 new expression involving the new type. Canonicalize
9391 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9392 non-integral type.
9393 Do not fold the result as that would not simplify further, also
9394 folding again results in recursions. */
9395 if (TREE_CODE (type) == BOOLEAN_TYPE)
9396 return build2_loc (loc, TREE_CODE (op0), type,
9397 TREE_OPERAND (op0, 0),
9398 TREE_OPERAND (op0, 1));
9399 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9400 && TREE_CODE (type) != VECTOR_TYPE)
9401 return build3_loc (loc, COND_EXPR, type, op0,
9402 constant_boolean_node (true, type),
9403 constant_boolean_node (false, type));
9406 /* Handle (T *)&A.B.C for A being of type T and B and C
9407 living at offset zero. This occurs frequently in
9408 C++ upcasting and then accessing the base. */
9409 if (TREE_CODE (op0) == ADDR_EXPR
9410 && POINTER_TYPE_P (type)
9411 && handled_component_p (TREE_OPERAND (op0, 0)))
9413 poly_int64 bitsize, bitpos;
9414 tree offset;
9415 machine_mode mode;
9416 int unsignedp, reversep, volatilep;
9417 tree base
9418 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9419 &offset, &mode, &unsignedp, &reversep,
9420 &volatilep);
9421 /* If the reference was to a (constant) zero offset, we can use
9422 the address of the base if it has the same base type
9423 as the result type and the pointer type is unqualified. */
9424 if (!offset
9425 && known_eq (bitpos, 0)
9426 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9427 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9428 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9429 return fold_convert_loc (loc, type,
9430 build_fold_addr_expr_loc (loc, base));
9433 if (TREE_CODE (op0) == MODIFY_EXPR
9434 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9435 /* Detect assigning a bitfield. */
9436 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9437 && DECL_BIT_FIELD
9438 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9440 /* Don't leave an assignment inside a conversion
9441 unless assigning a bitfield. */
9442 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9443 /* First do the assignment, then return converted constant. */
9444 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9445 suppress_warning (tem /* What warning? */);
9446 TREE_USED (tem) = 1;
9447 return tem;
9450 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9451 constants (if x has signed type, the sign bit cannot be set
9452 in c). This folds extension into the BIT_AND_EXPR.
9453 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9454 very likely don't have maximal range for their precision and this
9455 transformation effectively doesn't preserve non-maximal ranges. */
9456 if (TREE_CODE (type) == INTEGER_TYPE
9457 && TREE_CODE (op0) == BIT_AND_EXPR
9458 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9460 tree and_expr = op0;
9461 tree and0 = TREE_OPERAND (and_expr, 0);
9462 tree and1 = TREE_OPERAND (and_expr, 1);
9463 int change = 0;
9465 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9466 || (TYPE_PRECISION (type)
9467 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9468 change = 1;
9469 else if (TYPE_PRECISION (TREE_TYPE (and1))
9470 <= HOST_BITS_PER_WIDE_INT
9471 && tree_fits_uhwi_p (and1))
9473 unsigned HOST_WIDE_INT cst;
9475 cst = tree_to_uhwi (and1);
9476 cst &= HOST_WIDE_INT_M1U
9477 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9478 change = (cst == 0);
9479 if (change
9480 && !flag_syntax_only
9481 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9482 == ZERO_EXTEND))
9484 tree uns = unsigned_type_for (TREE_TYPE (and0));
9485 and0 = fold_convert_loc (loc, uns, and0);
9486 and1 = fold_convert_loc (loc, uns, and1);
9489 if (change)
9491 tem = force_fit_type (type, wi::to_widest (and1), 0,
9492 TREE_OVERFLOW (and1));
9493 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9494 fold_convert_loc (loc, type, and0), tem);
9498 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9499 cast (T1)X will fold away. We assume that this happens when X itself
9500 is a cast. */
9501 if (POINTER_TYPE_P (type)
9502 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9503 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9505 tree arg00 = TREE_OPERAND (arg0, 0);
9506 tree arg01 = TREE_OPERAND (arg0, 1);
9508 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9509 when the pointed type needs higher alignment than
9510 the p+ first operand's pointed type. */
9511 if (!in_gimple_form
9512 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9513 && (min_align_of_type (TREE_TYPE (type))
9514 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9515 return NULL_TREE;
9517 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9518 when type is a reference type and arg00's type is not,
9519 because arg00 could be validly nullptr and if arg01 doesn't return,
9520 we don't want false positive binding of reference to nullptr. */
9521 if (TREE_CODE (type) == REFERENCE_TYPE
9522 && !in_gimple_form
9523 && sanitize_flags_p (SANITIZE_NULL)
9524 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9525 return NULL_TREE;
9527 arg00 = fold_convert_loc (loc, type, arg00);
9528 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9531 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9532 of the same precision, and X is an integer type not narrower than
9533 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9534 if (INTEGRAL_TYPE_P (type)
9535 && TREE_CODE (op0) == BIT_NOT_EXPR
9536 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9537 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9538 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9540 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9541 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9542 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9543 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9544 fold_convert_loc (loc, type, tem));
9547 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9548 type of X and Y (integer types only). */
9549 if (INTEGRAL_TYPE_P (type)
9550 && TREE_CODE (op0) == MULT_EXPR
9551 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9552 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9554 /* Be careful not to introduce new overflows. */
9555 tree mult_type;
9556 if (TYPE_OVERFLOW_WRAPS (type))
9557 mult_type = type;
9558 else
9559 mult_type = unsigned_type_for (type);
9561 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9563 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9564 fold_convert_loc (loc, mult_type,
9565 TREE_OPERAND (op0, 0)),
9566 fold_convert_loc (loc, mult_type,
9567 TREE_OPERAND (op0, 1)));
9568 return fold_convert_loc (loc, type, tem);
9572 return NULL_TREE;
9574 case VIEW_CONVERT_EXPR:
9575 if (TREE_CODE (op0) == MEM_REF)
9577 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9578 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9579 tem = fold_build2_loc (loc, MEM_REF, type,
9580 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9581 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9582 return tem;
9585 return NULL_TREE;
9587 case NEGATE_EXPR:
9588 tem = fold_negate_expr (loc, arg0);
9589 if (tem)
9590 return fold_convert_loc (loc, type, tem);
9591 return NULL_TREE;
9593 case ABS_EXPR:
9594 /* Convert fabs((double)float) into (double)fabsf(float). */
9595 if (TREE_CODE (arg0) == NOP_EXPR
9596 && TREE_CODE (type) == REAL_TYPE)
9598 tree targ0 = strip_float_extensions (arg0);
9599 if (targ0 != arg0)
9600 return fold_convert_loc (loc, type,
9601 fold_build1_loc (loc, ABS_EXPR,
9602 TREE_TYPE (targ0),
9603 targ0));
9605 return NULL_TREE;
9607 case BIT_NOT_EXPR:
9608 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9609 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9610 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9611 fold_convert_loc (loc, type,
9612 TREE_OPERAND (arg0, 0)))))
9613 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9614 fold_convert_loc (loc, type,
9615 TREE_OPERAND (arg0, 1)));
9616 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9617 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9618 fold_convert_loc (loc, type,
9619 TREE_OPERAND (arg0, 1)))))
9620 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9621 fold_convert_loc (loc, type,
9622 TREE_OPERAND (arg0, 0)), tem);
9624 return NULL_TREE;
9626 case TRUTH_NOT_EXPR:
9627 /* Note that the operand of this must be an int
9628 and its values must be 0 or 1.
9629 ("true" is a fixed value perhaps depending on the language,
9630 but we don't handle values other than 1 correctly yet.) */
9631 tem = fold_truth_not_expr (loc, arg0);
9632 if (!tem)
9633 return NULL_TREE;
9634 return fold_convert_loc (loc, type, tem);
9636 case INDIRECT_REF:
9637 /* Fold *&X to X if X is an lvalue. */
9638 if (TREE_CODE (op0) == ADDR_EXPR)
9640 tree op00 = TREE_OPERAND (op0, 0);
9641 if ((VAR_P (op00)
9642 || TREE_CODE (op00) == PARM_DECL
9643 || TREE_CODE (op00) == RESULT_DECL)
9644 && !TREE_READONLY (op00))
9645 return op00;
9647 return NULL_TREE;
9649 default:
9650 return NULL_TREE;
9651 } /* switch (code) */
9655 /* If the operation was a conversion do _not_ mark a resulting constant
9656 with TREE_OVERFLOW if the original constant was not. These conversions
9657 have implementation defined behavior and retaining the TREE_OVERFLOW
9658 flag here would confuse later passes such as VRP. */
9659 tree
9660 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9661 tree type, tree op0)
9663 tree res = fold_unary_loc (loc, code, type, op0);
9664 if (res
9665 && TREE_CODE (res) == INTEGER_CST
9666 && TREE_CODE (op0) == INTEGER_CST
9667 && CONVERT_EXPR_CODE_P (code))
9668 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9670 return res;
9673 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9674 operands OP0 and OP1. LOC is the location of the resulting expression.
9675 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9676 Return the folded expression if folding is successful. Otherwise,
9677 return NULL_TREE. */
9678 static tree
9679 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9680 tree arg0, tree arg1, tree op0, tree op1)
9682 tree tem;
9684 /* We only do these simplifications if we are optimizing. */
9685 if (!optimize)
9686 return NULL_TREE;
9688 /* Check for things like (A || B) && (A || C). We can convert this
9689 to A || (B && C). Note that either operator can be any of the four
9690 truth and/or operations and the transformation will still be
9691 valid. Also note that we only care about order for the
9692 ANDIF and ORIF operators. If B contains side effects, this
9693 might change the truth-value of A. */
9694 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9695 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9696 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9697 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9698 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9699 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9701 tree a00 = TREE_OPERAND (arg0, 0);
9702 tree a01 = TREE_OPERAND (arg0, 1);
9703 tree a10 = TREE_OPERAND (arg1, 0);
9704 tree a11 = TREE_OPERAND (arg1, 1);
9705 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9706 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9707 && (code == TRUTH_AND_EXPR
9708 || code == TRUTH_OR_EXPR));
9710 if (operand_equal_p (a00, a10, 0))
9711 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9712 fold_build2_loc (loc, code, type, a01, a11));
9713 else if (commutative && operand_equal_p (a00, a11, 0))
9714 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9715 fold_build2_loc (loc, code, type, a01, a10));
9716 else if (commutative && operand_equal_p (a01, a10, 0))
9717 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9718 fold_build2_loc (loc, code, type, a00, a11));
9720 /* This case if tricky because we must either have commutative
9721 operators or else A10 must not have side-effects. */
9723 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9724 && operand_equal_p (a01, a11, 0))
9725 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9726 fold_build2_loc (loc, code, type, a00, a10),
9727 a01);
9730 /* See if we can build a range comparison. */
9731 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9732 return tem;
9734 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9735 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9737 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9738 if (tem)
9739 return fold_build2_loc (loc, code, type, tem, arg1);
9742 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9743 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9745 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9746 if (tem)
9747 return fold_build2_loc (loc, code, type, arg0, tem);
9750 /* Check for the possibility of merging component references. If our
9751 lhs is another similar operation, try to merge its rhs with our
9752 rhs. Then try to merge our lhs and rhs. */
9753 if (TREE_CODE (arg0) == code
9754 && (tem = fold_truth_andor_1 (loc, code, type,
9755 TREE_OPERAND (arg0, 1), arg1)) != 0)
9756 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9758 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9759 return tem;
9761 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9762 if (param_logical_op_non_short_circuit != -1)
9763 logical_op_non_short_circuit
9764 = param_logical_op_non_short_circuit;
9765 if (logical_op_non_short_circuit
9766 && !sanitize_coverage_p ()
9767 && (code == TRUTH_AND_EXPR
9768 || code == TRUTH_ANDIF_EXPR
9769 || code == TRUTH_OR_EXPR
9770 || code == TRUTH_ORIF_EXPR))
9772 enum tree_code ncode, icode;
9774 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9775 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9776 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9778 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9779 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9780 We don't want to pack more than two leafs to a non-IF AND/OR
9781 expression.
9782 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9783 equal to IF-CODE, then we don't want to add right-hand operand.
9784 If the inner right-hand side of left-hand operand has
9785 side-effects, or isn't simple, then we can't add to it,
9786 as otherwise we might destroy if-sequence. */
9787 if (TREE_CODE (arg0) == icode
9788 && simple_condition_p (arg1)
9789 /* Needed for sequence points to handle trappings, and
9790 side-effects. */
9791 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9793 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9794 arg1);
9795 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9796 tem);
9798 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9799 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9800 else if (TREE_CODE (arg1) == icode
9801 && simple_condition_p (arg0)
9802 /* Needed for sequence points to handle trappings, and
9803 side-effects. */
9804 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9806 tem = fold_build2_loc (loc, ncode, type,
9807 arg0, TREE_OPERAND (arg1, 0));
9808 return fold_build2_loc (loc, icode, type, tem,
9809 TREE_OPERAND (arg1, 1));
9811 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9812 into (A OR B).
9813 For sequence point consistancy, we need to check for trapping,
9814 and side-effects. */
9815 else if (code == icode && simple_condition_p (arg0)
9816 && simple_condition_p (arg1))
9817 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9820 return NULL_TREE;
9823 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9824 by changing CODE to reduce the magnitude of constants involved in
9825 ARG0 of the comparison.
9826 Returns a canonicalized comparison tree if a simplification was
9827 possible, otherwise returns NULL_TREE.
9828 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9829 valid if signed overflow is undefined. */
9831 static tree
9832 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9833 tree arg0, tree arg1,
9834 bool *strict_overflow_p)
9836 enum tree_code code0 = TREE_CODE (arg0);
9837 tree t, cst0 = NULL_TREE;
9838 int sgn0;
9840 /* Match A +- CST code arg1. We can change this only if overflow
9841 is undefined. */
9842 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9843 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9844 /* In principle pointers also have undefined overflow behavior,
9845 but that causes problems elsewhere. */
9846 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9847 && (code0 == MINUS_EXPR
9848 || code0 == PLUS_EXPR)
9849 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9850 return NULL_TREE;
9852 /* Identify the constant in arg0 and its sign. */
9853 cst0 = TREE_OPERAND (arg0, 1);
9854 sgn0 = tree_int_cst_sgn (cst0);
9856 /* Overflowed constants and zero will cause problems. */
9857 if (integer_zerop (cst0)
9858 || TREE_OVERFLOW (cst0))
9859 return NULL_TREE;
9861 /* See if we can reduce the magnitude of the constant in
9862 arg0 by changing the comparison code. */
9863 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9864 if (code == LT_EXPR
9865 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9866 code = LE_EXPR;
9867 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9868 else if (code == GT_EXPR
9869 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9870 code = GE_EXPR;
9871 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9872 else if (code == LE_EXPR
9873 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9874 code = LT_EXPR;
9875 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9876 else if (code == GE_EXPR
9877 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9878 code = GT_EXPR;
9879 else
9880 return NULL_TREE;
9881 *strict_overflow_p = true;
9883 /* Now build the constant reduced in magnitude. But not if that
9884 would produce one outside of its types range. */
9885 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9886 && ((sgn0 == 1
9887 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9888 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9889 || (sgn0 == -1
9890 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9891 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9892 return NULL_TREE;
9894 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9895 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9896 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9897 t = fold_convert (TREE_TYPE (arg1), t);
9899 return fold_build2_loc (loc, code, type, t, arg1);
9902 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9903 overflow further. Try to decrease the magnitude of constants involved
9904 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9905 and put sole constants at the second argument position.
9906 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9908 static tree
9909 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9910 tree arg0, tree arg1)
9912 tree t;
9913 bool strict_overflow_p;
9914 const char * const warnmsg = G_("assuming signed overflow does not occur "
9915 "when reducing constant in comparison");
9917 /* Try canonicalization by simplifying arg0. */
9918 strict_overflow_p = false;
9919 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9920 &strict_overflow_p);
9921 if (t)
9923 if (strict_overflow_p)
9924 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9925 return t;
9928 /* Try canonicalization by simplifying arg1 using the swapped
9929 comparison. */
9930 code = swap_tree_comparison (code);
9931 strict_overflow_p = false;
9932 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9933 &strict_overflow_p);
9934 if (t && strict_overflow_p)
9935 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9936 return t;
9939 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9940 space. This is used to avoid issuing overflow warnings for
9941 expressions like &p->x which cannot wrap. */
9943 static bool
9944 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9946 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9947 return true;
9949 if (maybe_lt (bitpos, 0))
9950 return true;
9952 poly_wide_int wi_offset;
9953 int precision = TYPE_PRECISION (TREE_TYPE (base));
9954 if (offset == NULL_TREE)
9955 wi_offset = wi::zero (precision);
9956 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9957 return true;
9958 else
9959 wi_offset = wi::to_poly_wide (offset);
9961 wi::overflow_type overflow;
9962 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9963 precision);
9964 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9965 if (overflow)
9966 return true;
9968 poly_uint64 total_hwi, size;
9969 if (!total.to_uhwi (&total_hwi)
9970 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9971 &size)
9972 || known_eq (size, 0U))
9973 return true;
9975 if (known_le (total_hwi, size))
9976 return false;
9978 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9979 array. */
9980 if (TREE_CODE (base) == ADDR_EXPR
9981 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9982 &size)
9983 && maybe_ne (size, 0U)
9984 && known_le (total_hwi, size))
9985 return false;
9987 return true;
9990 /* Return a positive integer when the symbol DECL is known to have
9991 a nonzero address, zero when it's known not to (e.g., it's a weak
9992 symbol), and a negative integer when the symbol is not yet in the
9993 symbol table and so whether or not its address is zero is unknown.
9994 For function local objects always return positive integer. */
9995 static int
9996 maybe_nonzero_address (tree decl)
9998 /* Normally, don't do anything for variables and functions before symtab is
9999 built; it is quite possible that DECL will be declared weak later.
10000 But if folding_initializer, we need a constant answer now, so create
10001 the symtab entry and prevent later weak declaration. */
10002 if (DECL_P (decl) && decl_in_symtab_p (decl))
10003 if (struct symtab_node *symbol
10004 = (folding_initializer
10005 ? symtab_node::get_create (decl)
10006 : symtab_node::get (decl)))
10007 return symbol->nonzero_address ();
10009 /* Function local objects are never NULL. */
10010 if (DECL_P (decl)
10011 && (DECL_CONTEXT (decl)
10012 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10013 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10014 return 1;
10016 return -1;
10019 /* Subroutine of fold_binary. This routine performs all of the
10020 transformations that are common to the equality/inequality
10021 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10022 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10023 fold_binary should call fold_binary. Fold a comparison with
10024 tree code CODE and type TYPE with operands OP0 and OP1. Return
10025 the folded comparison or NULL_TREE. */
10027 static tree
10028 fold_comparison (location_t loc, enum tree_code code, tree type,
10029 tree op0, tree op1)
10031 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10032 tree arg0, arg1, tem;
10034 arg0 = op0;
10035 arg1 = op1;
10037 STRIP_SIGN_NOPS (arg0);
10038 STRIP_SIGN_NOPS (arg1);
10040 /* For comparisons of pointers we can decompose it to a compile time
10041 comparison of the base objects and the offsets into the object.
10042 This requires at least one operand being an ADDR_EXPR or a
10043 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10044 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10045 && (TREE_CODE (arg0) == ADDR_EXPR
10046 || TREE_CODE (arg1) == ADDR_EXPR
10047 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10048 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10050 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10051 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10052 machine_mode mode;
10053 int volatilep, reversep, unsignedp;
10054 bool indirect_base0 = false, indirect_base1 = false;
10056 /* Get base and offset for the access. Strip ADDR_EXPR for
10057 get_inner_reference, but put it back by stripping INDIRECT_REF
10058 off the base object if possible. indirect_baseN will be true
10059 if baseN is not an address but refers to the object itself. */
10060 base0 = arg0;
10061 if (TREE_CODE (arg0) == ADDR_EXPR)
10063 base0
10064 = get_inner_reference (TREE_OPERAND (arg0, 0),
10065 &bitsize, &bitpos0, &offset0, &mode,
10066 &unsignedp, &reversep, &volatilep);
10067 if (TREE_CODE (base0) == INDIRECT_REF)
10068 base0 = TREE_OPERAND (base0, 0);
10069 else
10070 indirect_base0 = true;
10072 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10074 base0 = TREE_OPERAND (arg0, 0);
10075 STRIP_SIGN_NOPS (base0);
10076 if (TREE_CODE (base0) == ADDR_EXPR)
10078 base0
10079 = get_inner_reference (TREE_OPERAND (base0, 0),
10080 &bitsize, &bitpos0, &offset0, &mode,
10081 &unsignedp, &reversep, &volatilep);
10082 if (TREE_CODE (base0) == INDIRECT_REF)
10083 base0 = TREE_OPERAND (base0, 0);
10084 else
10085 indirect_base0 = true;
10087 if (offset0 == NULL_TREE || integer_zerop (offset0))
10088 offset0 = TREE_OPERAND (arg0, 1);
10089 else
10090 offset0 = size_binop (PLUS_EXPR, offset0,
10091 TREE_OPERAND (arg0, 1));
10092 if (poly_int_tree_p (offset0))
10094 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10095 TYPE_PRECISION (sizetype));
10096 tem <<= LOG2_BITS_PER_UNIT;
10097 tem += bitpos0;
10098 if (tem.to_shwi (&bitpos0))
10099 offset0 = NULL_TREE;
10103 base1 = arg1;
10104 if (TREE_CODE (arg1) == ADDR_EXPR)
10106 base1
10107 = get_inner_reference (TREE_OPERAND (arg1, 0),
10108 &bitsize, &bitpos1, &offset1, &mode,
10109 &unsignedp, &reversep, &volatilep);
10110 if (TREE_CODE (base1) == INDIRECT_REF)
10111 base1 = TREE_OPERAND (base1, 0);
10112 else
10113 indirect_base1 = true;
10115 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10117 base1 = TREE_OPERAND (arg1, 0);
10118 STRIP_SIGN_NOPS (base1);
10119 if (TREE_CODE (base1) == ADDR_EXPR)
10121 base1
10122 = get_inner_reference (TREE_OPERAND (base1, 0),
10123 &bitsize, &bitpos1, &offset1, &mode,
10124 &unsignedp, &reversep, &volatilep);
10125 if (TREE_CODE (base1) == INDIRECT_REF)
10126 base1 = TREE_OPERAND (base1, 0);
10127 else
10128 indirect_base1 = true;
10130 if (offset1 == NULL_TREE || integer_zerop (offset1))
10131 offset1 = TREE_OPERAND (arg1, 1);
10132 else
10133 offset1 = size_binop (PLUS_EXPR, offset1,
10134 TREE_OPERAND (arg1, 1));
10135 if (poly_int_tree_p (offset1))
10137 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10138 TYPE_PRECISION (sizetype));
10139 tem <<= LOG2_BITS_PER_UNIT;
10140 tem += bitpos1;
10141 if (tem.to_shwi (&bitpos1))
10142 offset1 = NULL_TREE;
10146 /* If we have equivalent bases we might be able to simplify. */
10147 if (indirect_base0 == indirect_base1
10148 && operand_equal_p (base0, base1,
10149 indirect_base0 ? OEP_ADDRESS_OF : 0))
10151 /* We can fold this expression to a constant if the non-constant
10152 offset parts are equal. */
10153 if ((offset0 == offset1
10154 || (offset0 && offset1
10155 && operand_equal_p (offset0, offset1, 0)))
10156 && (equality_code
10157 || (indirect_base0
10158 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10159 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10161 if (!equality_code
10162 && maybe_ne (bitpos0, bitpos1)
10163 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10164 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10165 fold_overflow_warning (("assuming pointer wraparound does not "
10166 "occur when comparing P +- C1 with "
10167 "P +- C2"),
10168 WARN_STRICT_OVERFLOW_CONDITIONAL);
10170 switch (code)
10172 case EQ_EXPR:
10173 if (known_eq (bitpos0, bitpos1))
10174 return constant_boolean_node (true, type);
10175 if (known_ne (bitpos0, bitpos1))
10176 return constant_boolean_node (false, type);
10177 break;
10178 case NE_EXPR:
10179 if (known_ne (bitpos0, bitpos1))
10180 return constant_boolean_node (true, type);
10181 if (known_eq (bitpos0, bitpos1))
10182 return constant_boolean_node (false, type);
10183 break;
10184 case LT_EXPR:
10185 if (known_lt (bitpos0, bitpos1))
10186 return constant_boolean_node (true, type);
10187 if (known_ge (bitpos0, bitpos1))
10188 return constant_boolean_node (false, type);
10189 break;
10190 case LE_EXPR:
10191 if (known_le (bitpos0, bitpos1))
10192 return constant_boolean_node (true, type);
10193 if (known_gt (bitpos0, bitpos1))
10194 return constant_boolean_node (false, type);
10195 break;
10196 case GE_EXPR:
10197 if (known_ge (bitpos0, bitpos1))
10198 return constant_boolean_node (true, type);
10199 if (known_lt (bitpos0, bitpos1))
10200 return constant_boolean_node (false, type);
10201 break;
10202 case GT_EXPR:
10203 if (known_gt (bitpos0, bitpos1))
10204 return constant_boolean_node (true, type);
10205 if (known_le (bitpos0, bitpos1))
10206 return constant_boolean_node (false, type);
10207 break;
10208 default:;
10211 /* We can simplify the comparison to a comparison of the variable
10212 offset parts if the constant offset parts are equal.
10213 Be careful to use signed sizetype here because otherwise we
10214 mess with array offsets in the wrong way. This is possible
10215 because pointer arithmetic is restricted to retain within an
10216 object and overflow on pointer differences is undefined as of
10217 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10218 else if (known_eq (bitpos0, bitpos1)
10219 && (equality_code
10220 || (indirect_base0
10221 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10222 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10224 /* By converting to signed sizetype we cover middle-end pointer
10225 arithmetic which operates on unsigned pointer types of size
10226 type size and ARRAY_REF offsets which are properly sign or
10227 zero extended from their type in case it is narrower than
10228 sizetype. */
10229 if (offset0 == NULL_TREE)
10230 offset0 = build_int_cst (ssizetype, 0);
10231 else
10232 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10233 if (offset1 == NULL_TREE)
10234 offset1 = build_int_cst (ssizetype, 0);
10235 else
10236 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10238 if (!equality_code
10239 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10240 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10241 fold_overflow_warning (("assuming pointer wraparound does not "
10242 "occur when comparing P +- C1 with "
10243 "P +- C2"),
10244 WARN_STRICT_OVERFLOW_COMPARISON);
10246 return fold_build2_loc (loc, code, type, offset0, offset1);
10249 /* For equal offsets we can simplify to a comparison of the
10250 base addresses. */
10251 else if (known_eq (bitpos0, bitpos1)
10252 && (indirect_base0
10253 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10254 && (indirect_base1
10255 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10256 && ((offset0 == offset1)
10257 || (offset0 && offset1
10258 && operand_equal_p (offset0, offset1, 0))))
10260 if (indirect_base0)
10261 base0 = build_fold_addr_expr_loc (loc, base0);
10262 if (indirect_base1)
10263 base1 = build_fold_addr_expr_loc (loc, base1);
10264 return fold_build2_loc (loc, code, type, base0, base1);
10266 /* Comparison between an ordinary (non-weak) symbol and a null
10267 pointer can be eliminated since such symbols must have a non
10268 null address. In C, relational expressions between pointers
10269 to objects and null pointers are undefined. The results
10270 below follow the C++ rules with the additional property that
10271 every object pointer compares greater than a null pointer.
10273 else if (((DECL_P (base0)
10274 && maybe_nonzero_address (base0) > 0
10275 /* Avoid folding references to struct members at offset 0 to
10276 prevent tests like '&ptr->firstmember == 0' from getting
10277 eliminated. When ptr is null, although the -> expression
10278 is strictly speaking invalid, GCC retains it as a matter
10279 of QoI. See PR c/44555. */
10280 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10281 || CONSTANT_CLASS_P (base0))
10282 && indirect_base0
10283 /* The caller guarantees that when one of the arguments is
10284 constant (i.e., null in this case) it is second. */
10285 && integer_zerop (arg1))
10287 switch (code)
10289 case EQ_EXPR:
10290 case LE_EXPR:
10291 case LT_EXPR:
10292 return constant_boolean_node (false, type);
10293 case GE_EXPR:
10294 case GT_EXPR:
10295 case NE_EXPR:
10296 return constant_boolean_node (true, type);
10297 default:
10298 gcc_unreachable ();
10303 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10304 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10305 the resulting offset is smaller in absolute value than the
10306 original one and has the same sign. */
10307 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10308 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10309 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10310 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10311 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10312 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10313 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10314 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10316 tree const1 = TREE_OPERAND (arg0, 1);
10317 tree const2 = TREE_OPERAND (arg1, 1);
10318 tree variable1 = TREE_OPERAND (arg0, 0);
10319 tree variable2 = TREE_OPERAND (arg1, 0);
10320 tree cst;
10321 const char * const warnmsg = G_("assuming signed overflow does not "
10322 "occur when combining constants around "
10323 "a comparison");
10325 /* Put the constant on the side where it doesn't overflow and is
10326 of lower absolute value and of same sign than before. */
10327 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10328 ? MINUS_EXPR : PLUS_EXPR,
10329 const2, const1);
10330 if (!TREE_OVERFLOW (cst)
10331 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10332 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10334 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10335 return fold_build2_loc (loc, code, type,
10336 variable1,
10337 fold_build2_loc (loc, TREE_CODE (arg1),
10338 TREE_TYPE (arg1),
10339 variable2, cst));
10342 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10343 ? MINUS_EXPR : PLUS_EXPR,
10344 const1, const2);
10345 if (!TREE_OVERFLOW (cst)
10346 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10347 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10349 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10350 return fold_build2_loc (loc, code, type,
10351 fold_build2_loc (loc, TREE_CODE (arg0),
10352 TREE_TYPE (arg0),
10353 variable1, cst),
10354 variable2);
10358 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10359 if (tem)
10360 return tem;
10362 /* If we are comparing an expression that just has comparisons
10363 of two integer values, arithmetic expressions of those comparisons,
10364 and constants, we can simplify it. There are only three cases
10365 to check: the two values can either be equal, the first can be
10366 greater, or the second can be greater. Fold the expression for
10367 those three values. Since each value must be 0 or 1, we have
10368 eight possibilities, each of which corresponds to the constant 0
10369 or 1 or one of the six possible comparisons.
10371 This handles common cases like (a > b) == 0 but also handles
10372 expressions like ((x > y) - (y > x)) > 0, which supposedly
10373 occur in macroized code. */
10375 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10377 tree cval1 = 0, cval2 = 0;
10379 if (twoval_comparison_p (arg0, &cval1, &cval2)
10380 /* Don't handle degenerate cases here; they should already
10381 have been handled anyway. */
10382 && cval1 != 0 && cval2 != 0
10383 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10384 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10385 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10386 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10387 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10388 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10389 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10391 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10392 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10394 /* We can't just pass T to eval_subst in case cval1 or cval2
10395 was the same as ARG1. */
10397 tree high_result
10398 = fold_build2_loc (loc, code, type,
10399 eval_subst (loc, arg0, cval1, maxval,
10400 cval2, minval),
10401 arg1);
10402 tree equal_result
10403 = fold_build2_loc (loc, code, type,
10404 eval_subst (loc, arg0, cval1, maxval,
10405 cval2, maxval),
10406 arg1);
10407 tree low_result
10408 = fold_build2_loc (loc, code, type,
10409 eval_subst (loc, arg0, cval1, minval,
10410 cval2, maxval),
10411 arg1);
10413 /* All three of these results should be 0 or 1. Confirm they are.
10414 Then use those values to select the proper code to use. */
10416 if (TREE_CODE (high_result) == INTEGER_CST
10417 && TREE_CODE (equal_result) == INTEGER_CST
10418 && TREE_CODE (low_result) == INTEGER_CST)
10420 /* Make a 3-bit mask with the high-order bit being the
10421 value for `>', the next for '=', and the low for '<'. */
10422 switch ((integer_onep (high_result) * 4)
10423 + (integer_onep (equal_result) * 2)
10424 + integer_onep (low_result))
10426 case 0:
10427 /* Always false. */
10428 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10429 case 1:
10430 code = LT_EXPR;
10431 break;
10432 case 2:
10433 code = EQ_EXPR;
10434 break;
10435 case 3:
10436 code = LE_EXPR;
10437 break;
10438 case 4:
10439 code = GT_EXPR;
10440 break;
10441 case 5:
10442 code = NE_EXPR;
10443 break;
10444 case 6:
10445 code = GE_EXPR;
10446 break;
10447 case 7:
10448 /* Always true. */
10449 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10452 return fold_build2_loc (loc, code, type, cval1, cval2);
10457 return NULL_TREE;
10461 /* Subroutine of fold_binary. Optimize complex multiplications of the
10462 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10463 argument EXPR represents the expression "z" of type TYPE. */
10465 static tree
10466 fold_mult_zconjz (location_t loc, tree type, tree expr)
10468 tree itype = TREE_TYPE (type);
10469 tree rpart, ipart, tem;
10471 if (TREE_CODE (expr) == COMPLEX_EXPR)
10473 rpart = TREE_OPERAND (expr, 0);
10474 ipart = TREE_OPERAND (expr, 1);
10476 else if (TREE_CODE (expr) == COMPLEX_CST)
10478 rpart = TREE_REALPART (expr);
10479 ipart = TREE_IMAGPART (expr);
10481 else
10483 expr = save_expr (expr);
10484 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10485 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10488 rpart = save_expr (rpart);
10489 ipart = save_expr (ipart);
10490 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10491 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10492 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10493 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10494 build_zero_cst (itype));
10498 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10499 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10500 true if successful. */
10502 static bool
10503 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10505 unsigned HOST_WIDE_INT i, nunits;
10507 if (TREE_CODE (arg) == VECTOR_CST
10508 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10510 for (i = 0; i < nunits; ++i)
10511 elts[i] = VECTOR_CST_ELT (arg, i);
10513 else if (TREE_CODE (arg) == CONSTRUCTOR)
10515 constructor_elt *elt;
10517 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10518 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10519 return false;
10520 else
10521 elts[i] = elt->value;
10523 else
10524 return false;
10525 for (; i < nelts; i++)
10526 elts[i]
10527 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10528 return true;
10531 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10532 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10533 NULL_TREE otherwise. */
10535 tree
10536 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10538 unsigned int i;
10539 unsigned HOST_WIDE_INT nelts;
10540 bool need_ctor = false;
10542 if (!sel.length ().is_constant (&nelts))
10543 return NULL_TREE;
10544 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10545 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10546 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10547 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10548 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10549 return NULL_TREE;
10551 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10552 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10553 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10554 return NULL_TREE;
10556 tree_vector_builder out_elts (type, nelts, 1);
10557 for (i = 0; i < nelts; i++)
10559 HOST_WIDE_INT index;
10560 if (!sel[i].is_constant (&index))
10561 return NULL_TREE;
10562 if (!CONSTANT_CLASS_P (in_elts[index]))
10563 need_ctor = true;
10564 out_elts.quick_push (unshare_expr (in_elts[index]));
10567 if (need_ctor)
10569 vec<constructor_elt, va_gc> *v;
10570 vec_alloc (v, nelts);
10571 for (i = 0; i < nelts; i++)
10572 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10573 return build_constructor (type, v);
10575 else
10576 return out_elts.build ();
10579 /* Try to fold a pointer difference of type TYPE two address expressions of
10580 array references AREF0 and AREF1 using location LOC. Return a
10581 simplified expression for the difference or NULL_TREE. */
10583 static tree
10584 fold_addr_of_array_ref_difference (location_t loc, tree type,
10585 tree aref0, tree aref1,
10586 bool use_pointer_diff)
10588 tree base0 = TREE_OPERAND (aref0, 0);
10589 tree base1 = TREE_OPERAND (aref1, 0);
10590 tree base_offset = build_int_cst (type, 0);
10592 /* If the bases are array references as well, recurse. If the bases
10593 are pointer indirections compute the difference of the pointers.
10594 If the bases are equal, we are set. */
10595 if ((TREE_CODE (base0) == ARRAY_REF
10596 && TREE_CODE (base1) == ARRAY_REF
10597 && (base_offset
10598 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10599 use_pointer_diff)))
10600 || (INDIRECT_REF_P (base0)
10601 && INDIRECT_REF_P (base1)
10602 && (base_offset
10603 = use_pointer_diff
10604 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10605 TREE_OPERAND (base0, 0),
10606 TREE_OPERAND (base1, 0))
10607 : fold_binary_loc (loc, MINUS_EXPR, type,
10608 fold_convert (type,
10609 TREE_OPERAND (base0, 0)),
10610 fold_convert (type,
10611 TREE_OPERAND (base1, 0)))))
10612 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10614 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10615 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10616 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10617 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10618 return fold_build2_loc (loc, PLUS_EXPR, type,
10619 base_offset,
10620 fold_build2_loc (loc, MULT_EXPR, type,
10621 diff, esz));
10623 return NULL_TREE;
10626 /* If the real or vector real constant CST of type TYPE has an exact
10627 inverse, return it, else return NULL. */
10629 tree
10630 exact_inverse (tree type, tree cst)
10632 REAL_VALUE_TYPE r;
10633 tree unit_type;
10634 machine_mode mode;
10636 switch (TREE_CODE (cst))
10638 case REAL_CST:
10639 r = TREE_REAL_CST (cst);
10641 if (exact_real_inverse (TYPE_MODE (type), &r))
10642 return build_real (type, r);
10644 return NULL_TREE;
10646 case VECTOR_CST:
10648 unit_type = TREE_TYPE (type);
10649 mode = TYPE_MODE (unit_type);
10651 tree_vector_builder elts;
10652 if (!elts.new_unary_operation (type, cst, false))
10653 return NULL_TREE;
10654 unsigned int count = elts.encoded_nelts ();
10655 for (unsigned int i = 0; i < count; ++i)
10657 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10658 if (!exact_real_inverse (mode, &r))
10659 return NULL_TREE;
10660 elts.quick_push (build_real (unit_type, r));
10663 return elts.build ();
10666 default:
10667 return NULL_TREE;
10671 /* Mask out the tz least significant bits of X of type TYPE where
10672 tz is the number of trailing zeroes in Y. */
10673 static wide_int
10674 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10676 int tz = wi::ctz (y);
10677 if (tz > 0)
10678 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10679 return x;
10682 /* Return true when T is an address and is known to be nonzero.
10683 For floating point we further ensure that T is not denormal.
10684 Similar logic is present in nonzero_address in rtlanal.h.
10686 If the return value is based on the assumption that signed overflow
10687 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10688 change *STRICT_OVERFLOW_P. */
10690 static bool
10691 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10693 tree type = TREE_TYPE (t);
10694 enum tree_code code;
10696 /* Doing something useful for floating point would need more work. */
10697 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10698 return false;
10700 code = TREE_CODE (t);
10701 switch (TREE_CODE_CLASS (code))
10703 case tcc_unary:
10704 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10705 strict_overflow_p);
10706 case tcc_binary:
10707 case tcc_comparison:
10708 return tree_binary_nonzero_warnv_p (code, type,
10709 TREE_OPERAND (t, 0),
10710 TREE_OPERAND (t, 1),
10711 strict_overflow_p);
10712 case tcc_constant:
10713 case tcc_declaration:
10714 case tcc_reference:
10715 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10717 default:
10718 break;
10721 switch (code)
10723 case TRUTH_NOT_EXPR:
10724 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10725 strict_overflow_p);
10727 case TRUTH_AND_EXPR:
10728 case TRUTH_OR_EXPR:
10729 case TRUTH_XOR_EXPR:
10730 return tree_binary_nonzero_warnv_p (code, type,
10731 TREE_OPERAND (t, 0),
10732 TREE_OPERAND (t, 1),
10733 strict_overflow_p);
10735 case COND_EXPR:
10736 case CONSTRUCTOR:
10737 case OBJ_TYPE_REF:
10738 case ASSERT_EXPR:
10739 case ADDR_EXPR:
10740 case WITH_SIZE_EXPR:
10741 case SSA_NAME:
10742 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10744 case COMPOUND_EXPR:
10745 case MODIFY_EXPR:
10746 case BIND_EXPR:
10747 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10748 strict_overflow_p);
10750 case SAVE_EXPR:
10751 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10752 strict_overflow_p);
10754 case CALL_EXPR:
10756 tree fndecl = get_callee_fndecl (t);
10757 if (!fndecl) return false;
10758 if (flag_delete_null_pointer_checks && !flag_check_new
10759 && DECL_IS_OPERATOR_NEW_P (fndecl)
10760 && !TREE_NOTHROW (fndecl))
10761 return true;
10762 if (flag_delete_null_pointer_checks
10763 && lookup_attribute ("returns_nonnull",
10764 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10765 return true;
10766 return alloca_call_p (t);
10769 default:
10770 break;
10772 return false;
10775 /* Return true when T is an address and is known to be nonzero.
10776 Handle warnings about undefined signed overflow. */
10778 bool
10779 tree_expr_nonzero_p (tree t)
10781 bool ret, strict_overflow_p;
10783 strict_overflow_p = false;
10784 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10785 if (strict_overflow_p)
10786 fold_overflow_warning (("assuming signed overflow does not occur when "
10787 "determining that expression is always "
10788 "non-zero"),
10789 WARN_STRICT_OVERFLOW_MISC);
10790 return ret;
10793 /* Return true if T is known not to be equal to an integer W. */
10795 bool
10796 expr_not_equal_to (tree t, const wide_int &w)
10798 int_range_max vr;
10799 switch (TREE_CODE (t))
10801 case INTEGER_CST:
10802 return wi::to_wide (t) != w;
10804 case SSA_NAME:
10805 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10806 return false;
10808 if (cfun)
10809 get_range_query (cfun)->range_of_expr (vr, t);
10810 else
10811 get_global_range_query ()->range_of_expr (vr, t);
10813 if (!vr.undefined_p ()
10814 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10815 return true;
10816 /* If T has some known zero bits and W has any of those bits set,
10817 then T is known not to be equal to W. */
10818 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10819 TYPE_PRECISION (TREE_TYPE (t))), 0))
10820 return true;
10821 return false;
10823 default:
10824 return false;
10828 /* Fold a binary expression of code CODE and type TYPE with operands
10829 OP0 and OP1. LOC is the location of the resulting expression.
10830 Return the folded expression if folding is successful. Otherwise,
10831 return NULL_TREE. */
10833 tree
10834 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10835 tree op0, tree op1)
10837 enum tree_code_class kind = TREE_CODE_CLASS (code);
10838 tree arg0, arg1, tem;
10839 tree t1 = NULL_TREE;
10840 bool strict_overflow_p;
10841 unsigned int prec;
10843 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10844 && TREE_CODE_LENGTH (code) == 2
10845 && op0 != NULL_TREE
10846 && op1 != NULL_TREE);
10848 arg0 = op0;
10849 arg1 = op1;
10851 /* Strip any conversions that don't change the mode. This is
10852 safe for every expression, except for a comparison expression
10853 because its signedness is derived from its operands. So, in
10854 the latter case, only strip conversions that don't change the
10855 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10856 preserved.
10858 Note that this is done as an internal manipulation within the
10859 constant folder, in order to find the simplest representation
10860 of the arguments so that their form can be studied. In any
10861 cases, the appropriate type conversions should be put back in
10862 the tree that will get out of the constant folder. */
10864 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10866 STRIP_SIGN_NOPS (arg0);
10867 STRIP_SIGN_NOPS (arg1);
10869 else
10871 STRIP_NOPS (arg0);
10872 STRIP_NOPS (arg1);
10875 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10876 constant but we can't do arithmetic on them. */
10877 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10879 tem = const_binop (code, type, arg0, arg1);
10880 if (tem != NULL_TREE)
10882 if (TREE_TYPE (tem) != type)
10883 tem = fold_convert_loc (loc, type, tem);
10884 return tem;
10888 /* If this is a commutative operation, and ARG0 is a constant, move it
10889 to ARG1 to reduce the number of tests below. */
10890 if (commutative_tree_code (code)
10891 && tree_swap_operands_p (arg0, arg1))
10892 return fold_build2_loc (loc, code, type, op1, op0);
10894 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10895 to ARG1 to reduce the number of tests below. */
10896 if (kind == tcc_comparison
10897 && tree_swap_operands_p (arg0, arg1))
10898 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10900 tem = generic_simplify (loc, code, type, op0, op1);
10901 if (tem)
10902 return tem;
10904 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10906 First check for cases where an arithmetic operation is applied to a
10907 compound, conditional, or comparison operation. Push the arithmetic
10908 operation inside the compound or conditional to see if any folding
10909 can then be done. Convert comparison to conditional for this purpose.
10910 The also optimizes non-constant cases that used to be done in
10911 expand_expr.
10913 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10914 one of the operands is a comparison and the other is a comparison, a
10915 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10916 code below would make the expression more complex. Change it to a
10917 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10918 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10920 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10921 || code == EQ_EXPR || code == NE_EXPR)
10922 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10923 && ((truth_value_p (TREE_CODE (arg0))
10924 && (truth_value_p (TREE_CODE (arg1))
10925 || (TREE_CODE (arg1) == BIT_AND_EXPR
10926 && integer_onep (TREE_OPERAND (arg1, 1)))))
10927 || (truth_value_p (TREE_CODE (arg1))
10928 && (truth_value_p (TREE_CODE (arg0))
10929 || (TREE_CODE (arg0) == BIT_AND_EXPR
10930 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10932 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10933 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10934 : TRUTH_XOR_EXPR,
10935 boolean_type_node,
10936 fold_convert_loc (loc, boolean_type_node, arg0),
10937 fold_convert_loc (loc, boolean_type_node, arg1));
10939 if (code == EQ_EXPR)
10940 tem = invert_truthvalue_loc (loc, tem);
10942 return fold_convert_loc (loc, type, tem);
10945 if (TREE_CODE_CLASS (code) == tcc_binary
10946 || TREE_CODE_CLASS (code) == tcc_comparison)
10948 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10950 tem = fold_build2_loc (loc, code, type,
10951 fold_convert_loc (loc, TREE_TYPE (op0),
10952 TREE_OPERAND (arg0, 1)), op1);
10953 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10954 tem);
10956 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10958 tem = fold_build2_loc (loc, code, type, op0,
10959 fold_convert_loc (loc, TREE_TYPE (op1),
10960 TREE_OPERAND (arg1, 1)));
10961 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10962 tem);
10965 if (TREE_CODE (arg0) == COND_EXPR
10966 || TREE_CODE (arg0) == VEC_COND_EXPR
10967 || COMPARISON_CLASS_P (arg0))
10969 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10970 arg0, arg1,
10971 /*cond_first_p=*/1);
10972 if (tem != NULL_TREE)
10973 return tem;
10976 if (TREE_CODE (arg1) == COND_EXPR
10977 || TREE_CODE (arg1) == VEC_COND_EXPR
10978 || COMPARISON_CLASS_P (arg1))
10980 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10981 arg1, arg0,
10982 /*cond_first_p=*/0);
10983 if (tem != NULL_TREE)
10984 return tem;
10988 switch (code)
10990 case MEM_REF:
10991 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10992 if (TREE_CODE (arg0) == ADDR_EXPR
10993 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10995 tree iref = TREE_OPERAND (arg0, 0);
10996 return fold_build2 (MEM_REF, type,
10997 TREE_OPERAND (iref, 0),
10998 int_const_binop (PLUS_EXPR, arg1,
10999 TREE_OPERAND (iref, 1)));
11002 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11003 if (TREE_CODE (arg0) == ADDR_EXPR
11004 && handled_component_p (TREE_OPERAND (arg0, 0)))
11006 tree base;
11007 poly_int64 coffset;
11008 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11009 &coffset);
11010 if (!base)
11011 return NULL_TREE;
11012 return fold_build2 (MEM_REF, type,
11013 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11014 int_const_binop (PLUS_EXPR, arg1,
11015 size_int (coffset)));
11018 return NULL_TREE;
11020 case POINTER_PLUS_EXPR:
11021 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11022 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11023 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11024 return fold_convert_loc (loc, type,
11025 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11026 fold_convert_loc (loc, sizetype,
11027 arg1),
11028 fold_convert_loc (loc, sizetype,
11029 arg0)));
11031 return NULL_TREE;
11033 case PLUS_EXPR:
11034 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11036 /* X + (X / CST) * -CST is X % CST. */
11037 if (TREE_CODE (arg1) == MULT_EXPR
11038 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11039 && operand_equal_p (arg0,
11040 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11042 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11043 tree cst1 = TREE_OPERAND (arg1, 1);
11044 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11045 cst1, cst0);
11046 if (sum && integer_zerop (sum))
11047 return fold_convert_loc (loc, type,
11048 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11049 TREE_TYPE (arg0), arg0,
11050 cst0));
11054 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11055 one. Make sure the type is not saturating and has the signedness of
11056 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11057 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11058 if ((TREE_CODE (arg0) == MULT_EXPR
11059 || TREE_CODE (arg1) == MULT_EXPR)
11060 && !TYPE_SATURATING (type)
11061 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11062 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11063 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11065 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11066 if (tem)
11067 return tem;
11070 if (! FLOAT_TYPE_P (type))
11072 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11073 (plus (plus (mult) (mult)) (foo)) so that we can
11074 take advantage of the factoring cases below. */
11075 if (ANY_INTEGRAL_TYPE_P (type)
11076 && TYPE_OVERFLOW_WRAPS (type)
11077 && (((TREE_CODE (arg0) == PLUS_EXPR
11078 || TREE_CODE (arg0) == MINUS_EXPR)
11079 && TREE_CODE (arg1) == MULT_EXPR)
11080 || ((TREE_CODE (arg1) == PLUS_EXPR
11081 || TREE_CODE (arg1) == MINUS_EXPR)
11082 && TREE_CODE (arg0) == MULT_EXPR)))
11084 tree parg0, parg1, parg, marg;
11085 enum tree_code pcode;
11087 if (TREE_CODE (arg1) == MULT_EXPR)
11088 parg = arg0, marg = arg1;
11089 else
11090 parg = arg1, marg = arg0;
11091 pcode = TREE_CODE (parg);
11092 parg0 = TREE_OPERAND (parg, 0);
11093 parg1 = TREE_OPERAND (parg, 1);
11094 STRIP_NOPS (parg0);
11095 STRIP_NOPS (parg1);
11097 if (TREE_CODE (parg0) == MULT_EXPR
11098 && TREE_CODE (parg1) != MULT_EXPR)
11099 return fold_build2_loc (loc, pcode, type,
11100 fold_build2_loc (loc, PLUS_EXPR, type,
11101 fold_convert_loc (loc, type,
11102 parg0),
11103 fold_convert_loc (loc, type,
11104 marg)),
11105 fold_convert_loc (loc, type, parg1));
11106 if (TREE_CODE (parg0) != MULT_EXPR
11107 && TREE_CODE (parg1) == MULT_EXPR)
11108 return
11109 fold_build2_loc (loc, PLUS_EXPR, type,
11110 fold_convert_loc (loc, type, parg0),
11111 fold_build2_loc (loc, pcode, type,
11112 fold_convert_loc (loc, type, marg),
11113 fold_convert_loc (loc, type,
11114 parg1)));
11117 else
11119 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11120 to __complex__ ( x, y ). This is not the same for SNaNs or
11121 if signed zeros are involved. */
11122 if (!HONOR_SNANS (arg0)
11123 && !HONOR_SIGNED_ZEROS (arg0)
11124 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11126 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11127 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11128 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11129 bool arg0rz = false, arg0iz = false;
11130 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11131 || (arg0i && (arg0iz = real_zerop (arg0i))))
11133 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11134 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11135 if (arg0rz && arg1i && real_zerop (arg1i))
11137 tree rp = arg1r ? arg1r
11138 : build1 (REALPART_EXPR, rtype, arg1);
11139 tree ip = arg0i ? arg0i
11140 : build1 (IMAGPART_EXPR, rtype, arg0);
11141 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11143 else if (arg0iz && arg1r && real_zerop (arg1r))
11145 tree rp = arg0r ? arg0r
11146 : build1 (REALPART_EXPR, rtype, arg0);
11147 tree ip = arg1i ? arg1i
11148 : build1 (IMAGPART_EXPR, rtype, arg1);
11149 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11154 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11155 We associate floats only if the user has specified
11156 -fassociative-math. */
11157 if (flag_associative_math
11158 && TREE_CODE (arg1) == PLUS_EXPR
11159 && TREE_CODE (arg0) != MULT_EXPR)
11161 tree tree10 = TREE_OPERAND (arg1, 0);
11162 tree tree11 = TREE_OPERAND (arg1, 1);
11163 if (TREE_CODE (tree11) == MULT_EXPR
11164 && TREE_CODE (tree10) == MULT_EXPR)
11166 tree tree0;
11167 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11168 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11171 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11172 We associate floats only if the user has specified
11173 -fassociative-math. */
11174 if (flag_associative_math
11175 && TREE_CODE (arg0) == PLUS_EXPR
11176 && TREE_CODE (arg1) != MULT_EXPR)
11178 tree tree00 = TREE_OPERAND (arg0, 0);
11179 tree tree01 = TREE_OPERAND (arg0, 1);
11180 if (TREE_CODE (tree01) == MULT_EXPR
11181 && TREE_CODE (tree00) == MULT_EXPR)
11183 tree tree0;
11184 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11185 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11190 bit_rotate:
11191 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11192 is a rotate of A by C1 bits. */
11193 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11194 is a rotate of A by B bits.
11195 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11196 though in this case CODE must be | and not + or ^, otherwise
11197 it doesn't return A when B is 0. */
11199 enum tree_code code0, code1;
11200 tree rtype;
11201 code0 = TREE_CODE (arg0);
11202 code1 = TREE_CODE (arg1);
11203 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11204 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11205 && operand_equal_p (TREE_OPERAND (arg0, 0),
11206 TREE_OPERAND (arg1, 0), 0)
11207 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11208 TYPE_UNSIGNED (rtype))
11209 /* Only create rotates in complete modes. Other cases are not
11210 expanded properly. */
11211 && (element_precision (rtype)
11212 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11214 tree tree01, tree11;
11215 tree orig_tree01, orig_tree11;
11216 enum tree_code code01, code11;
11218 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11219 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11220 STRIP_NOPS (tree01);
11221 STRIP_NOPS (tree11);
11222 code01 = TREE_CODE (tree01);
11223 code11 = TREE_CODE (tree11);
11224 if (code11 != MINUS_EXPR
11225 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11227 std::swap (code0, code1);
11228 std::swap (code01, code11);
11229 std::swap (tree01, tree11);
11230 std::swap (orig_tree01, orig_tree11);
11232 if (code01 == INTEGER_CST
11233 && code11 == INTEGER_CST
11234 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11235 == element_precision (rtype)))
11237 tem = build2_loc (loc, LROTATE_EXPR,
11238 rtype, TREE_OPERAND (arg0, 0),
11239 code0 == LSHIFT_EXPR
11240 ? orig_tree01 : orig_tree11);
11241 return fold_convert_loc (loc, type, tem);
11243 else if (code11 == MINUS_EXPR)
11245 tree tree110, tree111;
11246 tree110 = TREE_OPERAND (tree11, 0);
11247 tree111 = TREE_OPERAND (tree11, 1);
11248 STRIP_NOPS (tree110);
11249 STRIP_NOPS (tree111);
11250 if (TREE_CODE (tree110) == INTEGER_CST
11251 && compare_tree_int (tree110,
11252 element_precision (rtype)) == 0
11253 && operand_equal_p (tree01, tree111, 0))
11255 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11256 ? LROTATE_EXPR : RROTATE_EXPR),
11257 rtype, TREE_OPERAND (arg0, 0),
11258 orig_tree01);
11259 return fold_convert_loc (loc, type, tem);
11262 else if (code == BIT_IOR_EXPR
11263 && code11 == BIT_AND_EXPR
11264 && pow2p_hwi (element_precision (rtype)))
11266 tree tree110, tree111;
11267 tree110 = TREE_OPERAND (tree11, 0);
11268 tree111 = TREE_OPERAND (tree11, 1);
11269 STRIP_NOPS (tree110);
11270 STRIP_NOPS (tree111);
11271 if (TREE_CODE (tree110) == NEGATE_EXPR
11272 && TREE_CODE (tree111) == INTEGER_CST
11273 && compare_tree_int (tree111,
11274 element_precision (rtype) - 1) == 0
11275 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11277 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11278 ? LROTATE_EXPR : RROTATE_EXPR),
11279 rtype, TREE_OPERAND (arg0, 0),
11280 orig_tree01);
11281 return fold_convert_loc (loc, type, tem);
11287 associate:
11288 /* In most languages, can't associate operations on floats through
11289 parentheses. Rather than remember where the parentheses were, we
11290 don't associate floats at all, unless the user has specified
11291 -fassociative-math.
11292 And, we need to make sure type is not saturating. */
11294 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11295 && !TYPE_SATURATING (type))
11297 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11298 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11299 tree atype = type;
11300 bool ok = true;
11302 /* Split both trees into variables, constants, and literals. Then
11303 associate each group together, the constants with literals,
11304 then the result with variables. This increases the chances of
11305 literals being recombined later and of generating relocatable
11306 expressions for the sum of a constant and literal. */
11307 var0 = split_tree (arg0, type, code,
11308 &minus_var0, &con0, &minus_con0,
11309 &lit0, &minus_lit0, 0);
11310 var1 = split_tree (arg1, type, code,
11311 &minus_var1, &con1, &minus_con1,
11312 &lit1, &minus_lit1, code == MINUS_EXPR);
11314 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11315 if (code == MINUS_EXPR)
11316 code = PLUS_EXPR;
11318 /* With undefined overflow prefer doing association in a type
11319 which wraps on overflow, if that is one of the operand types. */
11320 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11321 && !TYPE_OVERFLOW_WRAPS (type))
11323 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11324 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11325 atype = TREE_TYPE (arg0);
11326 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11327 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11328 atype = TREE_TYPE (arg1);
11329 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11332 /* With undefined overflow we can only associate constants with one
11333 variable, and constants whose association doesn't overflow. */
11334 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11335 && !TYPE_OVERFLOW_WRAPS (atype))
11337 if ((var0 && var1) || (minus_var0 && minus_var1))
11339 /* ??? If split_tree would handle NEGATE_EXPR we could
11340 simply reject these cases and the allowed cases would
11341 be the var0/minus_var1 ones. */
11342 tree tmp0 = var0 ? var0 : minus_var0;
11343 tree tmp1 = var1 ? var1 : minus_var1;
11344 bool one_neg = false;
11346 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11348 tmp0 = TREE_OPERAND (tmp0, 0);
11349 one_neg = !one_neg;
11351 if (CONVERT_EXPR_P (tmp0)
11352 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11353 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11354 <= TYPE_PRECISION (atype)))
11355 tmp0 = TREE_OPERAND (tmp0, 0);
11356 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11358 tmp1 = TREE_OPERAND (tmp1, 0);
11359 one_neg = !one_neg;
11361 if (CONVERT_EXPR_P (tmp1)
11362 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11363 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11364 <= TYPE_PRECISION (atype)))
11365 tmp1 = TREE_OPERAND (tmp1, 0);
11366 /* The only case we can still associate with two variables
11367 is if they cancel out. */
11368 if (!one_neg
11369 || !operand_equal_p (tmp0, tmp1, 0))
11370 ok = false;
11372 else if ((var0 && minus_var1
11373 && ! operand_equal_p (var0, minus_var1, 0))
11374 || (minus_var0 && var1
11375 && ! operand_equal_p (minus_var0, var1, 0)))
11376 ok = false;
11379 /* Only do something if we found more than two objects. Otherwise,
11380 nothing has changed and we risk infinite recursion. */
11381 if (ok
11382 && ((var0 != 0) + (var1 != 0)
11383 + (minus_var0 != 0) + (minus_var1 != 0)
11384 + (con0 != 0) + (con1 != 0)
11385 + (minus_con0 != 0) + (minus_con1 != 0)
11386 + (lit0 != 0) + (lit1 != 0)
11387 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11389 var0 = associate_trees (loc, var0, var1, code, atype);
11390 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11391 code, atype);
11392 con0 = associate_trees (loc, con0, con1, code, atype);
11393 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11394 code, atype);
11395 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11396 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11397 code, atype);
11399 if (minus_var0 && var0)
11401 var0 = associate_trees (loc, var0, minus_var0,
11402 MINUS_EXPR, atype);
11403 minus_var0 = 0;
11405 if (minus_con0 && con0)
11407 con0 = associate_trees (loc, con0, minus_con0,
11408 MINUS_EXPR, atype);
11409 minus_con0 = 0;
11412 /* Preserve the MINUS_EXPR if the negative part of the literal is
11413 greater than the positive part. Otherwise, the multiplicative
11414 folding code (i.e extract_muldiv) may be fooled in case
11415 unsigned constants are subtracted, like in the following
11416 example: ((X*2 + 4) - 8U)/2. */
11417 if (minus_lit0 && lit0)
11419 if (TREE_CODE (lit0) == INTEGER_CST
11420 && TREE_CODE (minus_lit0) == INTEGER_CST
11421 && tree_int_cst_lt (lit0, minus_lit0)
11422 /* But avoid ending up with only negated parts. */
11423 && (var0 || con0))
11425 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11426 MINUS_EXPR, atype);
11427 lit0 = 0;
11429 else
11431 lit0 = associate_trees (loc, lit0, minus_lit0,
11432 MINUS_EXPR, atype);
11433 minus_lit0 = 0;
11437 /* Don't introduce overflows through reassociation. */
11438 if ((lit0 && TREE_OVERFLOW_P (lit0))
11439 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11440 return NULL_TREE;
11442 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11443 con0 = associate_trees (loc, con0, lit0, code, atype);
11444 lit0 = 0;
11445 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11446 code, atype);
11447 minus_lit0 = 0;
11449 /* Eliminate minus_con0. */
11450 if (minus_con0)
11452 if (con0)
11453 con0 = associate_trees (loc, con0, minus_con0,
11454 MINUS_EXPR, atype);
11455 else if (var0)
11456 var0 = associate_trees (loc, var0, minus_con0,
11457 MINUS_EXPR, atype);
11458 else
11459 gcc_unreachable ();
11460 minus_con0 = 0;
11463 /* Eliminate minus_var0. */
11464 if (minus_var0)
11466 if (con0)
11467 con0 = associate_trees (loc, con0, minus_var0,
11468 MINUS_EXPR, atype);
11469 else
11470 gcc_unreachable ();
11471 minus_var0 = 0;
11474 return
11475 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11476 code, atype));
11480 return NULL_TREE;
11482 case POINTER_DIFF_EXPR:
11483 case MINUS_EXPR:
11484 /* Fold &a[i] - &a[j] to i-j. */
11485 if (TREE_CODE (arg0) == ADDR_EXPR
11486 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11487 && TREE_CODE (arg1) == ADDR_EXPR
11488 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11490 tree tem = fold_addr_of_array_ref_difference (loc, type,
11491 TREE_OPERAND (arg0, 0),
11492 TREE_OPERAND (arg1, 0),
11493 code
11494 == POINTER_DIFF_EXPR);
11495 if (tem)
11496 return tem;
11499 /* Further transformations are not for pointers. */
11500 if (code == POINTER_DIFF_EXPR)
11501 return NULL_TREE;
11503 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11504 if (TREE_CODE (arg0) == NEGATE_EXPR
11505 && negate_expr_p (op1)
11506 /* If arg0 is e.g. unsigned int and type is int, then this could
11507 introduce UB, because if A is INT_MIN at runtime, the original
11508 expression can be well defined while the latter is not.
11509 See PR83269. */
11510 && !(ANY_INTEGRAL_TYPE_P (type)
11511 && TYPE_OVERFLOW_UNDEFINED (type)
11512 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11513 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11514 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11515 fold_convert_loc (loc, type,
11516 TREE_OPERAND (arg0, 0)));
11518 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11519 __complex__ ( x, -y ). This is not the same for SNaNs or if
11520 signed zeros are involved. */
11521 if (!HONOR_SNANS (arg0)
11522 && !HONOR_SIGNED_ZEROS (arg0)
11523 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11525 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11526 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11527 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11528 bool arg0rz = false, arg0iz = false;
11529 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11530 || (arg0i && (arg0iz = real_zerop (arg0i))))
11532 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11533 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11534 if (arg0rz && arg1i && real_zerop (arg1i))
11536 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11537 arg1r ? arg1r
11538 : build1 (REALPART_EXPR, rtype, arg1));
11539 tree ip = arg0i ? arg0i
11540 : build1 (IMAGPART_EXPR, rtype, arg0);
11541 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11543 else if (arg0iz && arg1r && real_zerop (arg1r))
11545 tree rp = arg0r ? arg0r
11546 : build1 (REALPART_EXPR, rtype, arg0);
11547 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11548 arg1i ? arg1i
11549 : build1 (IMAGPART_EXPR, rtype, arg1));
11550 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11555 /* A - B -> A + (-B) if B is easily negatable. */
11556 if (negate_expr_p (op1)
11557 && ! TYPE_OVERFLOW_SANITIZED (type)
11558 && ((FLOAT_TYPE_P (type)
11559 /* Avoid this transformation if B is a positive REAL_CST. */
11560 && (TREE_CODE (op1) != REAL_CST
11561 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11562 || INTEGRAL_TYPE_P (type)))
11563 return fold_build2_loc (loc, PLUS_EXPR, type,
11564 fold_convert_loc (loc, type, arg0),
11565 negate_expr (op1));
11567 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11568 one. Make sure the type is not saturating and has the signedness of
11569 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11570 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11571 if ((TREE_CODE (arg0) == MULT_EXPR
11572 || TREE_CODE (arg1) == MULT_EXPR)
11573 && !TYPE_SATURATING (type)
11574 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11575 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11576 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11578 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11579 if (tem)
11580 return tem;
11583 goto associate;
11585 case MULT_EXPR:
11586 if (! FLOAT_TYPE_P (type))
11588 /* Transform x * -C into -x * C if x is easily negatable. */
11589 if (TREE_CODE (op1) == INTEGER_CST
11590 && tree_int_cst_sgn (op1) == -1
11591 && negate_expr_p (op0)
11592 && negate_expr_p (op1)
11593 && (tem = negate_expr (op1)) != op1
11594 && ! TREE_OVERFLOW (tem))
11595 return fold_build2_loc (loc, MULT_EXPR, type,
11596 fold_convert_loc (loc, type,
11597 negate_expr (op0)), tem);
11599 strict_overflow_p = false;
11600 if (TREE_CODE (arg1) == INTEGER_CST
11601 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11602 &strict_overflow_p)) != 0)
11604 if (strict_overflow_p)
11605 fold_overflow_warning (("assuming signed overflow does not "
11606 "occur when simplifying "
11607 "multiplication"),
11608 WARN_STRICT_OVERFLOW_MISC);
11609 return fold_convert_loc (loc, type, tem);
11612 /* Optimize z * conj(z) for integer complex numbers. */
11613 if (TREE_CODE (arg0) == CONJ_EXPR
11614 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11615 return fold_mult_zconjz (loc, type, arg1);
11616 if (TREE_CODE (arg1) == CONJ_EXPR
11617 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11618 return fold_mult_zconjz (loc, type, arg0);
11620 else
11622 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11623 This is not the same for NaNs or if signed zeros are
11624 involved. */
11625 if (!HONOR_NANS (arg0)
11626 && !HONOR_SIGNED_ZEROS (arg0)
11627 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11628 && TREE_CODE (arg1) == COMPLEX_CST
11629 && real_zerop (TREE_REALPART (arg1)))
11631 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11632 if (real_onep (TREE_IMAGPART (arg1)))
11633 return
11634 fold_build2_loc (loc, COMPLEX_EXPR, type,
11635 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11636 rtype, arg0)),
11637 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11638 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11639 return
11640 fold_build2_loc (loc, COMPLEX_EXPR, type,
11641 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11642 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11643 rtype, arg0)));
11646 /* Optimize z * conj(z) for floating point complex numbers.
11647 Guarded by flag_unsafe_math_optimizations as non-finite
11648 imaginary components don't produce scalar results. */
11649 if (flag_unsafe_math_optimizations
11650 && TREE_CODE (arg0) == CONJ_EXPR
11651 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11652 return fold_mult_zconjz (loc, type, arg1);
11653 if (flag_unsafe_math_optimizations
11654 && TREE_CODE (arg1) == CONJ_EXPR
11655 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11656 return fold_mult_zconjz (loc, type, arg0);
11658 goto associate;
11660 case BIT_IOR_EXPR:
11661 /* Canonicalize (X & C1) | C2. */
11662 if (TREE_CODE (arg0) == BIT_AND_EXPR
11663 && TREE_CODE (arg1) == INTEGER_CST
11664 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11666 int width = TYPE_PRECISION (type), w;
11667 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11668 wide_int c2 = wi::to_wide (arg1);
11670 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11671 if ((c1 & c2) == c1)
11672 return omit_one_operand_loc (loc, type, arg1,
11673 TREE_OPERAND (arg0, 0));
11675 wide_int msk = wi::mask (width, false,
11676 TYPE_PRECISION (TREE_TYPE (arg1)));
11678 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11679 if (wi::bit_and_not (msk, c1 | c2) == 0)
11681 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11682 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11685 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11686 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11687 mode which allows further optimizations. */
11688 c1 &= msk;
11689 c2 &= msk;
11690 wide_int c3 = wi::bit_and_not (c1, c2);
11691 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11693 wide_int mask = wi::mask (w, false,
11694 TYPE_PRECISION (type));
11695 if (((c1 | c2) & mask) == mask
11696 && wi::bit_and_not (c1, mask) == 0)
11698 c3 = mask;
11699 break;
11703 if (c3 != c1)
11705 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11706 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11707 wide_int_to_tree (type, c3));
11708 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11712 /* See if this can be simplified into a rotate first. If that
11713 is unsuccessful continue in the association code. */
11714 goto bit_rotate;
11716 case BIT_XOR_EXPR:
11717 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11718 if (TREE_CODE (arg0) == BIT_AND_EXPR
11719 && INTEGRAL_TYPE_P (type)
11720 && integer_onep (TREE_OPERAND (arg0, 1))
11721 && integer_onep (arg1))
11722 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11723 build_zero_cst (TREE_TYPE (arg0)));
11725 /* See if this can be simplified into a rotate first. If that
11726 is unsuccessful continue in the association code. */
11727 goto bit_rotate;
11729 case BIT_AND_EXPR:
11730 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11731 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11732 && INTEGRAL_TYPE_P (type)
11733 && integer_onep (TREE_OPERAND (arg0, 1))
11734 && integer_onep (arg1))
11736 tree tem2;
11737 tem = TREE_OPERAND (arg0, 0);
11738 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11739 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11740 tem, tem2);
11741 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11742 build_zero_cst (TREE_TYPE (tem)));
11744 /* Fold ~X & 1 as (X & 1) == 0. */
11745 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11746 && INTEGRAL_TYPE_P (type)
11747 && integer_onep (arg1))
11749 tree tem2;
11750 tem = TREE_OPERAND (arg0, 0);
11751 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11752 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11753 tem, tem2);
11754 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11755 build_zero_cst (TREE_TYPE (tem)));
11757 /* Fold !X & 1 as X == 0. */
11758 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11759 && integer_onep (arg1))
11761 tem = TREE_OPERAND (arg0, 0);
11762 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11763 build_zero_cst (TREE_TYPE (tem)));
11766 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11767 multiple of 1 << CST. */
11768 if (TREE_CODE (arg1) == INTEGER_CST)
11770 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11771 wide_int ncst1 = -cst1;
11772 if ((cst1 & ncst1) == ncst1
11773 && multiple_of_p (type, arg0,
11774 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11775 return fold_convert_loc (loc, type, arg0);
11778 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11779 bits from CST2. */
11780 if (TREE_CODE (arg1) == INTEGER_CST
11781 && TREE_CODE (arg0) == MULT_EXPR
11782 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11784 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11785 wide_int masked
11786 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11788 if (masked == 0)
11789 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11790 arg0, arg1);
11791 else if (masked != warg1)
11793 /* Avoid the transform if arg1 is a mask of some
11794 mode which allows further optimizations. */
11795 int pop = wi::popcount (warg1);
11796 if (!(pop >= BITS_PER_UNIT
11797 && pow2p_hwi (pop)
11798 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11799 return fold_build2_loc (loc, code, type, op0,
11800 wide_int_to_tree (type, masked));
11804 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11805 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11806 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11808 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11810 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11811 if (mask == -1)
11812 return
11813 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11816 goto associate;
11818 case RDIV_EXPR:
11819 /* Don't touch a floating-point divide by zero unless the mode
11820 of the constant can represent infinity. */
11821 if (TREE_CODE (arg1) == REAL_CST
11822 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11823 && real_zerop (arg1))
11824 return NULL_TREE;
11826 /* (-A) / (-B) -> A / B */
11827 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11828 return fold_build2_loc (loc, RDIV_EXPR, type,
11829 TREE_OPERAND (arg0, 0),
11830 negate_expr (arg1));
11831 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11832 return fold_build2_loc (loc, RDIV_EXPR, type,
11833 negate_expr (arg0),
11834 TREE_OPERAND (arg1, 0));
11835 return NULL_TREE;
11837 case TRUNC_DIV_EXPR:
11838 /* Fall through */
11840 case FLOOR_DIV_EXPR:
11841 /* Simplify A / (B << N) where A and B are positive and B is
11842 a power of 2, to A >> (N + log2(B)). */
11843 strict_overflow_p = false;
11844 if (TREE_CODE (arg1) == LSHIFT_EXPR
11845 && (TYPE_UNSIGNED (type)
11846 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11848 tree sval = TREE_OPERAND (arg1, 0);
11849 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11851 tree sh_cnt = TREE_OPERAND (arg1, 1);
11852 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11853 wi::exact_log2 (wi::to_wide (sval)));
11855 if (strict_overflow_p)
11856 fold_overflow_warning (("assuming signed overflow does not "
11857 "occur when simplifying A / (B << N)"),
11858 WARN_STRICT_OVERFLOW_MISC);
11860 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11861 sh_cnt, pow2);
11862 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11863 fold_convert_loc (loc, type, arg0), sh_cnt);
11867 /* Fall through */
11869 case ROUND_DIV_EXPR:
11870 case CEIL_DIV_EXPR:
11871 case EXACT_DIV_EXPR:
11872 if (integer_zerop (arg1))
11873 return NULL_TREE;
11875 /* Convert -A / -B to A / B when the type is signed and overflow is
11876 undefined. */
11877 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11878 && TREE_CODE (op0) == NEGATE_EXPR
11879 && negate_expr_p (op1))
11881 if (ANY_INTEGRAL_TYPE_P (type))
11882 fold_overflow_warning (("assuming signed overflow does not occur "
11883 "when distributing negation across "
11884 "division"),
11885 WARN_STRICT_OVERFLOW_MISC);
11886 return fold_build2_loc (loc, code, type,
11887 fold_convert_loc (loc, type,
11888 TREE_OPERAND (arg0, 0)),
11889 negate_expr (op1));
11891 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11892 && TREE_CODE (arg1) == NEGATE_EXPR
11893 && negate_expr_p (op0))
11895 if (ANY_INTEGRAL_TYPE_P (type))
11896 fold_overflow_warning (("assuming signed overflow does not occur "
11897 "when distributing negation across "
11898 "division"),
11899 WARN_STRICT_OVERFLOW_MISC);
11900 return fold_build2_loc (loc, code, type,
11901 negate_expr (op0),
11902 fold_convert_loc (loc, type,
11903 TREE_OPERAND (arg1, 0)));
11906 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11907 operation, EXACT_DIV_EXPR.
11909 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11910 At one time others generated faster code, it's not clear if they do
11911 after the last round to changes to the DIV code in expmed.cc. */
11912 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11913 && multiple_of_p (type, arg0, arg1))
11914 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11915 fold_convert (type, arg0),
11916 fold_convert (type, arg1));
11918 strict_overflow_p = false;
11919 if (TREE_CODE (arg1) == INTEGER_CST
11920 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11921 &strict_overflow_p)) != 0)
11923 if (strict_overflow_p)
11924 fold_overflow_warning (("assuming signed overflow does not occur "
11925 "when simplifying division"),
11926 WARN_STRICT_OVERFLOW_MISC);
11927 return fold_convert_loc (loc, type, tem);
11930 return NULL_TREE;
11932 case CEIL_MOD_EXPR:
11933 case FLOOR_MOD_EXPR:
11934 case ROUND_MOD_EXPR:
11935 case TRUNC_MOD_EXPR:
11936 strict_overflow_p = false;
11937 if (TREE_CODE (arg1) == INTEGER_CST
11938 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11939 &strict_overflow_p)) != 0)
11941 if (strict_overflow_p)
11942 fold_overflow_warning (("assuming signed overflow does not occur "
11943 "when simplifying modulus"),
11944 WARN_STRICT_OVERFLOW_MISC);
11945 return fold_convert_loc (loc, type, tem);
11948 return NULL_TREE;
11950 case LROTATE_EXPR:
11951 case RROTATE_EXPR:
11952 case RSHIFT_EXPR:
11953 case LSHIFT_EXPR:
11954 /* Since negative shift count is not well-defined,
11955 don't try to compute it in the compiler. */
11956 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11957 return NULL_TREE;
11959 prec = element_precision (type);
11961 /* If we have a rotate of a bit operation with the rotate count and
11962 the second operand of the bit operation both constant,
11963 permute the two operations. */
11964 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11965 && (TREE_CODE (arg0) == BIT_AND_EXPR
11966 || TREE_CODE (arg0) == BIT_IOR_EXPR
11967 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11968 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11970 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11971 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11972 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11973 fold_build2_loc (loc, code, type,
11974 arg00, arg1),
11975 fold_build2_loc (loc, code, type,
11976 arg01, arg1));
11979 /* Two consecutive rotates adding up to the some integer
11980 multiple of the precision of the type can be ignored. */
11981 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11982 && TREE_CODE (arg0) == RROTATE_EXPR
11983 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11984 && wi::umod_trunc (wi::to_wide (arg1)
11985 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11986 prec) == 0)
11987 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11989 return NULL_TREE;
11991 case MIN_EXPR:
11992 case MAX_EXPR:
11993 goto associate;
11995 case TRUTH_ANDIF_EXPR:
11996 /* Note that the operands of this must be ints
11997 and their values must be 0 or 1.
11998 ("true" is a fixed value perhaps depending on the language.) */
11999 /* If first arg is constant zero, return it. */
12000 if (integer_zerop (arg0))
12001 return fold_convert_loc (loc, type, arg0);
12002 /* FALLTHRU */
12003 case TRUTH_AND_EXPR:
12004 /* If either arg is constant true, drop it. */
12005 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12006 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12007 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12008 /* Preserve sequence points. */
12009 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12010 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12011 /* If second arg is constant zero, result is zero, but first arg
12012 must be evaluated. */
12013 if (integer_zerop (arg1))
12014 return omit_one_operand_loc (loc, type, arg1, arg0);
12015 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12016 case will be handled here. */
12017 if (integer_zerop (arg0))
12018 return omit_one_operand_loc (loc, type, arg0, arg1);
12020 /* !X && X is always false. */
12021 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12022 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12023 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12024 /* X && !X is always false. */
12025 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12026 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12027 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12029 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12030 means A >= Y && A != MAX, but in this case we know that
12031 A < X <= MAX. */
12033 if (!TREE_SIDE_EFFECTS (arg0)
12034 && !TREE_SIDE_EFFECTS (arg1))
12036 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12037 if (tem && !operand_equal_p (tem, arg0, 0))
12038 return fold_convert (type,
12039 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12040 tem, arg1));
12042 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12043 if (tem && !operand_equal_p (tem, arg1, 0))
12044 return fold_convert (type,
12045 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12046 arg0, tem));
12049 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12050 != NULL_TREE)
12051 return tem;
12053 return NULL_TREE;
12055 case TRUTH_ORIF_EXPR:
12056 /* Note that the operands of this must be ints
12057 and their values must be 0 or true.
12058 ("true" is a fixed value perhaps depending on the language.) */
12059 /* If first arg is constant true, return it. */
12060 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12061 return fold_convert_loc (loc, type, arg0);
12062 /* FALLTHRU */
12063 case TRUTH_OR_EXPR:
12064 /* If either arg is constant zero, drop it. */
12065 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12066 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12067 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12068 /* Preserve sequence points. */
12069 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12070 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12071 /* If second arg is constant true, result is true, but we must
12072 evaluate first arg. */
12073 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12074 return omit_one_operand_loc (loc, type, arg1, arg0);
12075 /* Likewise for first arg, but note this only occurs here for
12076 TRUTH_OR_EXPR. */
12077 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12078 return omit_one_operand_loc (loc, type, arg0, arg1);
12080 /* !X || X is always true. */
12081 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12082 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12083 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12084 /* X || !X is always true. */
12085 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12086 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12087 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12089 /* (X && !Y) || (!X && Y) is X ^ Y */
12090 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12091 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12093 tree a0, a1, l0, l1, n0, n1;
12095 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12096 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12098 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12099 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12101 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12102 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12104 if ((operand_equal_p (n0, a0, 0)
12105 && operand_equal_p (n1, a1, 0))
12106 || (operand_equal_p (n0, a1, 0)
12107 && operand_equal_p (n1, a0, 0)))
12108 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12111 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12112 != NULL_TREE)
12113 return tem;
12115 return NULL_TREE;
12117 case TRUTH_XOR_EXPR:
12118 /* If the second arg is constant zero, drop it. */
12119 if (integer_zerop (arg1))
12120 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12121 /* If the second arg is constant true, this is a logical inversion. */
12122 if (integer_onep (arg1))
12124 tem = invert_truthvalue_loc (loc, arg0);
12125 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12127 /* Identical arguments cancel to zero. */
12128 if (operand_equal_p (arg0, arg1, 0))
12129 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12131 /* !X ^ X is always true. */
12132 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12134 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12136 /* X ^ !X is always true. */
12137 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12138 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12139 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12141 return NULL_TREE;
12143 case EQ_EXPR:
12144 case NE_EXPR:
12145 STRIP_NOPS (arg0);
12146 STRIP_NOPS (arg1);
12148 tem = fold_comparison (loc, code, type, op0, op1);
12149 if (tem != NULL_TREE)
12150 return tem;
12152 /* bool_var != 1 becomes !bool_var. */
12153 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12154 && code == NE_EXPR)
12155 return fold_convert_loc (loc, type,
12156 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12157 TREE_TYPE (arg0), arg0));
12159 /* bool_var == 0 becomes !bool_var. */
12160 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12161 && code == EQ_EXPR)
12162 return fold_convert_loc (loc, type,
12163 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12164 TREE_TYPE (arg0), arg0));
12166 /* !exp != 0 becomes !exp */
12167 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12168 && code == NE_EXPR)
12169 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12171 /* If this is an EQ or NE comparison with zero and ARG0 is
12172 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12173 two operations, but the latter can be done in one less insn
12174 on machines that have only two-operand insns or on which a
12175 constant cannot be the first operand. */
12176 if (TREE_CODE (arg0) == BIT_AND_EXPR
12177 && integer_zerop (arg1))
12179 tree arg00 = TREE_OPERAND (arg0, 0);
12180 tree arg01 = TREE_OPERAND (arg0, 1);
12181 if (TREE_CODE (arg00) == LSHIFT_EXPR
12182 && integer_onep (TREE_OPERAND (arg00, 0)))
12184 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12185 arg01, TREE_OPERAND (arg00, 1));
12186 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12187 build_one_cst (TREE_TYPE (arg0)));
12188 return fold_build2_loc (loc, code, type,
12189 fold_convert_loc (loc, TREE_TYPE (arg1),
12190 tem), arg1);
12192 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12193 && integer_onep (TREE_OPERAND (arg01, 0)))
12195 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12196 arg00, TREE_OPERAND (arg01, 1));
12197 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12198 build_one_cst (TREE_TYPE (arg0)));
12199 return fold_build2_loc (loc, code, type,
12200 fold_convert_loc (loc, TREE_TYPE (arg1),
12201 tem), arg1);
12205 /* If this is a comparison of a field, we may be able to simplify it. */
12206 if ((TREE_CODE (arg0) == COMPONENT_REF
12207 || TREE_CODE (arg0) == BIT_FIELD_REF)
12208 /* Handle the constant case even without -O
12209 to make sure the warnings are given. */
12210 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12212 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12213 if (t1)
12214 return t1;
12217 /* Optimize comparisons of strlen vs zero to a compare of the
12218 first character of the string vs zero. To wit,
12219 strlen(ptr) == 0 => *ptr == 0
12220 strlen(ptr) != 0 => *ptr != 0
12221 Other cases should reduce to one of these two (or a constant)
12222 due to the return value of strlen being unsigned. */
12223 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12225 tree fndecl = get_callee_fndecl (arg0);
12227 if (fndecl
12228 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12229 && call_expr_nargs (arg0) == 1
12230 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12231 == POINTER_TYPE))
12233 tree ptrtype
12234 = build_pointer_type (build_qualified_type (char_type_node,
12235 TYPE_QUAL_CONST));
12236 tree ptr = fold_convert_loc (loc, ptrtype,
12237 CALL_EXPR_ARG (arg0, 0));
12238 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12239 return fold_build2_loc (loc, code, type, iref,
12240 build_int_cst (TREE_TYPE (iref), 0));
12244 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12245 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12246 if (TREE_CODE (arg0) == RSHIFT_EXPR
12247 && integer_zerop (arg1)
12248 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12250 tree arg00 = TREE_OPERAND (arg0, 0);
12251 tree arg01 = TREE_OPERAND (arg0, 1);
12252 tree itype = TREE_TYPE (arg00);
12253 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12255 if (TYPE_UNSIGNED (itype))
12257 itype = signed_type_for (itype);
12258 arg00 = fold_convert_loc (loc, itype, arg00);
12260 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12261 type, arg00, build_zero_cst (itype));
12265 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12266 (X & C) == 0 when C is a single bit. */
12267 if (TREE_CODE (arg0) == BIT_AND_EXPR
12268 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12269 && integer_zerop (arg1)
12270 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12272 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12273 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12274 TREE_OPERAND (arg0, 1));
12275 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12276 type, tem,
12277 fold_convert_loc (loc, TREE_TYPE (arg0),
12278 arg1));
12281 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12282 constant C is a power of two, i.e. a single bit. */
12283 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12284 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12285 && integer_zerop (arg1)
12286 && integer_pow2p (TREE_OPERAND (arg0, 1))
12287 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12288 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12290 tree arg00 = TREE_OPERAND (arg0, 0);
12291 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12292 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12295 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12296 when is C is a power of two, i.e. a single bit. */
12297 if (TREE_CODE (arg0) == BIT_AND_EXPR
12298 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12299 && integer_zerop (arg1)
12300 && integer_pow2p (TREE_OPERAND (arg0, 1))
12301 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12302 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12304 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12305 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12306 arg000, TREE_OPERAND (arg0, 1));
12307 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12308 tem, build_int_cst (TREE_TYPE (tem), 0));
12311 if (integer_zerop (arg1)
12312 && tree_expr_nonzero_p (arg0))
12314 tree res = constant_boolean_node (code==NE_EXPR, type);
12315 return omit_one_operand_loc (loc, type, res, arg0);
12318 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12319 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12321 tree arg00 = TREE_OPERAND (arg0, 0);
12322 tree arg01 = TREE_OPERAND (arg0, 1);
12323 tree arg10 = TREE_OPERAND (arg1, 0);
12324 tree arg11 = TREE_OPERAND (arg1, 1);
12325 tree itype = TREE_TYPE (arg0);
12327 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12328 operand_equal_p guarantees no side-effects so we don't need
12329 to use omit_one_operand on Z. */
12330 if (operand_equal_p (arg01, arg11, 0))
12331 return fold_build2_loc (loc, code, type, arg00,
12332 fold_convert_loc (loc, TREE_TYPE (arg00),
12333 arg10));
12334 if (operand_equal_p (arg01, arg10, 0))
12335 return fold_build2_loc (loc, code, type, arg00,
12336 fold_convert_loc (loc, TREE_TYPE (arg00),
12337 arg11));
12338 if (operand_equal_p (arg00, arg11, 0))
12339 return fold_build2_loc (loc, code, type, arg01,
12340 fold_convert_loc (loc, TREE_TYPE (arg01),
12341 arg10));
12342 if (operand_equal_p (arg00, arg10, 0))
12343 return fold_build2_loc (loc, code, type, arg01,
12344 fold_convert_loc (loc, TREE_TYPE (arg01),
12345 arg11));
12347 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12348 if (TREE_CODE (arg01) == INTEGER_CST
12349 && TREE_CODE (arg11) == INTEGER_CST)
12351 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12352 fold_convert_loc (loc, itype, arg11));
12353 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12354 return fold_build2_loc (loc, code, type, tem,
12355 fold_convert_loc (loc, itype, arg10));
12359 /* Attempt to simplify equality/inequality comparisons of complex
12360 values. Only lower the comparison if the result is known or
12361 can be simplified to a single scalar comparison. */
12362 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12363 || TREE_CODE (arg0) == COMPLEX_CST)
12364 && (TREE_CODE (arg1) == COMPLEX_EXPR
12365 || TREE_CODE (arg1) == COMPLEX_CST))
12367 tree real0, imag0, real1, imag1;
12368 tree rcond, icond;
12370 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12372 real0 = TREE_OPERAND (arg0, 0);
12373 imag0 = TREE_OPERAND (arg0, 1);
12375 else
12377 real0 = TREE_REALPART (arg0);
12378 imag0 = TREE_IMAGPART (arg0);
12381 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12383 real1 = TREE_OPERAND (arg1, 0);
12384 imag1 = TREE_OPERAND (arg1, 1);
12386 else
12388 real1 = TREE_REALPART (arg1);
12389 imag1 = TREE_IMAGPART (arg1);
12392 rcond = fold_binary_loc (loc, code, type, real0, real1);
12393 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12395 if (integer_zerop (rcond))
12397 if (code == EQ_EXPR)
12398 return omit_two_operands_loc (loc, type, boolean_false_node,
12399 imag0, imag1);
12400 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12402 else
12404 if (code == NE_EXPR)
12405 return omit_two_operands_loc (loc, type, boolean_true_node,
12406 imag0, imag1);
12407 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12411 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12412 if (icond && TREE_CODE (icond) == INTEGER_CST)
12414 if (integer_zerop (icond))
12416 if (code == EQ_EXPR)
12417 return omit_two_operands_loc (loc, type, boolean_false_node,
12418 real0, real1);
12419 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12421 else
12423 if (code == NE_EXPR)
12424 return omit_two_operands_loc (loc, type, boolean_true_node,
12425 real0, real1);
12426 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12431 return NULL_TREE;
12433 case LT_EXPR:
12434 case GT_EXPR:
12435 case LE_EXPR:
12436 case GE_EXPR:
12437 tem = fold_comparison (loc, code, type, op0, op1);
12438 if (tem != NULL_TREE)
12439 return tem;
12441 /* Transform comparisons of the form X +- C CMP X. */
12442 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12443 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12444 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12445 && !HONOR_SNANS (arg0))
12447 tree arg01 = TREE_OPERAND (arg0, 1);
12448 enum tree_code code0 = TREE_CODE (arg0);
12449 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12451 /* (X - c) > X becomes false. */
12452 if (code == GT_EXPR
12453 && ((code0 == MINUS_EXPR && is_positive >= 0)
12454 || (code0 == PLUS_EXPR && is_positive <= 0)))
12455 return constant_boolean_node (0, type);
12457 /* Likewise (X + c) < X becomes false. */
12458 if (code == LT_EXPR
12459 && ((code0 == PLUS_EXPR && is_positive >= 0)
12460 || (code0 == MINUS_EXPR && is_positive <= 0)))
12461 return constant_boolean_node (0, type);
12463 /* Convert (X - c) <= X to true. */
12464 if (!HONOR_NANS (arg1)
12465 && code == LE_EXPR
12466 && ((code0 == MINUS_EXPR && is_positive >= 0)
12467 || (code0 == PLUS_EXPR && is_positive <= 0)))
12468 return constant_boolean_node (1, type);
12470 /* Convert (X + c) >= X to true. */
12471 if (!HONOR_NANS (arg1)
12472 && code == GE_EXPR
12473 && ((code0 == PLUS_EXPR && is_positive >= 0)
12474 || (code0 == MINUS_EXPR && is_positive <= 0)))
12475 return constant_boolean_node (1, type);
12478 /* If we are comparing an ABS_EXPR with a constant, we can
12479 convert all the cases into explicit comparisons, but they may
12480 well not be faster than doing the ABS and one comparison.
12481 But ABS (X) <= C is a range comparison, which becomes a subtraction
12482 and a comparison, and is probably faster. */
12483 if (code == LE_EXPR
12484 && TREE_CODE (arg1) == INTEGER_CST
12485 && TREE_CODE (arg0) == ABS_EXPR
12486 && ! TREE_SIDE_EFFECTS (arg0)
12487 && (tem = negate_expr (arg1)) != 0
12488 && TREE_CODE (tem) == INTEGER_CST
12489 && !TREE_OVERFLOW (tem))
12490 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12491 build2 (GE_EXPR, type,
12492 TREE_OPERAND (arg0, 0), tem),
12493 build2 (LE_EXPR, type,
12494 TREE_OPERAND (arg0, 0), arg1));
12496 /* Convert ABS_EXPR<x> >= 0 to true. */
12497 strict_overflow_p = false;
12498 if (code == GE_EXPR
12499 && (integer_zerop (arg1)
12500 || (! HONOR_NANS (arg0)
12501 && real_zerop (arg1)))
12502 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12504 if (strict_overflow_p)
12505 fold_overflow_warning (("assuming signed overflow does not occur "
12506 "when simplifying comparison of "
12507 "absolute value and zero"),
12508 WARN_STRICT_OVERFLOW_CONDITIONAL);
12509 return omit_one_operand_loc (loc, type,
12510 constant_boolean_node (true, type),
12511 arg0);
12514 /* Convert ABS_EXPR<x> < 0 to false. */
12515 strict_overflow_p = false;
12516 if (code == LT_EXPR
12517 && (integer_zerop (arg1) || real_zerop (arg1))
12518 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12520 if (strict_overflow_p)
12521 fold_overflow_warning (("assuming signed overflow does not occur "
12522 "when simplifying comparison of "
12523 "absolute value and zero"),
12524 WARN_STRICT_OVERFLOW_CONDITIONAL);
12525 return omit_one_operand_loc (loc, type,
12526 constant_boolean_node (false, type),
12527 arg0);
12530 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12531 and similarly for >= into !=. */
12532 if ((code == LT_EXPR || code == GE_EXPR)
12533 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12534 && TREE_CODE (arg1) == LSHIFT_EXPR
12535 && integer_onep (TREE_OPERAND (arg1, 0)))
12536 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12537 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12538 TREE_OPERAND (arg1, 1)),
12539 build_zero_cst (TREE_TYPE (arg0)));
12541 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12542 otherwise Y might be >= # of bits in X's type and thus e.g.
12543 (unsigned char) (1 << Y) for Y 15 might be 0.
12544 If the cast is widening, then 1 << Y should have unsigned type,
12545 otherwise if Y is number of bits in the signed shift type minus 1,
12546 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12547 31 might be 0xffffffff80000000. */
12548 if ((code == LT_EXPR || code == GE_EXPR)
12549 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12550 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12551 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12552 && CONVERT_EXPR_P (arg1)
12553 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12554 && (element_precision (TREE_TYPE (arg1))
12555 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12556 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12557 || (element_precision (TREE_TYPE (arg1))
12558 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12559 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12561 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12562 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12563 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12564 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12565 build_zero_cst (TREE_TYPE (arg0)));
12568 return NULL_TREE;
12570 case UNORDERED_EXPR:
12571 case ORDERED_EXPR:
12572 case UNLT_EXPR:
12573 case UNLE_EXPR:
12574 case UNGT_EXPR:
12575 case UNGE_EXPR:
12576 case UNEQ_EXPR:
12577 case LTGT_EXPR:
12578 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12580 tree targ0 = strip_float_extensions (arg0);
12581 tree targ1 = strip_float_extensions (arg1);
12582 tree newtype = TREE_TYPE (targ0);
12584 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12585 newtype = TREE_TYPE (targ1);
12587 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12588 return fold_build2_loc (loc, code, type,
12589 fold_convert_loc (loc, newtype, targ0),
12590 fold_convert_loc (loc, newtype, targ1));
12593 return NULL_TREE;
12595 case COMPOUND_EXPR:
12596 /* When pedantic, a compound expression can be neither an lvalue
12597 nor an integer constant expression. */
12598 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12599 return NULL_TREE;
12600 /* Don't let (0, 0) be null pointer constant. */
12601 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12602 : fold_convert_loc (loc, type, arg1);
12603 return tem;
12605 case ASSERT_EXPR:
12606 /* An ASSERT_EXPR should never be passed to fold_binary. */
12607 gcc_unreachable ();
12609 default:
12610 return NULL_TREE;
12611 } /* switch (code) */
12614 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12615 ((A & N) + B) & M -> (A + B) & M
12616 Similarly if (N & M) == 0,
12617 ((A | N) + B) & M -> (A + B) & M
12618 and for - instead of + (or unary - instead of +)
12619 and/or ^ instead of |.
12620 If B is constant and (B & M) == 0, fold into A & M.
12622 This function is a helper for match.pd patterns. Return non-NULL
12623 type in which the simplified operation should be performed only
12624 if any optimization is possible.
12626 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12627 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12628 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12629 +/-. */
12630 tree
12631 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12632 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12633 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12634 tree *pmop)
12636 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12637 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12638 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12639 if (~cst1 == 0
12640 || (cst1 & (cst1 + 1)) != 0
12641 || !INTEGRAL_TYPE_P (type)
12642 || (!TYPE_OVERFLOW_WRAPS (type)
12643 && TREE_CODE (type) != INTEGER_TYPE)
12644 || (wi::max_value (type) & cst1) != cst1)
12645 return NULL_TREE;
12647 enum tree_code codes[2] = { code00, code01 };
12648 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12649 int which = 0;
12650 wide_int cst0;
12652 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12653 arg1 (M) is == (1LL << cst) - 1.
12654 Store C into PMOP[0] and D into PMOP[1]. */
12655 pmop[0] = arg00;
12656 pmop[1] = arg01;
12657 which = code != NEGATE_EXPR;
12659 for (; which >= 0; which--)
12660 switch (codes[which])
12662 case BIT_AND_EXPR:
12663 case BIT_IOR_EXPR:
12664 case BIT_XOR_EXPR:
12665 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12666 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12667 if (codes[which] == BIT_AND_EXPR)
12669 if (cst0 != cst1)
12670 break;
12672 else if (cst0 != 0)
12673 break;
12674 /* If C or D is of the form (A & N) where
12675 (N & M) == M, or of the form (A | N) or
12676 (A ^ N) where (N & M) == 0, replace it with A. */
12677 pmop[which] = arg0xx[2 * which];
12678 break;
12679 case ERROR_MARK:
12680 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12681 break;
12682 /* If C or D is a N where (N & M) == 0, it can be
12683 omitted (replaced with 0). */
12684 if ((code == PLUS_EXPR
12685 || (code == MINUS_EXPR && which == 0))
12686 && (cst1 & wi::to_wide (pmop[which])) == 0)
12687 pmop[which] = build_int_cst (type, 0);
12688 /* Similarly, with C - N where (-N & M) == 0. */
12689 if (code == MINUS_EXPR
12690 && which == 1
12691 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12692 pmop[which] = build_int_cst (type, 0);
12693 break;
12694 default:
12695 gcc_unreachable ();
12698 /* Only build anything new if we optimized one or both arguments above. */
12699 if (pmop[0] == arg00 && pmop[1] == arg01)
12700 return NULL_TREE;
12702 if (TYPE_OVERFLOW_WRAPS (type))
12703 return type;
12704 else
12705 return unsigned_type_for (type);
12708 /* Used by contains_label_[p1]. */
12710 struct contains_label_data
12712 hash_set<tree> *pset;
12713 bool inside_switch_p;
12716 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12717 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12718 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12720 static tree
12721 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12723 contains_label_data *d = (contains_label_data *) data;
12724 switch (TREE_CODE (*tp))
12726 case LABEL_EXPR:
12727 return *tp;
12729 case CASE_LABEL_EXPR:
12730 if (!d->inside_switch_p)
12731 return *tp;
12732 return NULL_TREE;
12734 case SWITCH_EXPR:
12735 if (!d->inside_switch_p)
12737 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12738 return *tp;
12739 d->inside_switch_p = true;
12740 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12741 return *tp;
12742 d->inside_switch_p = false;
12743 *walk_subtrees = 0;
12745 return NULL_TREE;
12747 case GOTO_EXPR:
12748 *walk_subtrees = 0;
12749 return NULL_TREE;
12751 default:
12752 return NULL_TREE;
12756 /* Return whether the sub-tree ST contains a label which is accessible from
12757 outside the sub-tree. */
12759 static bool
12760 contains_label_p (tree st)
12762 hash_set<tree> pset;
12763 contains_label_data data = { &pset, false };
12764 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12767 /* Fold a ternary expression of code CODE and type TYPE with operands
12768 OP0, OP1, and OP2. Return the folded expression if folding is
12769 successful. Otherwise, return NULL_TREE. */
12771 tree
12772 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12773 tree op0, tree op1, tree op2)
12775 tree tem;
12776 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12777 enum tree_code_class kind = TREE_CODE_CLASS (code);
12779 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12780 && TREE_CODE_LENGTH (code) == 3);
12782 /* If this is a commutative operation, and OP0 is a constant, move it
12783 to OP1 to reduce the number of tests below. */
12784 if (commutative_ternary_tree_code (code)
12785 && tree_swap_operands_p (op0, op1))
12786 return fold_build3_loc (loc, code, type, op1, op0, op2);
12788 tem = generic_simplify (loc, code, type, op0, op1, op2);
12789 if (tem)
12790 return tem;
12792 /* Strip any conversions that don't change the mode. This is safe
12793 for every expression, except for a comparison expression because
12794 its signedness is derived from its operands. So, in the latter
12795 case, only strip conversions that don't change the signedness.
12797 Note that this is done as an internal manipulation within the
12798 constant folder, in order to find the simplest representation of
12799 the arguments so that their form can be studied. In any cases,
12800 the appropriate type conversions should be put back in the tree
12801 that will get out of the constant folder. */
12802 if (op0)
12804 arg0 = op0;
12805 STRIP_NOPS (arg0);
12808 if (op1)
12810 arg1 = op1;
12811 STRIP_NOPS (arg1);
12814 if (op2)
12816 arg2 = op2;
12817 STRIP_NOPS (arg2);
12820 switch (code)
12822 case COMPONENT_REF:
12823 if (TREE_CODE (arg0) == CONSTRUCTOR
12824 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12826 unsigned HOST_WIDE_INT idx;
12827 tree field, value;
12828 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12829 if (field == arg1)
12830 return value;
12832 return NULL_TREE;
12834 case COND_EXPR:
12835 case VEC_COND_EXPR:
12836 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12837 so all simple results must be passed through pedantic_non_lvalue. */
12838 if (TREE_CODE (arg0) == INTEGER_CST)
12840 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12841 tem = integer_zerop (arg0) ? op2 : op1;
12842 /* Only optimize constant conditions when the selected branch
12843 has the same type as the COND_EXPR. This avoids optimizing
12844 away "c ? x : throw", where the throw has a void type.
12845 Avoid throwing away that operand which contains label. */
12846 if ((!TREE_SIDE_EFFECTS (unused_op)
12847 || !contains_label_p (unused_op))
12848 && (! VOID_TYPE_P (TREE_TYPE (tem))
12849 || VOID_TYPE_P (type)))
12850 return protected_set_expr_location_unshare (tem, loc);
12851 return NULL_TREE;
12853 else if (TREE_CODE (arg0) == VECTOR_CST)
12855 unsigned HOST_WIDE_INT nelts;
12856 if ((TREE_CODE (arg1) == VECTOR_CST
12857 || TREE_CODE (arg1) == CONSTRUCTOR)
12858 && (TREE_CODE (arg2) == VECTOR_CST
12859 || TREE_CODE (arg2) == CONSTRUCTOR)
12860 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12862 vec_perm_builder sel (nelts, nelts, 1);
12863 for (unsigned int i = 0; i < nelts; i++)
12865 tree val = VECTOR_CST_ELT (arg0, i);
12866 if (integer_all_onesp (val))
12867 sel.quick_push (i);
12868 else if (integer_zerop (val))
12869 sel.quick_push (nelts + i);
12870 else /* Currently unreachable. */
12871 return NULL_TREE;
12873 vec_perm_indices indices (sel, 2, nelts);
12874 tree t = fold_vec_perm (type, arg1, arg2, indices);
12875 if (t != NULL_TREE)
12876 return t;
12880 /* If we have A op B ? A : C, we may be able to convert this to a
12881 simpler expression, depending on the operation and the values
12882 of B and C. Signed zeros prevent all of these transformations,
12883 for reasons given above each one.
12885 Also try swapping the arguments and inverting the conditional. */
12886 if (COMPARISON_CLASS_P (arg0)
12887 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12888 && !HONOR_SIGNED_ZEROS (op1))
12890 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12891 TREE_OPERAND (arg0, 0),
12892 TREE_OPERAND (arg0, 1),
12893 op1, op2);
12894 if (tem)
12895 return tem;
12898 if (COMPARISON_CLASS_P (arg0)
12899 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12900 && !HONOR_SIGNED_ZEROS (op2))
12902 enum tree_code comp_code = TREE_CODE (arg0);
12903 tree arg00 = TREE_OPERAND (arg0, 0);
12904 tree arg01 = TREE_OPERAND (arg0, 1);
12905 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12906 if (comp_code != ERROR_MARK)
12907 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12908 arg00,
12909 arg01,
12910 op2, op1);
12911 if (tem)
12912 return tem;
12915 /* If the second operand is simpler than the third, swap them
12916 since that produces better jump optimization results. */
12917 if (truth_value_p (TREE_CODE (arg0))
12918 && tree_swap_operands_p (op1, op2))
12920 location_t loc0 = expr_location_or (arg0, loc);
12921 /* See if this can be inverted. If it can't, possibly because
12922 it was a floating-point inequality comparison, don't do
12923 anything. */
12924 tem = fold_invert_truthvalue (loc0, arg0);
12925 if (tem)
12926 return fold_build3_loc (loc, code, type, tem, op2, op1);
12929 /* Convert A ? 1 : 0 to simply A. */
12930 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12931 : (integer_onep (op1)
12932 && !VECTOR_TYPE_P (type)))
12933 && integer_zerop (op2)
12934 /* If we try to convert OP0 to our type, the
12935 call to fold will try to move the conversion inside
12936 a COND, which will recurse. In that case, the COND_EXPR
12937 is probably the best choice, so leave it alone. */
12938 && type == TREE_TYPE (arg0))
12939 return protected_set_expr_location_unshare (arg0, loc);
12941 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12942 over COND_EXPR in cases such as floating point comparisons. */
12943 if (integer_zerop (op1)
12944 && code == COND_EXPR
12945 && integer_onep (op2)
12946 && !VECTOR_TYPE_P (type)
12947 && truth_value_p (TREE_CODE (arg0)))
12948 return fold_convert_loc (loc, type,
12949 invert_truthvalue_loc (loc, arg0));
12951 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12952 if (TREE_CODE (arg0) == LT_EXPR
12953 && integer_zerop (TREE_OPERAND (arg0, 1))
12954 && integer_zerop (op2)
12955 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12957 /* sign_bit_p looks through both zero and sign extensions,
12958 but for this optimization only sign extensions are
12959 usable. */
12960 tree tem2 = TREE_OPERAND (arg0, 0);
12961 while (tem != tem2)
12963 if (TREE_CODE (tem2) != NOP_EXPR
12964 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12966 tem = NULL_TREE;
12967 break;
12969 tem2 = TREE_OPERAND (tem2, 0);
12971 /* sign_bit_p only checks ARG1 bits within A's precision.
12972 If <sign bit of A> has wider type than A, bits outside
12973 of A's precision in <sign bit of A> need to be checked.
12974 If they are all 0, this optimization needs to be done
12975 in unsigned A's type, if they are all 1 in signed A's type,
12976 otherwise this can't be done. */
12977 if (tem
12978 && TYPE_PRECISION (TREE_TYPE (tem))
12979 < TYPE_PRECISION (TREE_TYPE (arg1))
12980 && TYPE_PRECISION (TREE_TYPE (tem))
12981 < TYPE_PRECISION (type))
12983 int inner_width, outer_width;
12984 tree tem_type;
12986 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12987 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12988 if (outer_width > TYPE_PRECISION (type))
12989 outer_width = TYPE_PRECISION (type);
12991 wide_int mask = wi::shifted_mask
12992 (inner_width, outer_width - inner_width, false,
12993 TYPE_PRECISION (TREE_TYPE (arg1)));
12995 wide_int common = mask & wi::to_wide (arg1);
12996 if (common == mask)
12998 tem_type = signed_type_for (TREE_TYPE (tem));
12999 tem = fold_convert_loc (loc, tem_type, tem);
13001 else if (common == 0)
13003 tem_type = unsigned_type_for (TREE_TYPE (tem));
13004 tem = fold_convert_loc (loc, tem_type, tem);
13006 else
13007 tem = NULL;
13010 if (tem)
13011 return
13012 fold_convert_loc (loc, type,
13013 fold_build2_loc (loc, BIT_AND_EXPR,
13014 TREE_TYPE (tem), tem,
13015 fold_convert_loc (loc,
13016 TREE_TYPE (tem),
13017 arg1)));
13020 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13021 already handled above. */
13022 if (TREE_CODE (arg0) == BIT_AND_EXPR
13023 && integer_onep (TREE_OPERAND (arg0, 1))
13024 && integer_zerop (op2)
13025 && integer_pow2p (arg1))
13027 tree tem = TREE_OPERAND (arg0, 0);
13028 STRIP_NOPS (tem);
13029 if (TREE_CODE (tem) == RSHIFT_EXPR
13030 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13031 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13032 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13033 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13034 fold_convert_loc (loc, type,
13035 TREE_OPERAND (tem, 0)),
13036 op1);
13039 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13040 is probably obsolete because the first operand should be a
13041 truth value (that's why we have the two cases above), but let's
13042 leave it in until we can confirm this for all front-ends. */
13043 if (integer_zerop (op2)
13044 && TREE_CODE (arg0) == NE_EXPR
13045 && integer_zerop (TREE_OPERAND (arg0, 1))
13046 && integer_pow2p (arg1)
13047 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13048 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13049 arg1, OEP_ONLY_CONST)
13050 /* operand_equal_p compares just value, not precision, so e.g.
13051 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13052 second operand 32-bit -128, which is not a power of two (or vice
13053 versa. */
13054 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13055 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13057 /* Disable the transformations below for vectors, since
13058 fold_binary_op_with_conditional_arg may undo them immediately,
13059 yielding an infinite loop. */
13060 if (code == VEC_COND_EXPR)
13061 return NULL_TREE;
13063 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13064 if (integer_zerop (op2)
13065 && truth_value_p (TREE_CODE (arg0))
13066 && truth_value_p (TREE_CODE (arg1))
13067 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13068 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13069 : TRUTH_ANDIF_EXPR,
13070 type, fold_convert_loc (loc, type, arg0), op1);
13072 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13073 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13074 && truth_value_p (TREE_CODE (arg0))
13075 && truth_value_p (TREE_CODE (arg1))
13076 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13078 location_t loc0 = expr_location_or (arg0, loc);
13079 /* Only perform transformation if ARG0 is easily inverted. */
13080 tem = fold_invert_truthvalue (loc0, arg0);
13081 if (tem)
13082 return fold_build2_loc (loc, code == VEC_COND_EXPR
13083 ? BIT_IOR_EXPR
13084 : TRUTH_ORIF_EXPR,
13085 type, fold_convert_loc (loc, type, tem),
13086 op1);
13089 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13090 if (integer_zerop (arg1)
13091 && truth_value_p (TREE_CODE (arg0))
13092 && truth_value_p (TREE_CODE (op2))
13093 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13095 location_t loc0 = expr_location_or (arg0, loc);
13096 /* Only perform transformation if ARG0 is easily inverted. */
13097 tem = fold_invert_truthvalue (loc0, arg0);
13098 if (tem)
13099 return fold_build2_loc (loc, code == VEC_COND_EXPR
13100 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13101 type, fold_convert_loc (loc, type, tem),
13102 op2);
13105 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13106 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13107 && truth_value_p (TREE_CODE (arg0))
13108 && truth_value_p (TREE_CODE (op2))
13109 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13110 return fold_build2_loc (loc, code == VEC_COND_EXPR
13111 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13112 type, fold_convert_loc (loc, type, arg0), op2);
13114 return NULL_TREE;
13116 case CALL_EXPR:
13117 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13118 of fold_ternary on them. */
13119 gcc_unreachable ();
13121 case BIT_FIELD_REF:
13122 if (TREE_CODE (arg0) == VECTOR_CST
13123 && (type == TREE_TYPE (TREE_TYPE (arg0))
13124 || (VECTOR_TYPE_P (type)
13125 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13126 && tree_fits_uhwi_p (op1)
13127 && tree_fits_uhwi_p (op2))
13129 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13130 unsigned HOST_WIDE_INT width
13131 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13132 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13133 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13134 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13136 if (n != 0
13137 && (idx % width) == 0
13138 && (n % width) == 0
13139 && known_le ((idx + n) / width,
13140 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13142 idx = idx / width;
13143 n = n / width;
13145 if (TREE_CODE (arg0) == VECTOR_CST)
13147 if (n == 1)
13149 tem = VECTOR_CST_ELT (arg0, idx);
13150 if (VECTOR_TYPE_P (type))
13151 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13152 return tem;
13155 tree_vector_builder vals (type, n, 1);
13156 for (unsigned i = 0; i < n; ++i)
13157 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13158 return vals.build ();
13163 /* On constants we can use native encode/interpret to constant
13164 fold (nearly) all BIT_FIELD_REFs. */
13165 if (CONSTANT_CLASS_P (arg0)
13166 && can_native_interpret_type_p (type)
13167 && BITS_PER_UNIT == 8
13168 && tree_fits_uhwi_p (op1)
13169 && tree_fits_uhwi_p (op2))
13171 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13172 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13173 /* Limit us to a reasonable amount of work. To relax the
13174 other limitations we need bit-shifting of the buffer
13175 and rounding up the size. */
13176 if (bitpos % BITS_PER_UNIT == 0
13177 && bitsize % BITS_PER_UNIT == 0
13178 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13180 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13181 unsigned HOST_WIDE_INT len
13182 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13183 bitpos / BITS_PER_UNIT);
13184 if (len > 0
13185 && len * BITS_PER_UNIT >= bitsize)
13187 tree v = native_interpret_expr (type, b,
13188 bitsize / BITS_PER_UNIT);
13189 if (v)
13190 return v;
13195 return NULL_TREE;
13197 case VEC_PERM_EXPR:
13198 /* Perform constant folding of BIT_INSERT_EXPR. */
13199 if (TREE_CODE (arg2) == VECTOR_CST
13200 && TREE_CODE (op0) == VECTOR_CST
13201 && TREE_CODE (op1) == VECTOR_CST)
13203 /* Build a vector of integers from the tree mask. */
13204 vec_perm_builder builder;
13205 if (!tree_to_vec_perm_builder (&builder, arg2))
13206 return NULL_TREE;
13208 /* Create a vec_perm_indices for the integer vector. */
13209 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13210 bool single_arg = (op0 == op1);
13211 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13212 return fold_vec_perm (type, op0, op1, sel);
13214 return NULL_TREE;
13216 case BIT_INSERT_EXPR:
13217 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13218 if (TREE_CODE (arg0) == INTEGER_CST
13219 && TREE_CODE (arg1) == INTEGER_CST)
13221 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13222 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13223 wide_int tem = (wi::to_wide (arg0)
13224 & wi::shifted_mask (bitpos, bitsize, true,
13225 TYPE_PRECISION (type)));
13226 wide_int tem2
13227 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13228 bitsize), bitpos);
13229 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13231 else if (TREE_CODE (arg0) == VECTOR_CST
13232 && CONSTANT_CLASS_P (arg1)
13233 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13234 TREE_TYPE (arg1)))
13236 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13237 unsigned HOST_WIDE_INT elsize
13238 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13239 if (bitpos % elsize == 0)
13241 unsigned k = bitpos / elsize;
13242 unsigned HOST_WIDE_INT nelts;
13243 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13244 return arg0;
13245 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13247 tree_vector_builder elts (type, nelts, 1);
13248 elts.quick_grow (nelts);
13249 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13250 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13251 return elts.build ();
13255 return NULL_TREE;
13257 default:
13258 return NULL_TREE;
13259 } /* switch (code) */
13262 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13263 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13264 constructor element index of the value returned. If the element is
13265 not found NULL_TREE is returned and *CTOR_IDX is updated to
13266 the index of the element after the ACCESS_INDEX position (which
13267 may be outside of the CTOR array). */
13269 tree
13270 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13271 unsigned *ctor_idx)
13273 tree index_type = NULL_TREE;
13274 signop index_sgn = UNSIGNED;
13275 offset_int low_bound = 0;
13277 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13279 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13280 if (domain_type && TYPE_MIN_VALUE (domain_type))
13282 /* Static constructors for variably sized objects makes no sense. */
13283 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13284 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13285 /* ??? When it is obvious that the range is signed, treat it so. */
13286 if (TYPE_UNSIGNED (index_type)
13287 && TYPE_MAX_VALUE (domain_type)
13288 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13289 TYPE_MIN_VALUE (domain_type)))
13291 index_sgn = SIGNED;
13292 low_bound
13293 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13294 SIGNED);
13296 else
13298 index_sgn = TYPE_SIGN (index_type);
13299 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13304 if (index_type)
13305 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13306 index_sgn);
13308 offset_int index = low_bound;
13309 if (index_type)
13310 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13312 offset_int max_index = index;
13313 unsigned cnt;
13314 tree cfield, cval;
13315 bool first_p = true;
13317 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13319 /* Array constructor might explicitly set index, or specify a range,
13320 or leave index NULL meaning that it is next index after previous
13321 one. */
13322 if (cfield)
13324 if (TREE_CODE (cfield) == INTEGER_CST)
13325 max_index = index
13326 = offset_int::from (wi::to_wide (cfield), index_sgn);
13327 else
13329 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13330 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13331 index_sgn);
13332 max_index
13333 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13334 index_sgn);
13335 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13338 else if (!first_p)
13340 index = max_index + 1;
13341 if (index_type)
13342 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13343 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13344 max_index = index;
13346 else
13347 first_p = false;
13349 /* Do we have match? */
13350 if (wi::cmp (access_index, index, index_sgn) >= 0)
13352 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13354 if (ctor_idx)
13355 *ctor_idx = cnt;
13356 return cval;
13359 else if (in_gimple_form)
13360 /* We're past the element we search for. Note during parsing
13361 the elements might not be sorted.
13362 ??? We should use a binary search and a flag on the
13363 CONSTRUCTOR as to whether elements are sorted in declaration
13364 order. */
13365 break;
13367 if (ctor_idx)
13368 *ctor_idx = cnt;
13369 return NULL_TREE;
13372 /* Perform constant folding and related simplification of EXPR.
13373 The related simplifications include x*1 => x, x*0 => 0, etc.,
13374 and application of the associative law.
13375 NOP_EXPR conversions may be removed freely (as long as we
13376 are careful not to change the type of the overall expression).
13377 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13378 but we can constant-fold them if they have constant operands. */
13380 #ifdef ENABLE_FOLD_CHECKING
13381 # define fold(x) fold_1 (x)
13382 static tree fold_1 (tree);
13383 static
13384 #endif
13385 tree
13386 fold (tree expr)
13388 const tree t = expr;
13389 enum tree_code code = TREE_CODE (t);
13390 enum tree_code_class kind = TREE_CODE_CLASS (code);
13391 tree tem;
13392 location_t loc = EXPR_LOCATION (expr);
13394 /* Return right away if a constant. */
13395 if (kind == tcc_constant)
13396 return t;
13398 /* CALL_EXPR-like objects with variable numbers of operands are
13399 treated specially. */
13400 if (kind == tcc_vl_exp)
13402 if (code == CALL_EXPR)
13404 tem = fold_call_expr (loc, expr, false);
13405 return tem ? tem : expr;
13407 return expr;
13410 if (IS_EXPR_CODE_CLASS (kind))
13412 tree type = TREE_TYPE (t);
13413 tree op0, op1, op2;
13415 switch (TREE_CODE_LENGTH (code))
13417 case 1:
13418 op0 = TREE_OPERAND (t, 0);
13419 tem = fold_unary_loc (loc, code, type, op0);
13420 return tem ? tem : expr;
13421 case 2:
13422 op0 = TREE_OPERAND (t, 0);
13423 op1 = TREE_OPERAND (t, 1);
13424 tem = fold_binary_loc (loc, code, type, op0, op1);
13425 return tem ? tem : expr;
13426 case 3:
13427 op0 = TREE_OPERAND (t, 0);
13428 op1 = TREE_OPERAND (t, 1);
13429 op2 = TREE_OPERAND (t, 2);
13430 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13431 return tem ? tem : expr;
13432 default:
13433 break;
13437 switch (code)
13439 case ARRAY_REF:
13441 tree op0 = TREE_OPERAND (t, 0);
13442 tree op1 = TREE_OPERAND (t, 1);
13444 if (TREE_CODE (op1) == INTEGER_CST
13445 && TREE_CODE (op0) == CONSTRUCTOR
13446 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13448 tree val = get_array_ctor_element_at_index (op0,
13449 wi::to_offset (op1));
13450 if (val)
13451 return val;
13454 return t;
13457 /* Return a VECTOR_CST if possible. */
13458 case CONSTRUCTOR:
13460 tree type = TREE_TYPE (t);
13461 if (TREE_CODE (type) != VECTOR_TYPE)
13462 return t;
13464 unsigned i;
13465 tree val;
13466 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13467 if (! CONSTANT_CLASS_P (val))
13468 return t;
13470 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13473 case CONST_DECL:
13474 return fold (DECL_INITIAL (t));
13476 default:
13477 return t;
13478 } /* switch (code) */
13481 #ifdef ENABLE_FOLD_CHECKING
13482 #undef fold
13484 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13485 hash_table<nofree_ptr_hash<const tree_node> > *);
13486 static void fold_check_failed (const_tree, const_tree);
13487 void print_fold_checksum (const_tree);
13489 /* When --enable-checking=fold, compute a digest of expr before
13490 and after actual fold call to see if fold did not accidentally
13491 change original expr. */
13493 tree
13494 fold (tree expr)
13496 tree ret;
13497 struct md5_ctx ctx;
13498 unsigned char checksum_before[16], checksum_after[16];
13499 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13501 md5_init_ctx (&ctx);
13502 fold_checksum_tree (expr, &ctx, &ht);
13503 md5_finish_ctx (&ctx, checksum_before);
13504 ht.empty ();
13506 ret = fold_1 (expr);
13508 md5_init_ctx (&ctx);
13509 fold_checksum_tree (expr, &ctx, &ht);
13510 md5_finish_ctx (&ctx, checksum_after);
13512 if (memcmp (checksum_before, checksum_after, 16))
13513 fold_check_failed (expr, ret);
13515 return ret;
13518 void
13519 print_fold_checksum (const_tree expr)
13521 struct md5_ctx ctx;
13522 unsigned char checksum[16], cnt;
13523 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13525 md5_init_ctx (&ctx);
13526 fold_checksum_tree (expr, &ctx, &ht);
13527 md5_finish_ctx (&ctx, checksum);
13528 for (cnt = 0; cnt < 16; ++cnt)
13529 fprintf (stderr, "%02x", checksum[cnt]);
13530 putc ('\n', stderr);
13533 static void
13534 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13536 internal_error ("fold check: original tree changed by fold");
13539 static void
13540 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13541 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13543 const tree_node **slot;
13544 enum tree_code code;
13545 union tree_node *buf;
13546 int i, len;
13548 recursive_label:
13549 if (expr == NULL)
13550 return;
13551 slot = ht->find_slot (expr, INSERT);
13552 if (*slot != NULL)
13553 return;
13554 *slot = expr;
13555 code = TREE_CODE (expr);
13556 if (TREE_CODE_CLASS (code) == tcc_declaration
13557 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13559 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13560 size_t sz = tree_size (expr);
13561 buf = XALLOCAVAR (union tree_node, sz);
13562 memcpy ((char *) buf, expr, sz);
13563 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13564 buf->decl_with_vis.symtab_node = NULL;
13565 buf->base.nowarning_flag = 0;
13566 expr = (tree) buf;
13568 else if (TREE_CODE_CLASS (code) == tcc_type
13569 && (TYPE_POINTER_TO (expr)
13570 || TYPE_REFERENCE_TO (expr)
13571 || TYPE_CACHED_VALUES_P (expr)
13572 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13573 || TYPE_NEXT_VARIANT (expr)
13574 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13576 /* Allow these fields to be modified. */
13577 tree tmp;
13578 size_t sz = tree_size (expr);
13579 buf = XALLOCAVAR (union tree_node, sz);
13580 memcpy ((char *) buf, expr, sz);
13581 expr = tmp = (tree) buf;
13582 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13583 TYPE_POINTER_TO (tmp) = NULL;
13584 TYPE_REFERENCE_TO (tmp) = NULL;
13585 TYPE_NEXT_VARIANT (tmp) = NULL;
13586 TYPE_ALIAS_SET (tmp) = -1;
13587 if (TYPE_CACHED_VALUES_P (tmp))
13589 TYPE_CACHED_VALUES_P (tmp) = 0;
13590 TYPE_CACHED_VALUES (tmp) = NULL;
13593 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13595 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13596 that and change builtins.cc etc. instead - see PR89543. */
13597 size_t sz = tree_size (expr);
13598 buf = XALLOCAVAR (union tree_node, sz);
13599 memcpy ((char *) buf, expr, sz);
13600 buf->base.nowarning_flag = 0;
13601 expr = (tree) buf;
13603 md5_process_bytes (expr, tree_size (expr), ctx);
13604 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13605 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13606 if (TREE_CODE_CLASS (code) != tcc_type
13607 && TREE_CODE_CLASS (code) != tcc_declaration
13608 && code != TREE_LIST
13609 && code != SSA_NAME
13610 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13611 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13612 switch (TREE_CODE_CLASS (code))
13614 case tcc_constant:
13615 switch (code)
13617 case STRING_CST:
13618 md5_process_bytes (TREE_STRING_POINTER (expr),
13619 TREE_STRING_LENGTH (expr), ctx);
13620 break;
13621 case COMPLEX_CST:
13622 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13623 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13624 break;
13625 case VECTOR_CST:
13626 len = vector_cst_encoded_nelts (expr);
13627 for (i = 0; i < len; ++i)
13628 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13629 break;
13630 default:
13631 break;
13633 break;
13634 case tcc_exceptional:
13635 switch (code)
13637 case TREE_LIST:
13638 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13639 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13640 expr = TREE_CHAIN (expr);
13641 goto recursive_label;
13642 break;
13643 case TREE_VEC:
13644 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13645 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13646 break;
13647 default:
13648 break;
13650 break;
13651 case tcc_expression:
13652 case tcc_reference:
13653 case tcc_comparison:
13654 case tcc_unary:
13655 case tcc_binary:
13656 case tcc_statement:
13657 case tcc_vl_exp:
13658 len = TREE_OPERAND_LENGTH (expr);
13659 for (i = 0; i < len; ++i)
13660 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13661 break;
13662 case tcc_declaration:
13663 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13664 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13665 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13667 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13668 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13669 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13670 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13671 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13674 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13676 if (TREE_CODE (expr) == FUNCTION_DECL)
13678 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13679 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13681 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13683 break;
13684 case tcc_type:
13685 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13686 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13687 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13688 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13689 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13690 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13691 if (INTEGRAL_TYPE_P (expr)
13692 || SCALAR_FLOAT_TYPE_P (expr))
13694 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13695 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13697 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13698 if (TREE_CODE (expr) == RECORD_TYPE
13699 || TREE_CODE (expr) == UNION_TYPE
13700 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13701 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13702 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13703 break;
13704 default:
13705 break;
13709 /* Helper function for outputting the checksum of a tree T. When
13710 debugging with gdb, you can "define mynext" to be "next" followed
13711 by "call debug_fold_checksum (op0)", then just trace down till the
13712 outputs differ. */
13714 DEBUG_FUNCTION void
13715 debug_fold_checksum (const_tree t)
13717 int i;
13718 unsigned char checksum[16];
13719 struct md5_ctx ctx;
13720 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13722 md5_init_ctx (&ctx);
13723 fold_checksum_tree (t, &ctx, &ht);
13724 md5_finish_ctx (&ctx, checksum);
13725 ht.empty ();
13727 for (i = 0; i < 16; i++)
13728 fprintf (stderr, "%d ", checksum[i]);
13730 fprintf (stderr, "\n");
13733 #endif
13735 /* Fold a unary tree expression with code CODE of type TYPE with an
13736 operand OP0. LOC is the location of the resulting expression.
13737 Return a folded expression if successful. Otherwise, return a tree
13738 expression with code CODE of type TYPE with an operand OP0. */
13740 tree
13741 fold_build1_loc (location_t loc,
13742 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13744 tree tem;
13745 #ifdef ENABLE_FOLD_CHECKING
13746 unsigned char checksum_before[16], checksum_after[16];
13747 struct md5_ctx ctx;
13748 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13750 md5_init_ctx (&ctx);
13751 fold_checksum_tree (op0, &ctx, &ht);
13752 md5_finish_ctx (&ctx, checksum_before);
13753 ht.empty ();
13754 #endif
13756 tem = fold_unary_loc (loc, code, type, op0);
13757 if (!tem)
13758 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13760 #ifdef ENABLE_FOLD_CHECKING
13761 md5_init_ctx (&ctx);
13762 fold_checksum_tree (op0, &ctx, &ht);
13763 md5_finish_ctx (&ctx, checksum_after);
13765 if (memcmp (checksum_before, checksum_after, 16))
13766 fold_check_failed (op0, tem);
13767 #endif
13768 return tem;
13771 /* Fold a binary tree expression with code CODE of type TYPE with
13772 operands OP0 and OP1. LOC is the location of the resulting
13773 expression. Return a folded expression if successful. Otherwise,
13774 return a tree expression with code CODE of type TYPE with operands
13775 OP0 and OP1. */
13777 tree
13778 fold_build2_loc (location_t loc,
13779 enum tree_code code, tree type, tree op0, tree op1
13780 MEM_STAT_DECL)
13782 tree tem;
13783 #ifdef ENABLE_FOLD_CHECKING
13784 unsigned char checksum_before_op0[16],
13785 checksum_before_op1[16],
13786 checksum_after_op0[16],
13787 checksum_after_op1[16];
13788 struct md5_ctx ctx;
13789 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13791 md5_init_ctx (&ctx);
13792 fold_checksum_tree (op0, &ctx, &ht);
13793 md5_finish_ctx (&ctx, checksum_before_op0);
13794 ht.empty ();
13796 md5_init_ctx (&ctx);
13797 fold_checksum_tree (op1, &ctx, &ht);
13798 md5_finish_ctx (&ctx, checksum_before_op1);
13799 ht.empty ();
13800 #endif
13802 tem = fold_binary_loc (loc, code, type, op0, op1);
13803 if (!tem)
13804 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13806 #ifdef ENABLE_FOLD_CHECKING
13807 md5_init_ctx (&ctx);
13808 fold_checksum_tree (op0, &ctx, &ht);
13809 md5_finish_ctx (&ctx, checksum_after_op0);
13810 ht.empty ();
13812 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13813 fold_check_failed (op0, tem);
13815 md5_init_ctx (&ctx);
13816 fold_checksum_tree (op1, &ctx, &ht);
13817 md5_finish_ctx (&ctx, checksum_after_op1);
13819 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13820 fold_check_failed (op1, tem);
13821 #endif
13822 return tem;
13825 /* Fold a ternary tree expression with code CODE of type TYPE with
13826 operands OP0, OP1, and OP2. Return a folded expression if
13827 successful. Otherwise, return a tree expression with code CODE of
13828 type TYPE with operands OP0, OP1, and OP2. */
13830 tree
13831 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13832 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13834 tree tem;
13835 #ifdef ENABLE_FOLD_CHECKING
13836 unsigned char checksum_before_op0[16],
13837 checksum_before_op1[16],
13838 checksum_before_op2[16],
13839 checksum_after_op0[16],
13840 checksum_after_op1[16],
13841 checksum_after_op2[16];
13842 struct md5_ctx ctx;
13843 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13845 md5_init_ctx (&ctx);
13846 fold_checksum_tree (op0, &ctx, &ht);
13847 md5_finish_ctx (&ctx, checksum_before_op0);
13848 ht.empty ();
13850 md5_init_ctx (&ctx);
13851 fold_checksum_tree (op1, &ctx, &ht);
13852 md5_finish_ctx (&ctx, checksum_before_op1);
13853 ht.empty ();
13855 md5_init_ctx (&ctx);
13856 fold_checksum_tree (op2, &ctx, &ht);
13857 md5_finish_ctx (&ctx, checksum_before_op2);
13858 ht.empty ();
13859 #endif
13861 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13862 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13863 if (!tem)
13864 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13866 #ifdef ENABLE_FOLD_CHECKING
13867 md5_init_ctx (&ctx);
13868 fold_checksum_tree (op0, &ctx, &ht);
13869 md5_finish_ctx (&ctx, checksum_after_op0);
13870 ht.empty ();
13872 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13873 fold_check_failed (op0, tem);
13875 md5_init_ctx (&ctx);
13876 fold_checksum_tree (op1, &ctx, &ht);
13877 md5_finish_ctx (&ctx, checksum_after_op1);
13878 ht.empty ();
13880 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13881 fold_check_failed (op1, tem);
13883 md5_init_ctx (&ctx);
13884 fold_checksum_tree (op2, &ctx, &ht);
13885 md5_finish_ctx (&ctx, checksum_after_op2);
13887 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13888 fold_check_failed (op2, tem);
13889 #endif
13890 return tem;
13893 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13894 arguments in ARGARRAY, and a null static chain.
13895 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13896 of type TYPE from the given operands as constructed by build_call_array. */
13898 tree
13899 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13900 int nargs, tree *argarray)
13902 tree tem;
13903 #ifdef ENABLE_FOLD_CHECKING
13904 unsigned char checksum_before_fn[16],
13905 checksum_before_arglist[16],
13906 checksum_after_fn[16],
13907 checksum_after_arglist[16];
13908 struct md5_ctx ctx;
13909 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13910 int i;
13912 md5_init_ctx (&ctx);
13913 fold_checksum_tree (fn, &ctx, &ht);
13914 md5_finish_ctx (&ctx, checksum_before_fn);
13915 ht.empty ();
13917 md5_init_ctx (&ctx);
13918 for (i = 0; i < nargs; i++)
13919 fold_checksum_tree (argarray[i], &ctx, &ht);
13920 md5_finish_ctx (&ctx, checksum_before_arglist);
13921 ht.empty ();
13922 #endif
13924 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13925 if (!tem)
13926 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13928 #ifdef ENABLE_FOLD_CHECKING
13929 md5_init_ctx (&ctx);
13930 fold_checksum_tree (fn, &ctx, &ht);
13931 md5_finish_ctx (&ctx, checksum_after_fn);
13932 ht.empty ();
13934 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13935 fold_check_failed (fn, tem);
13937 md5_init_ctx (&ctx);
13938 for (i = 0; i < nargs; i++)
13939 fold_checksum_tree (argarray[i], &ctx, &ht);
13940 md5_finish_ctx (&ctx, checksum_after_arglist);
13942 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13943 fold_check_failed (NULL_TREE, tem);
13944 #endif
13945 return tem;
13948 /* Perform constant folding and related simplification of initializer
13949 expression EXPR. These behave identically to "fold_buildN" but ignore
13950 potential run-time traps and exceptions that fold must preserve. */
13952 #define START_FOLD_INIT \
13953 int saved_signaling_nans = flag_signaling_nans;\
13954 int saved_trapping_math = flag_trapping_math;\
13955 int saved_rounding_math = flag_rounding_math;\
13956 int saved_trapv = flag_trapv;\
13957 int saved_folding_initializer = folding_initializer;\
13958 flag_signaling_nans = 0;\
13959 flag_trapping_math = 0;\
13960 flag_rounding_math = 0;\
13961 flag_trapv = 0;\
13962 folding_initializer = 1;
13964 #define END_FOLD_INIT \
13965 flag_signaling_nans = saved_signaling_nans;\
13966 flag_trapping_math = saved_trapping_math;\
13967 flag_rounding_math = saved_rounding_math;\
13968 flag_trapv = saved_trapv;\
13969 folding_initializer = saved_folding_initializer;
13971 tree
13972 fold_init (tree expr)
13974 tree result;
13975 START_FOLD_INIT;
13977 result = fold (expr);
13979 END_FOLD_INIT;
13980 return result;
13983 tree
13984 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13985 tree type, tree op)
13987 tree result;
13988 START_FOLD_INIT;
13990 result = fold_build1_loc (loc, code, type, op);
13992 END_FOLD_INIT;
13993 return result;
13996 tree
13997 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13998 tree type, tree op0, tree op1)
14000 tree result;
14001 START_FOLD_INIT;
14003 result = fold_build2_loc (loc, code, type, op0, op1);
14005 END_FOLD_INIT;
14006 return result;
14009 tree
14010 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14011 int nargs, tree *argarray)
14013 tree result;
14014 START_FOLD_INIT;
14016 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14018 END_FOLD_INIT;
14019 return result;
14022 tree
14023 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14024 tree lhs, tree rhs)
14026 tree result;
14027 START_FOLD_INIT;
14029 result = fold_binary_loc (loc, code, type, lhs, rhs);
14031 END_FOLD_INIT;
14032 return result;
14035 #undef START_FOLD_INIT
14036 #undef END_FOLD_INIT
14038 /* Determine if first argument is a multiple of second argument. Return 0 if
14039 it is not, or we cannot easily determined it to be.
14041 An example of the sort of thing we care about (at this point; this routine
14042 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14043 fold cases do now) is discovering that
14045 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14047 is a multiple of
14049 SAVE_EXPR (J * 8)
14051 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14053 This code also handles discovering that
14055 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14057 is a multiple of 8 so we don't have to worry about dealing with a
14058 possible remainder.
14060 Note that we *look* inside a SAVE_EXPR only to determine how it was
14061 calculated; it is not safe for fold to do much of anything else with the
14062 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14063 at run time. For example, the latter example above *cannot* be implemented
14064 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14065 evaluation time of the original SAVE_EXPR is not necessarily the same at
14066 the time the new expression is evaluated. The only optimization of this
14067 sort that would be valid is changing
14069 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14071 divided by 8 to
14073 SAVE_EXPR (I) * SAVE_EXPR (J)
14075 (where the same SAVE_EXPR (J) is used in the original and the
14076 transformed version).
14078 NOWRAP specifies whether all outer operations in TYPE should
14079 be considered not wrapping. Any type conversion within TOP acts
14080 as a barrier and we will fall back to NOWRAP being false.
14081 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14082 as not wrapping even though they are generally using unsigned arithmetic. */
14085 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14087 gimple *stmt;
14088 tree op1, op2;
14090 if (operand_equal_p (top, bottom, 0))
14091 return 1;
14093 if (TREE_CODE (type) != INTEGER_TYPE)
14094 return 0;
14096 switch (TREE_CODE (top))
14098 case BIT_AND_EXPR:
14099 /* Bitwise and provides a power of two multiple. If the mask is
14100 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14101 if (!integer_pow2p (bottom))
14102 return 0;
14103 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14104 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14106 case MULT_EXPR:
14107 /* If the multiplication can wrap we cannot recurse further unless
14108 the bottom is a power of two which is where wrapping does not
14109 matter. */
14110 if (!nowrap
14111 && !TYPE_OVERFLOW_UNDEFINED (type)
14112 && !integer_pow2p (bottom))
14113 return 0;
14114 if (TREE_CODE (bottom) == INTEGER_CST)
14116 op1 = TREE_OPERAND (top, 0);
14117 op2 = TREE_OPERAND (top, 1);
14118 if (TREE_CODE (op1) == INTEGER_CST)
14119 std::swap (op1, op2);
14120 if (TREE_CODE (op2) == INTEGER_CST)
14122 if (multiple_of_p (type, op2, bottom, nowrap))
14123 return 1;
14124 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14125 if (multiple_of_p (type, bottom, op2, nowrap))
14127 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14128 wi::to_widest (op2));
14129 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14131 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14132 return multiple_of_p (type, op1, op2, nowrap);
14135 return multiple_of_p (type, op1, bottom, nowrap);
14138 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14139 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14141 case LSHIFT_EXPR:
14142 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14143 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14145 op1 = TREE_OPERAND (top, 1);
14146 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14148 wide_int mul_op
14149 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14150 return multiple_of_p (type,
14151 wide_int_to_tree (type, mul_op), bottom,
14152 nowrap);
14155 return 0;
14157 case MINUS_EXPR:
14158 case PLUS_EXPR:
14159 /* If the addition or subtraction can wrap we cannot recurse further
14160 unless bottom is a power of two which is where wrapping does not
14161 matter. */
14162 if (!nowrap
14163 && !TYPE_OVERFLOW_UNDEFINED (type)
14164 && !integer_pow2p (bottom))
14165 return 0;
14167 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14168 unsigned type. For example, (X / 3) + 0xfffffffd is multiple of 3,
14169 but 0xfffffffd is not. */
14170 op1 = TREE_OPERAND (top, 1);
14171 if (TREE_CODE (top) == PLUS_EXPR
14172 && nowrap
14173 && TYPE_UNSIGNED (type)
14174 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14175 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14177 /* It is impossible to prove if op0 +- op1 is multiple of bottom
14178 precisely, so be conservative here checking if both op0 and op1
14179 are multiple of bottom. Note we check the second operand first
14180 since it's usually simpler. */
14181 return (multiple_of_p (type, op1, bottom, nowrap)
14182 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14184 CASE_CONVERT:
14185 /* Can't handle conversions from non-integral or wider integral type. */
14186 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14187 || (TYPE_PRECISION (type)
14188 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14189 return 0;
14190 /* NOWRAP only extends to operations in the outermost type so
14191 make sure to strip it off here. */
14192 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14193 TREE_OPERAND (top, 0), bottom, false);
14195 case SAVE_EXPR:
14196 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14198 case COND_EXPR:
14199 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14200 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14202 case INTEGER_CST:
14203 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14204 return 0;
14205 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14206 SIGNED);
14208 case SSA_NAME:
14209 if (TREE_CODE (bottom) == INTEGER_CST
14210 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14211 && gimple_code (stmt) == GIMPLE_ASSIGN)
14213 enum tree_code code = gimple_assign_rhs_code (stmt);
14215 /* Check for special cases to see if top is defined as multiple
14216 of bottom:
14218 top = (X & ~(bottom - 1) ; bottom is power of 2
14222 Y = X % bottom
14223 top = X - Y. */
14224 if (code == BIT_AND_EXPR
14225 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14226 && TREE_CODE (op2) == INTEGER_CST
14227 && integer_pow2p (bottom)
14228 && wi::multiple_of_p (wi::to_widest (op2),
14229 wi::to_widest (bottom), UNSIGNED))
14230 return 1;
14232 op1 = gimple_assign_rhs1 (stmt);
14233 if (code == MINUS_EXPR
14234 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14235 && TREE_CODE (op2) == SSA_NAME
14236 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14237 && gimple_code (stmt) == GIMPLE_ASSIGN
14238 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14239 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14240 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14241 return 1;
14244 /* fall through */
14246 default:
14247 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14248 return multiple_p (wi::to_poly_widest (top),
14249 wi::to_poly_widest (bottom));
14251 return 0;
14255 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14256 This function returns true for integer expressions, and returns
14257 false if uncertain. */
14259 bool
14260 tree_expr_finite_p (const_tree x)
14262 machine_mode mode = element_mode (x);
14263 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14264 return true;
14265 switch (TREE_CODE (x))
14267 case REAL_CST:
14268 return real_isfinite (TREE_REAL_CST_PTR (x));
14269 case COMPLEX_CST:
14270 return tree_expr_finite_p (TREE_REALPART (x))
14271 && tree_expr_finite_p (TREE_IMAGPART (x));
14272 case FLOAT_EXPR:
14273 return true;
14274 case ABS_EXPR:
14275 case CONVERT_EXPR:
14276 case NON_LVALUE_EXPR:
14277 case NEGATE_EXPR:
14278 case SAVE_EXPR:
14279 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14280 case MIN_EXPR:
14281 case MAX_EXPR:
14282 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14283 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14284 case COND_EXPR:
14285 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14286 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14287 case CALL_EXPR:
14288 switch (get_call_combined_fn (x))
14290 CASE_CFN_FABS:
14291 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14292 CASE_CFN_FMAX:
14293 CASE_CFN_FMIN:
14294 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14295 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14296 default:
14297 return false;
14300 default:
14301 return false;
14305 /* Return true if expression X evaluates to an infinity.
14306 This function returns false for integer expressions. */
14308 bool
14309 tree_expr_infinite_p (const_tree x)
14311 if (!HONOR_INFINITIES (x))
14312 return false;
14313 switch (TREE_CODE (x))
14315 case REAL_CST:
14316 return real_isinf (TREE_REAL_CST_PTR (x));
14317 case ABS_EXPR:
14318 case NEGATE_EXPR:
14319 case NON_LVALUE_EXPR:
14320 case SAVE_EXPR:
14321 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14322 case COND_EXPR:
14323 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14324 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14325 default:
14326 return false;
14330 /* Return true if expression X could evaluate to an infinity.
14331 This function returns false for integer expressions, and returns
14332 true if uncertain. */
14334 bool
14335 tree_expr_maybe_infinite_p (const_tree x)
14337 if (!HONOR_INFINITIES (x))
14338 return false;
14339 switch (TREE_CODE (x))
14341 case REAL_CST:
14342 return real_isinf (TREE_REAL_CST_PTR (x));
14343 case FLOAT_EXPR:
14344 return false;
14345 case ABS_EXPR:
14346 case NEGATE_EXPR:
14347 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14348 case COND_EXPR:
14349 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14350 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14351 default:
14352 return true;
14356 /* Return true if expression X evaluates to a signaling NaN.
14357 This function returns false for integer expressions. */
14359 bool
14360 tree_expr_signaling_nan_p (const_tree x)
14362 if (!HONOR_SNANS (x))
14363 return false;
14364 switch (TREE_CODE (x))
14366 case REAL_CST:
14367 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14368 case NON_LVALUE_EXPR:
14369 case SAVE_EXPR:
14370 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14371 case COND_EXPR:
14372 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14373 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14374 default:
14375 return false;
14379 /* Return true if expression X could evaluate to a signaling NaN.
14380 This function returns false for integer expressions, and returns
14381 true if uncertain. */
14383 bool
14384 tree_expr_maybe_signaling_nan_p (const_tree x)
14386 if (!HONOR_SNANS (x))
14387 return false;
14388 switch (TREE_CODE (x))
14390 case REAL_CST:
14391 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14392 case FLOAT_EXPR:
14393 return false;
14394 case ABS_EXPR:
14395 case CONVERT_EXPR:
14396 case NEGATE_EXPR:
14397 case NON_LVALUE_EXPR:
14398 case SAVE_EXPR:
14399 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14400 case MIN_EXPR:
14401 case MAX_EXPR:
14402 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14403 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14404 case COND_EXPR:
14405 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14406 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14407 case CALL_EXPR:
14408 switch (get_call_combined_fn (x))
14410 CASE_CFN_FABS:
14411 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14412 CASE_CFN_FMAX:
14413 CASE_CFN_FMIN:
14414 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14415 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14416 default:
14417 return true;
14419 default:
14420 return true;
14424 /* Return true if expression X evaluates to a NaN.
14425 This function returns false for integer expressions. */
14427 bool
14428 tree_expr_nan_p (const_tree x)
14430 if (!HONOR_NANS (x))
14431 return false;
14432 switch (TREE_CODE (x))
14434 case REAL_CST:
14435 return real_isnan (TREE_REAL_CST_PTR (x));
14436 case NON_LVALUE_EXPR:
14437 case SAVE_EXPR:
14438 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14439 case COND_EXPR:
14440 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14441 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14442 default:
14443 return false;
14447 /* Return true if expression X could evaluate to a NaN.
14448 This function returns false for integer expressions, and returns
14449 true if uncertain. */
14451 bool
14452 tree_expr_maybe_nan_p (const_tree x)
14454 if (!HONOR_NANS (x))
14455 return false;
14456 switch (TREE_CODE (x))
14458 case REAL_CST:
14459 return real_isnan (TREE_REAL_CST_PTR (x));
14460 case FLOAT_EXPR:
14461 return false;
14462 case PLUS_EXPR:
14463 case MINUS_EXPR:
14464 case MULT_EXPR:
14465 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14466 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14467 case ABS_EXPR:
14468 case CONVERT_EXPR:
14469 case NEGATE_EXPR:
14470 case NON_LVALUE_EXPR:
14471 case SAVE_EXPR:
14472 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14473 case MIN_EXPR:
14474 case MAX_EXPR:
14475 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14476 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14477 case COND_EXPR:
14478 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14479 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14480 case CALL_EXPR:
14481 switch (get_call_combined_fn (x))
14483 CASE_CFN_FABS:
14484 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14485 CASE_CFN_FMAX:
14486 CASE_CFN_FMIN:
14487 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14488 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14489 default:
14490 return true;
14492 default:
14493 return true;
14497 /* Return true if expression X could evaluate to -0.0.
14498 This function returns true if uncertain. */
14500 bool
14501 tree_expr_maybe_real_minus_zero_p (const_tree x)
14503 if (!HONOR_SIGNED_ZEROS (x))
14504 return false;
14505 switch (TREE_CODE (x))
14507 case REAL_CST:
14508 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14509 case INTEGER_CST:
14510 case FLOAT_EXPR:
14511 case ABS_EXPR:
14512 return false;
14513 case NON_LVALUE_EXPR:
14514 case SAVE_EXPR:
14515 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14516 case COND_EXPR:
14517 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14518 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14519 case CALL_EXPR:
14520 switch (get_call_combined_fn (x))
14522 CASE_CFN_FABS:
14523 return false;
14524 default:
14525 break;
14527 default:
14528 break;
14530 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14531 * but currently those predicates require tree and not const_tree. */
14532 return true;
14535 #define tree_expr_nonnegative_warnv_p(X, Y) \
14536 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14538 #define RECURSE(X) \
14539 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14541 /* Return true if CODE or TYPE is known to be non-negative. */
14543 static bool
14544 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14546 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14547 && truth_value_p (code))
14548 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14549 have a signed:1 type (where the value is -1 and 0). */
14550 return true;
14551 return false;
14554 /* Return true if (CODE OP0) is known to be non-negative. If the return
14555 value is based on the assumption that signed overflow is undefined,
14556 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14557 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14559 bool
14560 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14561 bool *strict_overflow_p, int depth)
14563 if (TYPE_UNSIGNED (type))
14564 return true;
14566 switch (code)
14568 case ABS_EXPR:
14569 /* We can't return 1 if flag_wrapv is set because
14570 ABS_EXPR<INT_MIN> = INT_MIN. */
14571 if (!ANY_INTEGRAL_TYPE_P (type))
14572 return true;
14573 if (TYPE_OVERFLOW_UNDEFINED (type))
14575 *strict_overflow_p = true;
14576 return true;
14578 break;
14580 case NON_LVALUE_EXPR:
14581 case FLOAT_EXPR:
14582 case FIX_TRUNC_EXPR:
14583 return RECURSE (op0);
14585 CASE_CONVERT:
14587 tree inner_type = TREE_TYPE (op0);
14588 tree outer_type = type;
14590 if (TREE_CODE (outer_type) == REAL_TYPE)
14592 if (TREE_CODE (inner_type) == REAL_TYPE)
14593 return RECURSE (op0);
14594 if (INTEGRAL_TYPE_P (inner_type))
14596 if (TYPE_UNSIGNED (inner_type))
14597 return true;
14598 return RECURSE (op0);
14601 else if (INTEGRAL_TYPE_P (outer_type))
14603 if (TREE_CODE (inner_type) == REAL_TYPE)
14604 return RECURSE (op0);
14605 if (INTEGRAL_TYPE_P (inner_type))
14606 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14607 && TYPE_UNSIGNED (inner_type);
14610 break;
14612 default:
14613 return tree_simple_nonnegative_warnv_p (code, type);
14616 /* We don't know sign of `t', so be conservative and return false. */
14617 return false;
14620 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14621 value is based on the assumption that signed overflow is undefined,
14622 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14623 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14625 bool
14626 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14627 tree op1, bool *strict_overflow_p,
14628 int depth)
14630 if (TYPE_UNSIGNED (type))
14631 return true;
14633 switch (code)
14635 case POINTER_PLUS_EXPR:
14636 case PLUS_EXPR:
14637 if (FLOAT_TYPE_P (type))
14638 return RECURSE (op0) && RECURSE (op1);
14640 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14641 both unsigned and at least 2 bits shorter than the result. */
14642 if (TREE_CODE (type) == INTEGER_TYPE
14643 && TREE_CODE (op0) == NOP_EXPR
14644 && TREE_CODE (op1) == NOP_EXPR)
14646 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14647 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14648 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14649 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14651 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14652 TYPE_PRECISION (inner2)) + 1;
14653 return prec < TYPE_PRECISION (type);
14656 break;
14658 case MULT_EXPR:
14659 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14661 /* x * x is always non-negative for floating point x
14662 or without overflow. */
14663 if (operand_equal_p (op0, op1, 0)
14664 || (RECURSE (op0) && RECURSE (op1)))
14666 if (ANY_INTEGRAL_TYPE_P (type)
14667 && TYPE_OVERFLOW_UNDEFINED (type))
14668 *strict_overflow_p = true;
14669 return true;
14673 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14674 both unsigned and their total bits is shorter than the result. */
14675 if (TREE_CODE (type) == INTEGER_TYPE
14676 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14677 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14679 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14680 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14681 : TREE_TYPE (op0);
14682 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14683 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14684 : TREE_TYPE (op1);
14686 bool unsigned0 = TYPE_UNSIGNED (inner0);
14687 bool unsigned1 = TYPE_UNSIGNED (inner1);
14689 if (TREE_CODE (op0) == INTEGER_CST)
14690 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14692 if (TREE_CODE (op1) == INTEGER_CST)
14693 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14695 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14696 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14698 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14699 ? tree_int_cst_min_precision (op0, UNSIGNED)
14700 : TYPE_PRECISION (inner0);
14702 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14703 ? tree_int_cst_min_precision (op1, UNSIGNED)
14704 : TYPE_PRECISION (inner1);
14706 return precision0 + precision1 < TYPE_PRECISION (type);
14709 return false;
14711 case BIT_AND_EXPR:
14712 return RECURSE (op0) || RECURSE (op1);
14714 case MAX_EXPR:
14715 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14716 things. */
14717 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14718 return RECURSE (op0) && RECURSE (op1);
14719 return RECURSE (op0) || RECURSE (op1);
14721 case BIT_IOR_EXPR:
14722 case BIT_XOR_EXPR:
14723 case MIN_EXPR:
14724 case RDIV_EXPR:
14725 case TRUNC_DIV_EXPR:
14726 case CEIL_DIV_EXPR:
14727 case FLOOR_DIV_EXPR:
14728 case ROUND_DIV_EXPR:
14729 return RECURSE (op0) && RECURSE (op1);
14731 case TRUNC_MOD_EXPR:
14732 return RECURSE (op0);
14734 case FLOOR_MOD_EXPR:
14735 return RECURSE (op1);
14737 case CEIL_MOD_EXPR:
14738 case ROUND_MOD_EXPR:
14739 default:
14740 return tree_simple_nonnegative_warnv_p (code, type);
14743 /* We don't know sign of `t', so be conservative and return false. */
14744 return false;
14747 /* Return true if T is known to be non-negative. If the return
14748 value is based on the assumption that signed overflow is undefined,
14749 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14750 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14752 bool
14753 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14755 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14756 return true;
14758 switch (TREE_CODE (t))
14760 case INTEGER_CST:
14761 return tree_int_cst_sgn (t) >= 0;
14763 case REAL_CST:
14764 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14766 case FIXED_CST:
14767 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14769 case COND_EXPR:
14770 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14772 case SSA_NAME:
14773 /* Limit the depth of recursion to avoid quadratic behavior.
14774 This is expected to catch almost all occurrences in practice.
14775 If this code misses important cases that unbounded recursion
14776 would not, passes that need this information could be revised
14777 to provide it through dataflow propagation. */
14778 return (!name_registered_for_update_p (t)
14779 && depth < param_max_ssa_name_query_depth
14780 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14781 strict_overflow_p, depth));
14783 default:
14784 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14788 /* Return true if T is known to be non-negative. If the return
14789 value is based on the assumption that signed overflow is undefined,
14790 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14791 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14793 bool
14794 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14795 bool *strict_overflow_p, int depth)
14797 switch (fn)
14799 CASE_CFN_ACOS:
14800 CASE_CFN_ACOSH:
14801 CASE_CFN_CABS:
14802 CASE_CFN_COSH:
14803 CASE_CFN_ERFC:
14804 CASE_CFN_EXP:
14805 CASE_CFN_EXP10:
14806 CASE_CFN_EXP2:
14807 CASE_CFN_FABS:
14808 CASE_CFN_FDIM:
14809 CASE_CFN_HYPOT:
14810 CASE_CFN_POW10:
14811 CASE_CFN_FFS:
14812 CASE_CFN_PARITY:
14813 CASE_CFN_POPCOUNT:
14814 CASE_CFN_CLZ:
14815 CASE_CFN_CLRSB:
14816 case CFN_BUILT_IN_BSWAP16:
14817 case CFN_BUILT_IN_BSWAP32:
14818 case CFN_BUILT_IN_BSWAP64:
14819 case CFN_BUILT_IN_BSWAP128:
14820 /* Always true. */
14821 return true;
14823 CASE_CFN_SQRT:
14824 CASE_CFN_SQRT_FN:
14825 /* sqrt(-0.0) is -0.0. */
14826 if (!HONOR_SIGNED_ZEROS (type))
14827 return true;
14828 return RECURSE (arg0);
14830 CASE_CFN_ASINH:
14831 CASE_CFN_ATAN:
14832 CASE_CFN_ATANH:
14833 CASE_CFN_CBRT:
14834 CASE_CFN_CEIL:
14835 CASE_CFN_CEIL_FN:
14836 CASE_CFN_ERF:
14837 CASE_CFN_EXPM1:
14838 CASE_CFN_FLOOR:
14839 CASE_CFN_FLOOR_FN:
14840 CASE_CFN_FMOD:
14841 CASE_CFN_FREXP:
14842 CASE_CFN_ICEIL:
14843 CASE_CFN_IFLOOR:
14844 CASE_CFN_IRINT:
14845 CASE_CFN_IROUND:
14846 CASE_CFN_LCEIL:
14847 CASE_CFN_LDEXP:
14848 CASE_CFN_LFLOOR:
14849 CASE_CFN_LLCEIL:
14850 CASE_CFN_LLFLOOR:
14851 CASE_CFN_LLRINT:
14852 CASE_CFN_LLROUND:
14853 CASE_CFN_LRINT:
14854 CASE_CFN_LROUND:
14855 CASE_CFN_MODF:
14856 CASE_CFN_NEARBYINT:
14857 CASE_CFN_NEARBYINT_FN:
14858 CASE_CFN_RINT:
14859 CASE_CFN_RINT_FN:
14860 CASE_CFN_ROUND:
14861 CASE_CFN_ROUND_FN:
14862 CASE_CFN_ROUNDEVEN:
14863 CASE_CFN_ROUNDEVEN_FN:
14864 CASE_CFN_SCALB:
14865 CASE_CFN_SCALBLN:
14866 CASE_CFN_SCALBN:
14867 CASE_CFN_SIGNBIT:
14868 CASE_CFN_SIGNIFICAND:
14869 CASE_CFN_SINH:
14870 CASE_CFN_TANH:
14871 CASE_CFN_TRUNC:
14872 CASE_CFN_TRUNC_FN:
14873 /* True if the 1st argument is nonnegative. */
14874 return RECURSE (arg0);
14876 CASE_CFN_FMAX:
14877 CASE_CFN_FMAX_FN:
14878 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14879 things. In the presence of sNaNs, we're only guaranteed to be
14880 non-negative if both operands are non-negative. In the presence
14881 of qNaNs, we're non-negative if either operand is non-negative
14882 and can't be a qNaN, or if both operands are non-negative. */
14883 if (tree_expr_maybe_signaling_nan_p (arg0) ||
14884 tree_expr_maybe_signaling_nan_p (arg1))
14885 return RECURSE (arg0) && RECURSE (arg1);
14886 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14887 || RECURSE (arg1))
14888 : (RECURSE (arg1)
14889 && !tree_expr_maybe_nan_p (arg1));
14891 CASE_CFN_FMIN:
14892 CASE_CFN_FMIN_FN:
14893 /* True if the 1st AND 2nd arguments are nonnegative. */
14894 return RECURSE (arg0) && RECURSE (arg1);
14896 CASE_CFN_COPYSIGN:
14897 CASE_CFN_COPYSIGN_FN:
14898 /* True if the 2nd argument is nonnegative. */
14899 return RECURSE (arg1);
14901 CASE_CFN_POWI:
14902 /* True if the 1st argument is nonnegative or the second
14903 argument is an even integer. */
14904 if (TREE_CODE (arg1) == INTEGER_CST
14905 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14906 return true;
14907 return RECURSE (arg0);
14909 CASE_CFN_POW:
14910 /* True if the 1st argument is nonnegative or the second
14911 argument is an even integer valued real. */
14912 if (TREE_CODE (arg1) == REAL_CST)
14914 REAL_VALUE_TYPE c;
14915 HOST_WIDE_INT n;
14917 c = TREE_REAL_CST (arg1);
14918 n = real_to_integer (&c);
14919 if ((n & 1) == 0)
14921 REAL_VALUE_TYPE cint;
14922 real_from_integer (&cint, VOIDmode, n, SIGNED);
14923 if (real_identical (&c, &cint))
14924 return true;
14927 return RECURSE (arg0);
14929 default:
14930 break;
14932 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14935 /* Return true if T is known to be non-negative. If the return
14936 value is based on the assumption that signed overflow is undefined,
14937 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14938 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14940 static bool
14941 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14943 enum tree_code code = TREE_CODE (t);
14944 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14945 return true;
14947 switch (code)
14949 case TARGET_EXPR:
14951 tree temp = TARGET_EXPR_SLOT (t);
14952 t = TARGET_EXPR_INITIAL (t);
14954 /* If the initializer is non-void, then it's a normal expression
14955 that will be assigned to the slot. */
14956 if (!VOID_TYPE_P (t))
14957 return RECURSE (t);
14959 /* Otherwise, the initializer sets the slot in some way. One common
14960 way is an assignment statement at the end of the initializer. */
14961 while (1)
14963 if (TREE_CODE (t) == BIND_EXPR)
14964 t = expr_last (BIND_EXPR_BODY (t));
14965 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14966 || TREE_CODE (t) == TRY_CATCH_EXPR)
14967 t = expr_last (TREE_OPERAND (t, 0));
14968 else if (TREE_CODE (t) == STATEMENT_LIST)
14969 t = expr_last (t);
14970 else
14971 break;
14973 if (TREE_CODE (t) == MODIFY_EXPR
14974 && TREE_OPERAND (t, 0) == temp)
14975 return RECURSE (TREE_OPERAND (t, 1));
14977 return false;
14980 case CALL_EXPR:
14982 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14983 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14985 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14986 get_call_combined_fn (t),
14987 arg0,
14988 arg1,
14989 strict_overflow_p, depth);
14991 case COMPOUND_EXPR:
14992 case MODIFY_EXPR:
14993 return RECURSE (TREE_OPERAND (t, 1));
14995 case BIND_EXPR:
14996 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14998 case SAVE_EXPR:
14999 return RECURSE (TREE_OPERAND (t, 0));
15001 default:
15002 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15006 #undef RECURSE
15007 #undef tree_expr_nonnegative_warnv_p
15009 /* Return true if T is known to be non-negative. If the return
15010 value is based on the assumption that signed overflow is undefined,
15011 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15012 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15014 bool
15015 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15017 enum tree_code code;
15018 if (t == error_mark_node)
15019 return false;
15021 code = TREE_CODE (t);
15022 switch (TREE_CODE_CLASS (code))
15024 case tcc_binary:
15025 case tcc_comparison:
15026 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15027 TREE_TYPE (t),
15028 TREE_OPERAND (t, 0),
15029 TREE_OPERAND (t, 1),
15030 strict_overflow_p, depth);
15032 case tcc_unary:
15033 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15034 TREE_TYPE (t),
15035 TREE_OPERAND (t, 0),
15036 strict_overflow_p, depth);
15038 case tcc_constant:
15039 case tcc_declaration:
15040 case tcc_reference:
15041 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15043 default:
15044 break;
15047 switch (code)
15049 case TRUTH_AND_EXPR:
15050 case TRUTH_OR_EXPR:
15051 case TRUTH_XOR_EXPR:
15052 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15053 TREE_TYPE (t),
15054 TREE_OPERAND (t, 0),
15055 TREE_OPERAND (t, 1),
15056 strict_overflow_p, depth);
15057 case TRUTH_NOT_EXPR:
15058 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15059 TREE_TYPE (t),
15060 TREE_OPERAND (t, 0),
15061 strict_overflow_p, depth);
15063 case COND_EXPR:
15064 case CONSTRUCTOR:
15065 case OBJ_TYPE_REF:
15066 case ASSERT_EXPR:
15067 case ADDR_EXPR:
15068 case WITH_SIZE_EXPR:
15069 case SSA_NAME:
15070 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15072 default:
15073 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15077 /* Return true if `t' is known to be non-negative. Handle warnings
15078 about undefined signed overflow. */
15080 bool
15081 tree_expr_nonnegative_p (tree t)
15083 bool ret, strict_overflow_p;
15085 strict_overflow_p = false;
15086 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15087 if (strict_overflow_p)
15088 fold_overflow_warning (("assuming signed overflow does not occur when "
15089 "determining that expression is always "
15090 "non-negative"),
15091 WARN_STRICT_OVERFLOW_MISC);
15092 return ret;
15096 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15097 For floating point we further ensure that T is not denormal.
15098 Similar logic is present in nonzero_address in rtlanal.h.
15100 If the return value is based on the assumption that signed overflow
15101 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15102 change *STRICT_OVERFLOW_P. */
15104 bool
15105 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15106 bool *strict_overflow_p)
15108 switch (code)
15110 case ABS_EXPR:
15111 return tree_expr_nonzero_warnv_p (op0,
15112 strict_overflow_p);
15114 case NOP_EXPR:
15116 tree inner_type = TREE_TYPE (op0);
15117 tree outer_type = type;
15119 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15120 && tree_expr_nonzero_warnv_p (op0,
15121 strict_overflow_p));
15123 break;
15125 case NON_LVALUE_EXPR:
15126 return tree_expr_nonzero_warnv_p (op0,
15127 strict_overflow_p);
15129 default:
15130 break;
15133 return false;
15136 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15137 For floating point we further ensure that T is not denormal.
15138 Similar logic is present in nonzero_address in rtlanal.h.
15140 If the return value is based on the assumption that signed overflow
15141 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15142 change *STRICT_OVERFLOW_P. */
15144 bool
15145 tree_binary_nonzero_warnv_p (enum tree_code code,
15146 tree type,
15147 tree op0,
15148 tree op1, bool *strict_overflow_p)
15150 bool sub_strict_overflow_p;
15151 switch (code)
15153 case POINTER_PLUS_EXPR:
15154 case PLUS_EXPR:
15155 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15157 /* With the presence of negative values it is hard
15158 to say something. */
15159 sub_strict_overflow_p = false;
15160 if (!tree_expr_nonnegative_warnv_p (op0,
15161 &sub_strict_overflow_p)
15162 || !tree_expr_nonnegative_warnv_p (op1,
15163 &sub_strict_overflow_p))
15164 return false;
15165 /* One of operands must be positive and the other non-negative. */
15166 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15167 overflows, on a twos-complement machine the sum of two
15168 nonnegative numbers can never be zero. */
15169 return (tree_expr_nonzero_warnv_p (op0,
15170 strict_overflow_p)
15171 || tree_expr_nonzero_warnv_p (op1,
15172 strict_overflow_p));
15174 break;
15176 case MULT_EXPR:
15177 if (TYPE_OVERFLOW_UNDEFINED (type))
15179 if (tree_expr_nonzero_warnv_p (op0,
15180 strict_overflow_p)
15181 && tree_expr_nonzero_warnv_p (op1,
15182 strict_overflow_p))
15184 *strict_overflow_p = true;
15185 return true;
15188 break;
15190 case MIN_EXPR:
15191 sub_strict_overflow_p = false;
15192 if (tree_expr_nonzero_warnv_p (op0,
15193 &sub_strict_overflow_p)
15194 && tree_expr_nonzero_warnv_p (op1,
15195 &sub_strict_overflow_p))
15197 if (sub_strict_overflow_p)
15198 *strict_overflow_p = true;
15200 break;
15202 case MAX_EXPR:
15203 sub_strict_overflow_p = false;
15204 if (tree_expr_nonzero_warnv_p (op0,
15205 &sub_strict_overflow_p))
15207 if (sub_strict_overflow_p)
15208 *strict_overflow_p = true;
15210 /* When both operands are nonzero, then MAX must be too. */
15211 if (tree_expr_nonzero_warnv_p (op1,
15212 strict_overflow_p))
15213 return true;
15215 /* MAX where operand 0 is positive is positive. */
15216 return tree_expr_nonnegative_warnv_p (op0,
15217 strict_overflow_p);
15219 /* MAX where operand 1 is positive is positive. */
15220 else if (tree_expr_nonzero_warnv_p (op1,
15221 &sub_strict_overflow_p)
15222 && tree_expr_nonnegative_warnv_p (op1,
15223 &sub_strict_overflow_p))
15225 if (sub_strict_overflow_p)
15226 *strict_overflow_p = true;
15227 return true;
15229 break;
15231 case BIT_IOR_EXPR:
15232 return (tree_expr_nonzero_warnv_p (op1,
15233 strict_overflow_p)
15234 || tree_expr_nonzero_warnv_p (op0,
15235 strict_overflow_p));
15237 default:
15238 break;
15241 return false;
15244 /* Return true when T is an address and is known to be nonzero.
15245 For floating point we further ensure that T is not denormal.
15246 Similar logic is present in nonzero_address in rtlanal.h.
15248 If the return value is based on the assumption that signed overflow
15249 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15250 change *STRICT_OVERFLOW_P. */
15252 bool
15253 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15255 bool sub_strict_overflow_p;
15256 switch (TREE_CODE (t))
15258 case INTEGER_CST:
15259 return !integer_zerop (t);
15261 case ADDR_EXPR:
15263 tree base = TREE_OPERAND (t, 0);
15265 if (!DECL_P (base))
15266 base = get_base_address (base);
15268 if (base && TREE_CODE (base) == TARGET_EXPR)
15269 base = TARGET_EXPR_SLOT (base);
15271 if (!base)
15272 return false;
15274 /* For objects in symbol table check if we know they are non-zero.
15275 Don't do anything for variables and functions before symtab is built;
15276 it is quite possible that they will be declared weak later. */
15277 int nonzero_addr = maybe_nonzero_address (base);
15278 if (nonzero_addr >= 0)
15279 return nonzero_addr;
15281 /* Constants are never weak. */
15282 if (CONSTANT_CLASS_P (base))
15283 return true;
15285 return false;
15288 case COND_EXPR:
15289 sub_strict_overflow_p = false;
15290 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15291 &sub_strict_overflow_p)
15292 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15293 &sub_strict_overflow_p))
15295 if (sub_strict_overflow_p)
15296 *strict_overflow_p = true;
15297 return true;
15299 break;
15301 case SSA_NAME:
15302 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15303 break;
15304 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15306 default:
15307 break;
15309 return false;
15312 #define integer_valued_real_p(X) \
15313 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15315 #define RECURSE(X) \
15316 ((integer_valued_real_p) (X, depth + 1))
15318 /* Return true if the floating point result of (CODE OP0) has an
15319 integer value. We also allow +Inf, -Inf and NaN to be considered
15320 integer values. Return false for signaling NaN.
15322 DEPTH is the current nesting depth of the query. */
15324 bool
15325 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15327 switch (code)
15329 case FLOAT_EXPR:
15330 return true;
15332 case ABS_EXPR:
15333 return RECURSE (op0);
15335 CASE_CONVERT:
15337 tree type = TREE_TYPE (op0);
15338 if (TREE_CODE (type) == INTEGER_TYPE)
15339 return true;
15340 if (TREE_CODE (type) == REAL_TYPE)
15341 return RECURSE (op0);
15342 break;
15345 default:
15346 break;
15348 return false;
15351 /* Return true if the floating point result of (CODE OP0 OP1) has an
15352 integer value. We also allow +Inf, -Inf and NaN to be considered
15353 integer values. Return false for signaling NaN.
15355 DEPTH is the current nesting depth of the query. */
15357 bool
15358 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15360 switch (code)
15362 case PLUS_EXPR:
15363 case MINUS_EXPR:
15364 case MULT_EXPR:
15365 case MIN_EXPR:
15366 case MAX_EXPR:
15367 return RECURSE (op0) && RECURSE (op1);
15369 default:
15370 break;
15372 return false;
15375 /* Return true if the floating point result of calling FNDECL with arguments
15376 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15377 considered integer values. Return false for signaling NaN. If FNDECL
15378 takes fewer than 2 arguments, the remaining ARGn are null.
15380 DEPTH is the current nesting depth of the query. */
15382 bool
15383 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15385 switch (fn)
15387 CASE_CFN_CEIL:
15388 CASE_CFN_CEIL_FN:
15389 CASE_CFN_FLOOR:
15390 CASE_CFN_FLOOR_FN:
15391 CASE_CFN_NEARBYINT:
15392 CASE_CFN_NEARBYINT_FN:
15393 CASE_CFN_RINT:
15394 CASE_CFN_RINT_FN:
15395 CASE_CFN_ROUND:
15396 CASE_CFN_ROUND_FN:
15397 CASE_CFN_ROUNDEVEN:
15398 CASE_CFN_ROUNDEVEN_FN:
15399 CASE_CFN_TRUNC:
15400 CASE_CFN_TRUNC_FN:
15401 return true;
15403 CASE_CFN_FMIN:
15404 CASE_CFN_FMIN_FN:
15405 CASE_CFN_FMAX:
15406 CASE_CFN_FMAX_FN:
15407 return RECURSE (arg0) && RECURSE (arg1);
15409 default:
15410 break;
15412 return false;
15415 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15416 has an integer value. We also allow +Inf, -Inf and NaN to be
15417 considered integer values. Return false for signaling NaN.
15419 DEPTH is the current nesting depth of the query. */
15421 bool
15422 integer_valued_real_single_p (tree t, int depth)
15424 switch (TREE_CODE (t))
15426 case REAL_CST:
15427 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15429 case COND_EXPR:
15430 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15432 case SSA_NAME:
15433 /* Limit the depth of recursion to avoid quadratic behavior.
15434 This is expected to catch almost all occurrences in practice.
15435 If this code misses important cases that unbounded recursion
15436 would not, passes that need this information could be revised
15437 to provide it through dataflow propagation. */
15438 return (!name_registered_for_update_p (t)
15439 && depth < param_max_ssa_name_query_depth
15440 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15441 depth));
15443 default:
15444 break;
15446 return false;
15449 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15450 has an integer value. We also allow +Inf, -Inf and NaN to be
15451 considered integer values. Return false for signaling NaN.
15453 DEPTH is the current nesting depth of the query. */
15455 static bool
15456 integer_valued_real_invalid_p (tree t, int depth)
15458 switch (TREE_CODE (t))
15460 case COMPOUND_EXPR:
15461 case MODIFY_EXPR:
15462 case BIND_EXPR:
15463 return RECURSE (TREE_OPERAND (t, 1));
15465 case SAVE_EXPR:
15466 return RECURSE (TREE_OPERAND (t, 0));
15468 default:
15469 break;
15471 return false;
15474 #undef RECURSE
15475 #undef integer_valued_real_p
15477 /* Return true if the floating point expression T has an integer value.
15478 We also allow +Inf, -Inf and NaN to be considered integer values.
15479 Return false for signaling NaN.
15481 DEPTH is the current nesting depth of the query. */
15483 bool
15484 integer_valued_real_p (tree t, int depth)
15486 if (t == error_mark_node)
15487 return false;
15489 STRIP_ANY_LOCATION_WRAPPER (t);
15491 tree_code code = TREE_CODE (t);
15492 switch (TREE_CODE_CLASS (code))
15494 case tcc_binary:
15495 case tcc_comparison:
15496 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15497 TREE_OPERAND (t, 1), depth);
15499 case tcc_unary:
15500 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15502 case tcc_constant:
15503 case tcc_declaration:
15504 case tcc_reference:
15505 return integer_valued_real_single_p (t, depth);
15507 default:
15508 break;
15511 switch (code)
15513 case COND_EXPR:
15514 case SSA_NAME:
15515 return integer_valued_real_single_p (t, depth);
15517 case CALL_EXPR:
15519 tree arg0 = (call_expr_nargs (t) > 0
15520 ? CALL_EXPR_ARG (t, 0)
15521 : NULL_TREE);
15522 tree arg1 = (call_expr_nargs (t) > 1
15523 ? CALL_EXPR_ARG (t, 1)
15524 : NULL_TREE);
15525 return integer_valued_real_call_p (get_call_combined_fn (t),
15526 arg0, arg1, depth);
15529 default:
15530 return integer_valued_real_invalid_p (t, depth);
15534 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15535 attempt to fold the expression to a constant without modifying TYPE,
15536 OP0 or OP1.
15538 If the expression could be simplified to a constant, then return
15539 the constant. If the expression would not be simplified to a
15540 constant, then return NULL_TREE. */
15542 tree
15543 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15545 tree tem = fold_binary (code, type, op0, op1);
15546 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15549 /* Given the components of a unary expression CODE, TYPE and OP0,
15550 attempt to fold the expression to a constant without modifying
15551 TYPE or OP0.
15553 If the expression could be simplified to a constant, then return
15554 the constant. If the expression would not be simplified to a
15555 constant, then return NULL_TREE. */
15557 tree
15558 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15560 tree tem = fold_unary (code, type, op0);
15561 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15564 /* If EXP represents referencing an element in a constant string
15565 (either via pointer arithmetic or array indexing), return the
15566 tree representing the value accessed, otherwise return NULL. */
15568 tree
15569 fold_read_from_constant_string (tree exp)
15571 if ((TREE_CODE (exp) == INDIRECT_REF
15572 || TREE_CODE (exp) == ARRAY_REF)
15573 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15575 tree exp1 = TREE_OPERAND (exp, 0);
15576 tree index;
15577 tree string;
15578 location_t loc = EXPR_LOCATION (exp);
15580 if (TREE_CODE (exp) == INDIRECT_REF)
15581 string = string_constant (exp1, &index, NULL, NULL);
15582 else
15584 tree low_bound = array_ref_low_bound (exp);
15585 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15587 /* Optimize the special-case of a zero lower bound.
15589 We convert the low_bound to sizetype to avoid some problems
15590 with constant folding. (E.g. suppose the lower bound is 1,
15591 and its mode is QI. Without the conversion,l (ARRAY
15592 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15593 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15594 if (! integer_zerop (low_bound))
15595 index = size_diffop_loc (loc, index,
15596 fold_convert_loc (loc, sizetype, low_bound));
15598 string = exp1;
15601 scalar_int_mode char_mode;
15602 if (string
15603 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15604 && TREE_CODE (string) == STRING_CST
15605 && tree_fits_uhwi_p (index)
15606 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15607 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15608 &char_mode)
15609 && GET_MODE_SIZE (char_mode) == 1)
15610 return build_int_cst_type (TREE_TYPE (exp),
15611 (TREE_STRING_POINTER (string)
15612 [TREE_INT_CST_LOW (index)]));
15614 return NULL;
15617 /* Folds a read from vector element at IDX of vector ARG. */
15619 tree
15620 fold_read_from_vector (tree arg, poly_uint64 idx)
15622 unsigned HOST_WIDE_INT i;
15623 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15624 && known_ge (idx, 0u)
15625 && idx.is_constant (&i))
15627 if (TREE_CODE (arg) == VECTOR_CST)
15628 return VECTOR_CST_ELT (arg, i);
15629 else if (TREE_CODE (arg) == CONSTRUCTOR)
15631 if (CONSTRUCTOR_NELTS (arg)
15632 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15633 return NULL_TREE;
15634 if (i >= CONSTRUCTOR_NELTS (arg))
15635 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15636 return CONSTRUCTOR_ELT (arg, i)->value;
15639 return NULL_TREE;
15642 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15643 an integer constant, real, or fixed-point constant.
15645 TYPE is the type of the result. */
15647 static tree
15648 fold_negate_const (tree arg0, tree type)
15650 tree t = NULL_TREE;
15652 switch (TREE_CODE (arg0))
15654 case REAL_CST:
15655 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15656 break;
15658 case FIXED_CST:
15660 FIXED_VALUE_TYPE f;
15661 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15662 &(TREE_FIXED_CST (arg0)), NULL,
15663 TYPE_SATURATING (type));
15664 t = build_fixed (type, f);
15665 /* Propagate overflow flags. */
15666 if (overflow_p | TREE_OVERFLOW (arg0))
15667 TREE_OVERFLOW (t) = 1;
15668 break;
15671 default:
15672 if (poly_int_tree_p (arg0))
15674 wi::overflow_type overflow;
15675 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15676 t = force_fit_type (type, res, 1,
15677 (overflow && ! TYPE_UNSIGNED (type))
15678 || TREE_OVERFLOW (arg0));
15679 break;
15682 gcc_unreachable ();
15685 return t;
15688 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15689 an integer constant or real constant.
15691 TYPE is the type of the result. */
15693 tree
15694 fold_abs_const (tree arg0, tree type)
15696 tree t = NULL_TREE;
15698 switch (TREE_CODE (arg0))
15700 case INTEGER_CST:
15702 /* If the value is unsigned or non-negative, then the absolute value
15703 is the same as the ordinary value. */
15704 wide_int val = wi::to_wide (arg0);
15705 wi::overflow_type overflow = wi::OVF_NONE;
15706 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15709 /* If the value is negative, then the absolute value is
15710 its negation. */
15711 else
15712 val = wi::neg (val, &overflow);
15714 /* Force to the destination type, set TREE_OVERFLOW for signed
15715 TYPE only. */
15716 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15718 break;
15720 case REAL_CST:
15721 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15722 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15723 else
15724 t = arg0;
15725 break;
15727 default:
15728 gcc_unreachable ();
15731 return t;
15734 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15735 constant. TYPE is the type of the result. */
15737 static tree
15738 fold_not_const (const_tree arg0, tree type)
15740 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15742 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15745 /* Given CODE, a relational operator, the target type, TYPE and two
15746 constant operands OP0 and OP1, return the result of the
15747 relational operation. If the result is not a compile time
15748 constant, then return NULL_TREE. */
15750 static tree
15751 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15753 int result, invert;
15755 /* From here on, the only cases we handle are when the result is
15756 known to be a constant. */
15758 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15760 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15761 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15763 /* Handle the cases where either operand is a NaN. */
15764 if (real_isnan (c0) || real_isnan (c1))
15766 switch (code)
15768 case EQ_EXPR:
15769 case ORDERED_EXPR:
15770 result = 0;
15771 break;
15773 case NE_EXPR:
15774 case UNORDERED_EXPR:
15775 case UNLT_EXPR:
15776 case UNLE_EXPR:
15777 case UNGT_EXPR:
15778 case UNGE_EXPR:
15779 case UNEQ_EXPR:
15780 result = 1;
15781 break;
15783 case LT_EXPR:
15784 case LE_EXPR:
15785 case GT_EXPR:
15786 case GE_EXPR:
15787 case LTGT_EXPR:
15788 if (flag_trapping_math)
15789 return NULL_TREE;
15790 result = 0;
15791 break;
15793 default:
15794 gcc_unreachable ();
15797 return constant_boolean_node (result, type);
15800 return constant_boolean_node (real_compare (code, c0, c1), type);
15803 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15805 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15806 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15807 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15810 /* Handle equality/inequality of complex constants. */
15811 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15813 tree rcond = fold_relational_const (code, type,
15814 TREE_REALPART (op0),
15815 TREE_REALPART (op1));
15816 tree icond = fold_relational_const (code, type,
15817 TREE_IMAGPART (op0),
15818 TREE_IMAGPART (op1));
15819 if (code == EQ_EXPR)
15820 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15821 else if (code == NE_EXPR)
15822 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15823 else
15824 return NULL_TREE;
15827 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15829 if (!VECTOR_TYPE_P (type))
15831 /* Have vector comparison with scalar boolean result. */
15832 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15833 && known_eq (VECTOR_CST_NELTS (op0),
15834 VECTOR_CST_NELTS (op1)));
15835 unsigned HOST_WIDE_INT nunits;
15836 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15837 return NULL_TREE;
15838 for (unsigned i = 0; i < nunits; i++)
15840 tree elem0 = VECTOR_CST_ELT (op0, i);
15841 tree elem1 = VECTOR_CST_ELT (op1, i);
15842 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15843 if (tmp == NULL_TREE)
15844 return NULL_TREE;
15845 if (integer_zerop (tmp))
15846 return constant_boolean_node (code == NE_EXPR, type);
15848 return constant_boolean_node (code == EQ_EXPR, type);
15850 tree_vector_builder elts;
15851 if (!elts.new_binary_operation (type, op0, op1, false))
15852 return NULL_TREE;
15853 unsigned int count = elts.encoded_nelts ();
15854 for (unsigned i = 0; i < count; i++)
15856 tree elem_type = TREE_TYPE (type);
15857 tree elem0 = VECTOR_CST_ELT (op0, i);
15858 tree elem1 = VECTOR_CST_ELT (op1, i);
15860 tree tem = fold_relational_const (code, elem_type,
15861 elem0, elem1);
15863 if (tem == NULL_TREE)
15864 return NULL_TREE;
15866 elts.quick_push (build_int_cst (elem_type,
15867 integer_zerop (tem) ? 0 : -1));
15870 return elts.build ();
15873 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15875 To compute GT, swap the arguments and do LT.
15876 To compute GE, do LT and invert the result.
15877 To compute LE, swap the arguments, do LT and invert the result.
15878 To compute NE, do EQ and invert the result.
15880 Therefore, the code below must handle only EQ and LT. */
15882 if (code == LE_EXPR || code == GT_EXPR)
15884 std::swap (op0, op1);
15885 code = swap_tree_comparison (code);
15888 /* Note that it is safe to invert for real values here because we
15889 have already handled the one case that it matters. */
15891 invert = 0;
15892 if (code == NE_EXPR || code == GE_EXPR)
15894 invert = 1;
15895 code = invert_tree_comparison (code, false);
15898 /* Compute a result for LT or EQ if args permit;
15899 Otherwise return T. */
15900 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15902 if (code == EQ_EXPR)
15903 result = tree_int_cst_equal (op0, op1);
15904 else
15905 result = tree_int_cst_lt (op0, op1);
15907 else
15908 return NULL_TREE;
15910 if (invert)
15911 result ^= 1;
15912 return constant_boolean_node (result, type);
15915 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15916 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15917 itself. */
15919 tree
15920 fold_build_cleanup_point_expr (tree type, tree expr)
15922 /* If the expression does not have side effects then we don't have to wrap
15923 it with a cleanup point expression. */
15924 if (!TREE_SIDE_EFFECTS (expr))
15925 return expr;
15927 /* If the expression is a return, check to see if the expression inside the
15928 return has no side effects or the right hand side of the modify expression
15929 inside the return. If either don't have side effects set we don't need to
15930 wrap the expression in a cleanup point expression. Note we don't check the
15931 left hand side of the modify because it should always be a return decl. */
15932 if (TREE_CODE (expr) == RETURN_EXPR)
15934 tree op = TREE_OPERAND (expr, 0);
15935 if (!op || !TREE_SIDE_EFFECTS (op))
15936 return expr;
15937 op = TREE_OPERAND (op, 1);
15938 if (!TREE_SIDE_EFFECTS (op))
15939 return expr;
15942 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15945 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15946 of an indirection through OP0, or NULL_TREE if no simplification is
15947 possible. */
15949 tree
15950 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15952 tree sub = op0;
15953 tree subtype;
15954 poly_uint64 const_op01;
15956 STRIP_NOPS (sub);
15957 subtype = TREE_TYPE (sub);
15958 if (!POINTER_TYPE_P (subtype)
15959 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15960 return NULL_TREE;
15962 if (TREE_CODE (sub) == ADDR_EXPR)
15964 tree op = TREE_OPERAND (sub, 0);
15965 tree optype = TREE_TYPE (op);
15967 /* *&CONST_DECL -> to the value of the const decl. */
15968 if (TREE_CODE (op) == CONST_DECL)
15969 return DECL_INITIAL (op);
15970 /* *&p => p; make sure to handle *&"str"[cst] here. */
15971 if (type == optype)
15973 tree fop = fold_read_from_constant_string (op);
15974 if (fop)
15975 return fop;
15976 else
15977 return op;
15979 /* *(foo *)&fooarray => fooarray[0] */
15980 else if (TREE_CODE (optype) == ARRAY_TYPE
15981 && type == TREE_TYPE (optype)
15982 && (!in_gimple_form
15983 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15985 tree type_domain = TYPE_DOMAIN (optype);
15986 tree min_val = size_zero_node;
15987 if (type_domain && TYPE_MIN_VALUE (type_domain))
15988 min_val = TYPE_MIN_VALUE (type_domain);
15989 if (in_gimple_form
15990 && TREE_CODE (min_val) != INTEGER_CST)
15991 return NULL_TREE;
15992 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15993 NULL_TREE, NULL_TREE);
15995 /* *(foo *)&complexfoo => __real__ complexfoo */
15996 else if (TREE_CODE (optype) == COMPLEX_TYPE
15997 && type == TREE_TYPE (optype))
15998 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15999 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16000 else if (VECTOR_TYPE_P (optype)
16001 && type == TREE_TYPE (optype))
16003 tree part_width = TYPE_SIZE (type);
16004 tree index = bitsize_int (0);
16005 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16006 index);
16010 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16011 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16013 tree op00 = TREE_OPERAND (sub, 0);
16014 tree op01 = TREE_OPERAND (sub, 1);
16016 STRIP_NOPS (op00);
16017 if (TREE_CODE (op00) == ADDR_EXPR)
16019 tree op00type;
16020 op00 = TREE_OPERAND (op00, 0);
16021 op00type = TREE_TYPE (op00);
16023 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16024 if (VECTOR_TYPE_P (op00type)
16025 && type == TREE_TYPE (op00type)
16026 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16027 but we want to treat offsets with MSB set as negative.
16028 For the code below negative offsets are invalid and
16029 TYPE_SIZE of the element is something unsigned, so
16030 check whether op01 fits into poly_int64, which implies
16031 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16032 then just use poly_uint64 because we want to treat the
16033 value as unsigned. */
16034 && tree_fits_poly_int64_p (op01))
16036 tree part_width = TYPE_SIZE (type);
16037 poly_uint64 max_offset
16038 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16039 * TYPE_VECTOR_SUBPARTS (op00type));
16040 if (known_lt (const_op01, max_offset))
16042 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16043 return fold_build3_loc (loc,
16044 BIT_FIELD_REF, type, op00,
16045 part_width, index);
16048 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16049 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16050 && type == TREE_TYPE (op00type))
16052 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16053 const_op01))
16054 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16056 /* ((foo *)&fooarray)[1] => fooarray[1] */
16057 else if (TREE_CODE (op00type) == ARRAY_TYPE
16058 && type == TREE_TYPE (op00type))
16060 tree type_domain = TYPE_DOMAIN (op00type);
16061 tree min_val = size_zero_node;
16062 if (type_domain && TYPE_MIN_VALUE (type_domain))
16063 min_val = TYPE_MIN_VALUE (type_domain);
16064 poly_uint64 type_size, index;
16065 if (poly_int_tree_p (min_val)
16066 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16067 && multiple_p (const_op01, type_size, &index))
16069 poly_offset_int off = index + wi::to_poly_offset (min_val);
16070 op01 = wide_int_to_tree (sizetype, off);
16071 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16072 NULL_TREE, NULL_TREE);
16078 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16079 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16080 && type == TREE_TYPE (TREE_TYPE (subtype))
16081 && (!in_gimple_form
16082 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16084 tree type_domain;
16085 tree min_val = size_zero_node;
16086 sub = build_fold_indirect_ref_loc (loc, sub);
16087 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16088 if (type_domain && TYPE_MIN_VALUE (type_domain))
16089 min_val = TYPE_MIN_VALUE (type_domain);
16090 if (in_gimple_form
16091 && TREE_CODE (min_val) != INTEGER_CST)
16092 return NULL_TREE;
16093 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16094 NULL_TREE);
16097 return NULL_TREE;
16100 /* Builds an expression for an indirection through T, simplifying some
16101 cases. */
16103 tree
16104 build_fold_indirect_ref_loc (location_t loc, tree t)
16106 tree type = TREE_TYPE (TREE_TYPE (t));
16107 tree sub = fold_indirect_ref_1 (loc, type, t);
16109 if (sub)
16110 return sub;
16112 return build1_loc (loc, INDIRECT_REF, type, t);
16115 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16117 tree
16118 fold_indirect_ref_loc (location_t loc, tree t)
16120 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16122 if (sub)
16123 return sub;
16124 else
16125 return t;
16128 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16129 whose result is ignored. The type of the returned tree need not be
16130 the same as the original expression. */
16132 tree
16133 fold_ignored_result (tree t)
16135 if (!TREE_SIDE_EFFECTS (t))
16136 return integer_zero_node;
16138 for (;;)
16139 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16141 case tcc_unary:
16142 t = TREE_OPERAND (t, 0);
16143 break;
16145 case tcc_binary:
16146 case tcc_comparison:
16147 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16148 t = TREE_OPERAND (t, 0);
16149 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16150 t = TREE_OPERAND (t, 1);
16151 else
16152 return t;
16153 break;
16155 case tcc_expression:
16156 switch (TREE_CODE (t))
16158 case COMPOUND_EXPR:
16159 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16160 return t;
16161 t = TREE_OPERAND (t, 0);
16162 break;
16164 case COND_EXPR:
16165 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16166 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16167 return t;
16168 t = TREE_OPERAND (t, 0);
16169 break;
16171 default:
16172 return t;
16174 break;
16176 default:
16177 return t;
16181 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16183 tree
16184 round_up_loc (location_t loc, tree value, unsigned int divisor)
16186 tree div = NULL_TREE;
16188 if (divisor == 1)
16189 return value;
16191 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16192 have to do anything. Only do this when we are not given a const,
16193 because in that case, this check is more expensive than just
16194 doing it. */
16195 if (TREE_CODE (value) != INTEGER_CST)
16197 div = build_int_cst (TREE_TYPE (value), divisor);
16199 if (multiple_of_p (TREE_TYPE (value), value, div))
16200 return value;
16203 /* If divisor is a power of two, simplify this to bit manipulation. */
16204 if (pow2_or_zerop (divisor))
16206 if (TREE_CODE (value) == INTEGER_CST)
16208 wide_int val = wi::to_wide (value);
16209 bool overflow_p;
16211 if ((val & (divisor - 1)) == 0)
16212 return value;
16214 overflow_p = TREE_OVERFLOW (value);
16215 val += divisor - 1;
16216 val &= (int) -divisor;
16217 if (val == 0)
16218 overflow_p = true;
16220 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16222 else
16224 tree t;
16226 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16227 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16228 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16229 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16232 else
16234 if (!div)
16235 div = build_int_cst (TREE_TYPE (value), divisor);
16236 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16237 value = size_binop_loc (loc, MULT_EXPR, value, div);
16240 return value;
16243 /* Likewise, but round down. */
16245 tree
16246 round_down_loc (location_t loc, tree value, int divisor)
16248 tree div = NULL_TREE;
16250 gcc_assert (divisor > 0);
16251 if (divisor == 1)
16252 return value;
16254 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16255 have to do anything. Only do this when we are not given a const,
16256 because in that case, this check is more expensive than just
16257 doing it. */
16258 if (TREE_CODE (value) != INTEGER_CST)
16260 div = build_int_cst (TREE_TYPE (value), divisor);
16262 if (multiple_of_p (TREE_TYPE (value), value, div))
16263 return value;
16266 /* If divisor is a power of two, simplify this to bit manipulation. */
16267 if (pow2_or_zerop (divisor))
16269 tree t;
16271 t = build_int_cst (TREE_TYPE (value), -divisor);
16272 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16274 else
16276 if (!div)
16277 div = build_int_cst (TREE_TYPE (value), divisor);
16278 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16279 value = size_binop_loc (loc, MULT_EXPR, value, div);
16282 return value;
16285 /* Returns the pointer to the base of the object addressed by EXP and
16286 extracts the information about the offset of the access, storing it
16287 to PBITPOS and POFFSET. */
16289 static tree
16290 split_address_to_core_and_offset (tree exp,
16291 poly_int64_pod *pbitpos, tree *poffset)
16293 tree core;
16294 machine_mode mode;
16295 int unsignedp, reversep, volatilep;
16296 poly_int64 bitsize;
16297 location_t loc = EXPR_LOCATION (exp);
16299 if (TREE_CODE (exp) == ADDR_EXPR)
16301 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16302 poffset, &mode, &unsignedp, &reversep,
16303 &volatilep);
16304 core = build_fold_addr_expr_loc (loc, core);
16306 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16308 core = TREE_OPERAND (exp, 0);
16309 STRIP_NOPS (core);
16310 *pbitpos = 0;
16311 *poffset = TREE_OPERAND (exp, 1);
16312 if (poly_int_tree_p (*poffset))
16314 poly_offset_int tem
16315 = wi::sext (wi::to_poly_offset (*poffset),
16316 TYPE_PRECISION (TREE_TYPE (*poffset)));
16317 tem <<= LOG2_BITS_PER_UNIT;
16318 if (tem.to_shwi (pbitpos))
16319 *poffset = NULL_TREE;
16322 else
16324 core = exp;
16325 *pbitpos = 0;
16326 *poffset = NULL_TREE;
16329 return core;
16332 /* Returns true if addresses of E1 and E2 differ by a constant, false
16333 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16335 bool
16336 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16338 tree core1, core2;
16339 poly_int64 bitpos1, bitpos2;
16340 tree toffset1, toffset2, tdiff, type;
16342 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16343 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16345 poly_int64 bytepos1, bytepos2;
16346 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16347 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16348 || !operand_equal_p (core1, core2, 0))
16349 return false;
16351 if (toffset1 && toffset2)
16353 type = TREE_TYPE (toffset1);
16354 if (type != TREE_TYPE (toffset2))
16355 toffset2 = fold_convert (type, toffset2);
16357 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16358 if (!cst_and_fits_in_hwi (tdiff))
16359 return false;
16361 *diff = int_cst_value (tdiff);
16363 else if (toffset1 || toffset2)
16365 /* If only one of the offsets is non-constant, the difference cannot
16366 be a constant. */
16367 return false;
16369 else
16370 *diff = 0;
16372 *diff += bytepos1 - bytepos2;
16373 return true;
16376 /* Return OFF converted to a pointer offset type suitable as offset for
16377 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16378 tree
16379 convert_to_ptrofftype_loc (location_t loc, tree off)
16381 if (ptrofftype_p (TREE_TYPE (off)))
16382 return off;
16383 return fold_convert_loc (loc, sizetype, off);
16386 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16387 tree
16388 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16390 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16391 ptr, convert_to_ptrofftype_loc (loc, off));
16394 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16395 tree
16396 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16398 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16399 ptr, size_int (off));
16402 /* Return a pointer to a NUL-terminated string containing the sequence
16403 of bytes corresponding to the representation of the object referred to
16404 by SRC (or a subsequence of such bytes within it if SRC is a reference
16405 to an initialized constant array plus some constant offset).
16406 Set *STRSIZE the number of bytes in the constant sequence including
16407 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16408 where A is the array that stores the constant sequence that SRC points
16409 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16410 need not point to a string or even an array of characters but may point
16411 to an object of any type. */
16413 const char *
16414 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16416 /* The offset into the array A storing the string, and A's byte size. */
16417 tree offset_node;
16418 tree mem_size;
16420 if (strsize)
16421 *strsize = 0;
16423 if (strsize)
16424 src = byte_representation (src, &offset_node, &mem_size, NULL);
16425 else
16426 src = string_constant (src, &offset_node, &mem_size, NULL);
16427 if (!src)
16428 return NULL;
16430 unsigned HOST_WIDE_INT offset = 0;
16431 if (offset_node != NULL_TREE)
16433 if (!tree_fits_uhwi_p (offset_node))
16434 return NULL;
16435 else
16436 offset = tree_to_uhwi (offset_node);
16439 if (!tree_fits_uhwi_p (mem_size))
16440 return NULL;
16442 /* ARRAY_SIZE is the byte size of the array the constant sequence
16443 is stored in and equal to sizeof A. INIT_BYTES is the number
16444 of bytes in the constant sequence used to initialize the array,
16445 including any embedded NULs as well as the terminating NUL (for
16446 strings), but not including any trailing zeros/NULs past
16447 the terminating one appended implicitly to a string literal to
16448 zero out the remainder of the array it's stored in. For example,
16449 given:
16450 const char a[7] = "abc\0d";
16451 n = strlen (a + 1);
16452 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16453 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16454 is equal to strlen (A) + 1. */
16455 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16456 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16457 const char *string = TREE_STRING_POINTER (src);
16459 /* Ideally this would turn into a gcc_checking_assert over time. */
16460 if (init_bytes > array_size)
16461 init_bytes = array_size;
16463 if (init_bytes == 0 || offset >= array_size)
16464 return NULL;
16466 if (strsize)
16468 /* Compute and store the number of characters from the beginning
16469 of the substring at OFFSET to the end, including the terminating
16470 nul. Offsets past the initial length refer to null strings. */
16471 if (offset < init_bytes)
16472 *strsize = init_bytes - offset;
16473 else
16474 *strsize = 1;
16476 else
16478 tree eltype = TREE_TYPE (TREE_TYPE (src));
16479 /* Support only properly NUL-terminated single byte strings. */
16480 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16481 return NULL;
16482 if (string[init_bytes - 1] != '\0')
16483 return NULL;
16486 return offset < init_bytes ? string + offset : "";
16489 /* Return a pointer to a NUL-terminated string corresponding to
16490 the expression STR referencing a constant string, possibly
16491 involving a constant offset. Return null if STR either doesn't
16492 reference a constant string or if it involves a nonconstant
16493 offset. */
16495 const char *
16496 c_getstr (tree str)
16498 return getbyterep (str, NULL);
16501 /* Given a tree T, compute which bits in T may be nonzero. */
16503 wide_int
16504 tree_nonzero_bits (const_tree t)
16506 switch (TREE_CODE (t))
16508 case INTEGER_CST:
16509 return wi::to_wide (t);
16510 case SSA_NAME:
16511 return get_nonzero_bits (t);
16512 case NON_LVALUE_EXPR:
16513 case SAVE_EXPR:
16514 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16515 case BIT_AND_EXPR:
16516 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16517 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16518 case BIT_IOR_EXPR:
16519 case BIT_XOR_EXPR:
16520 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16521 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16522 case COND_EXPR:
16523 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16524 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16525 CASE_CONVERT:
16526 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16527 TYPE_PRECISION (TREE_TYPE (t)),
16528 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16529 case PLUS_EXPR:
16530 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16532 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16533 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16534 if (wi::bit_and (nzbits1, nzbits2) == 0)
16535 return wi::bit_or (nzbits1, nzbits2);
16537 break;
16538 case LSHIFT_EXPR:
16539 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16541 tree type = TREE_TYPE (t);
16542 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16543 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16544 TYPE_PRECISION (type));
16545 return wi::neg_p (arg1)
16546 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16547 : wi::lshift (nzbits, arg1);
16549 break;
16550 case RSHIFT_EXPR:
16551 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16553 tree type = TREE_TYPE (t);
16554 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16555 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16556 TYPE_PRECISION (type));
16557 return wi::neg_p (arg1)
16558 ? wi::lshift (nzbits, -arg1)
16559 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16561 break;
16562 default:
16563 break;
16566 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
16569 /* Helper function for address compare simplifications in match.pd.
16570 OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16571 TYPE is the type of comparison operands.
16572 BASE0, BASE1, OFF0 and OFF1 are set by the function.
16573 GENERIC is true if GENERIC folding and false for GIMPLE folding.
16574 Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16575 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16576 and 2 if unknown. */
16579 address_compare (tree_code code, tree type, tree op0, tree op1,
16580 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16581 bool generic)
16583 gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16584 gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16585 base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16586 base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16587 if (base0 && TREE_CODE (base0) == MEM_REF)
16589 off0 += mem_ref_offset (base0).force_shwi ();
16590 base0 = TREE_OPERAND (base0, 0);
16592 if (base1 && TREE_CODE (base1) == MEM_REF)
16594 off1 += mem_ref_offset (base1).force_shwi ();
16595 base1 = TREE_OPERAND (base1, 0);
16597 if (base0 == NULL_TREE || base1 == NULL_TREE)
16598 return 2;
16600 int equal = 2;
16601 /* Punt in GENERIC on variables with value expressions;
16602 the value expressions might point to fields/elements
16603 of other vars etc. */
16604 if (generic
16605 && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16606 || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16607 return 2;
16608 else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16610 symtab_node *node0 = symtab_node::get_create (base0);
16611 symtab_node *node1 = symtab_node::get_create (base1);
16612 equal = node0->equal_address_to (node1);
16614 else if ((DECL_P (base0)
16615 || TREE_CODE (base0) == SSA_NAME
16616 || TREE_CODE (base0) == STRING_CST)
16617 && (DECL_P (base1)
16618 || TREE_CODE (base1) == SSA_NAME
16619 || TREE_CODE (base1) == STRING_CST))
16620 equal = (base0 == base1);
16621 /* Assume different STRING_CSTs with the same content will be
16622 merged. */
16623 if (equal == 0
16624 && TREE_CODE (base0) == STRING_CST
16625 && TREE_CODE (base1) == STRING_CST
16626 && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16627 && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16628 TREE_STRING_LENGTH (base0)) == 0)
16629 equal = 1;
16630 if (equal == 1)
16632 if (code == EQ_EXPR
16633 || code == NE_EXPR
16634 /* If the offsets are equal we can ignore overflow. */
16635 || known_eq (off0, off1)
16636 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16637 /* Or if we compare using pointers to decls or strings. */
16638 || (POINTER_TYPE_P (type)
16639 && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16640 return 1;
16641 return 2;
16643 if (equal != 0)
16644 return equal;
16645 if (code != EQ_EXPR && code != NE_EXPR)
16646 return 2;
16648 /* At this point we know (or assume) the two pointers point at
16649 different objects. */
16650 HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16651 off0.is_constant (&ioff0);
16652 off1.is_constant (&ioff1);
16653 /* Punt on non-zero offsets from functions. */
16654 if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16655 || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16656 return 2;
16657 /* Or if the bases are neither decls nor string literals. */
16658 if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16659 return 2;
16660 if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16661 return 2;
16662 /* For initializers, assume addresses of different functions are
16663 different. */
16664 if (folding_initializer
16665 && TREE_CODE (base0) == FUNCTION_DECL
16666 && TREE_CODE (base1) == FUNCTION_DECL)
16667 return 0;
16669 /* Compute whether one address points to the start of one
16670 object and another one to the end of another one. */
16671 poly_int64 size0 = 0, size1 = 0;
16672 if (TREE_CODE (base0) == STRING_CST)
16674 if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16675 equal = 2;
16676 else
16677 size0 = TREE_STRING_LENGTH (base0);
16679 else if (TREE_CODE (base0) == FUNCTION_DECL)
16680 size0 = 1;
16681 else
16683 tree sz0 = DECL_SIZE_UNIT (base0);
16684 if (!tree_fits_poly_int64_p (sz0))
16685 equal = 2;
16686 else
16687 size0 = tree_to_poly_int64 (sz0);
16689 if (TREE_CODE (base1) == STRING_CST)
16691 if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16692 equal = 2;
16693 else
16694 size1 = TREE_STRING_LENGTH (base1);
16696 else if (TREE_CODE (base1) == FUNCTION_DECL)
16697 size1 = 1;
16698 else
16700 tree sz1 = DECL_SIZE_UNIT (base1);
16701 if (!tree_fits_poly_int64_p (sz1))
16702 equal = 2;
16703 else
16704 size1 = tree_to_poly_int64 (sz1);
16706 if (equal == 0)
16708 /* If one offset is pointing (or could be) to the beginning of one
16709 object and the other is pointing to one past the last byte of the
16710 other object, punt. */
16711 if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16712 equal = 2;
16713 else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16714 equal = 2;
16715 /* If both offsets are the same, there are some cases we know that are
16716 ok. Either if we know they aren't zero, or if we know both sizes
16717 are no zero. */
16718 if (equal == 2
16719 && known_eq (off0, off1)
16720 && (known_ne (off0, 0)
16721 || (known_ne (size0, 0) && known_ne (size1, 0))))
16722 equal = 0;
16725 /* At this point, equal is 2 if either one or both pointers are out of
16726 bounds of their object, or one points to start of its object and the
16727 other points to end of its object. This is unspecified behavior
16728 e.g. in C++. Otherwise equal is 0. */
16729 if (folding_cxx_constexpr && equal)
16730 return equal;
16732 /* When both pointers point to string literals, even when equal is 0,
16733 due to tail merging of string literals the pointers might be the same. */
16734 if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
16736 if (ioff0 < 0
16737 || ioff1 < 0
16738 || ioff0 > TREE_STRING_LENGTH (base0)
16739 || ioff1 > TREE_STRING_LENGTH (base1))
16740 return 2;
16742 /* If the bytes in the string literals starting at the pointers
16743 differ, the pointers need to be different. */
16744 if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
16745 TREE_STRING_POINTER (base1) + ioff1,
16746 MIN (TREE_STRING_LENGTH (base0) - ioff0,
16747 TREE_STRING_LENGTH (base1) - ioff1)) == 0)
16749 HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
16750 if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
16751 TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
16752 ioffmin) == 0)
16753 /* If even the bytes in the string literal before the
16754 pointers are the same, the string literals could be
16755 tail merged. */
16756 return 2;
16758 return 0;
16761 if (folding_cxx_constexpr)
16762 return 0;
16764 /* If this is a pointer comparison, ignore for now even
16765 valid equalities where one pointer is the offset zero
16766 of one object and the other to one past end of another one. */
16767 if (!INTEGRAL_TYPE_P (type))
16768 return 0;
16770 /* Assume that string literals can't be adjacent to variables
16771 (automatic or global). */
16772 if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
16773 return 0;
16775 /* Assume that automatic variables can't be adjacent to global
16776 variables. */
16777 if (is_global_var (base0) != is_global_var (base1))
16778 return 0;
16780 return equal;
16783 /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
16784 tree
16785 ctor_single_nonzero_element (const_tree t)
16787 unsigned HOST_WIDE_INT idx;
16788 constructor_elt *ce;
16789 tree elt = NULL_TREE;
16791 if (TREE_CODE (t) != CONSTRUCTOR)
16792 return NULL_TREE;
16793 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
16794 if (!integer_zerop (ce->value) && !real_zerop (ce->value))
16796 if (elt)
16797 return NULL_TREE;
16798 elt = ce->value;
16800 return elt;
16803 #if CHECKING_P
16805 namespace selftest {
16807 /* Helper functions for writing tests of folding trees. */
16809 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16811 static void
16812 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
16813 tree constant)
16815 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
16818 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
16819 wrapping WRAPPED_EXPR. */
16821 static void
16822 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
16823 tree wrapped_expr)
16825 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
16826 ASSERT_NE (wrapped_expr, result);
16827 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
16828 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
16831 /* Verify that various arithmetic binary operations are folded
16832 correctly. */
16834 static void
16835 test_arithmetic_folding ()
16837 tree type = integer_type_node;
16838 tree x = create_tmp_var_raw (type, "x");
16839 tree zero = build_zero_cst (type);
16840 tree one = build_int_cst (type, 1);
16842 /* Addition. */
16843 /* 1 <-- (0 + 1) */
16844 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
16845 one);
16846 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
16847 one);
16849 /* (nonlvalue)x <-- (x + 0) */
16850 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
16853 /* Subtraction. */
16854 /* 0 <-- (x - x) */
16855 assert_binop_folds_to_const (x, MINUS_EXPR, x,
16856 zero);
16857 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
16860 /* Multiplication. */
16861 /* 0 <-- (x * 0) */
16862 assert_binop_folds_to_const (x, MULT_EXPR, zero,
16863 zero);
16865 /* (nonlvalue)x <-- (x * 1) */
16866 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
16870 /* Verify that various binary operations on vectors are folded
16871 correctly. */
16873 static void
16874 test_vector_folding ()
16876 tree inner_type = integer_type_node;
16877 tree type = build_vector_type (inner_type, 4);
16878 tree zero = build_zero_cst (type);
16879 tree one = build_one_cst (type);
16880 tree index = build_index_vector (type, 0, 1);
16882 /* Verify equality tests that return a scalar boolean result. */
16883 tree res_type = boolean_type_node;
16884 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
16885 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
16886 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
16887 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
16888 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
16889 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16890 index, one)));
16891 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
16892 index, index)));
16893 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16894 index, index)));
16897 /* Verify folding of VEC_DUPLICATE_EXPRs. */
16899 static void
16900 test_vec_duplicate_folding ()
16902 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
16903 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
16904 /* This will be 1 if VEC_MODE isn't a vector mode. */
16905 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
16907 tree type = build_vector_type (ssizetype, nunits);
16908 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
16909 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
16910 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
16913 /* Run all of the selftests within this file. */
16915 void
16916 fold_const_cc_tests ()
16918 test_arithmetic_folding ();
16919 test_vector_folding ();
16920 test_vec_duplicate_folding ();
16923 } // namespace selftest
16925 #endif /* CHECKING_P */