/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"

/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.
   Should be used when folding in an initializer enables additional
   optimizations.  */
int folding_initializer = 0;

/* Nonzero if we are folding a C++ manifestly-constant-evaluated context; zero
   otherwise.
   Should be used when certain constructs shouldn't be optimized
   during folding in that context.  */
bool folding_cxx_constexpr = false;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
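
/* The encoding packs the comparison outcome into four bits: bit 0 is
   "less than", bit 1 is "equal", bit 2 is "greater than" and bit 3 is
   "unordered".  Combining predicates then reduces to bit arithmetic,
   e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_UNGE == (COMPCODE_UNORD | COMPCODE_GE), which is what makes
   AND/OR transformations on comparisons simple.  */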
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
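
/* For example, for INTEGER_CSTs 12 and 4, wi::multiple_of_p succeeds
   with quotient 3, so this returns the constant 3; for 12 and 5 the
   remainder is nonzero and the result is NULL_TREE.  */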
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
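
/* A typical caller brackets a folding attempt with the deferral
   routines; a sketch of the pattern (as used, for instance, by the
   loop iteration estimators):

     fold_defer_overflow_warnings ();
     tree res = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so that a message queued by fold_overflow_warning above is only
   issued if the folded result is actually used.  */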
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
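
/* Unsigned types are rejected outright.  For a signed type only the
   value with just the sign bit set fails the test: for 32-bit int
   that is INT_MIN (0x80000000), whose negation is not representable;
   every other value negates safely.  */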
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand it
	 does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
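
/* For example, given signed int A, -(A + 1) is only considered cheaply
   negatable when signed overflow wraps (-fwrapv): the PLUS_EXPR case
   otherwise bails out because negation could introduce fresh undefined
   overflow.  When it is negatable, fold_negate_expr_1 below performs
   the actual rewrite to (-1) - A.  */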
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
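
/* Unlike fold_negate_expr, negate_expr never fails: an expression such
   as a * b that resists simplification is simply wrapped, yielding an
   explicit NEGATE_EXPR node for -(a * b).  */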
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
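
/* For example, splitting IN = x - 5 with CODE == PLUS_EXPR and
   NEGATE_P false stores the literal 5 in *MINUS_LITP (it was
   subtracted), leaves the other output parts null, and returns the
   variable part x.  associate_trees below can then recombine the
   pieces into an equivalent expression.  */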
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
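
/* For example, when T1 = a + b would make refolding recurse, a
   NEGATE_EXPR in T2 is still turned into subtraction, building
   (a + b) - c rather than (a + b) + (-c), and associating with a
   literal 0 just converts the other operand to TYPE.  */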
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
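
/* Note how a rotate by a negative amount is normalized by rotating the
   other way: rrotate (x, -3) becomes lrotate (x, 3).  Division or
   modulus by zero and shifts by negative amounts make the function
   report failure instead of folding to an arbitrary value.  */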
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
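
/* For example, with INTEGER_CSTs a and b of the same type,

     tree sum = int_const_binop (PLUS_EXPR, a, b, 0);

   yields the folded sum, or NULL_TREE if the operation cannot be
   evaluated.  An OVERFLOWABLE of -1, as passed by size_binop_loc
   below, makes force_fit_type record overflow even for unsigned
   types.  */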
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
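
/* Left shifts distribute only through operand 1:
   (a + b) << c == (a << c) + (b << c), but c << (a + b) is not
   (c << a) + (c << b), which is why OPNO matters for LSHIFT_EXPR.  */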
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
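
/* As an illustration of the two complex division styles: for
   (1 + 2i) / (3 + 4i) the straightforward method computes
   t = 3*3 + 4*4 = 25, real = (1*3 + 2*4) / 25 and
   imag = (2*3 - 1*4) / 25, whereas the wide-range method first forms
   ratio = 3/4 (since |3| < |4|) so the intermediate products stay
   small.  */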
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
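
/* For example, const_unop (BIT_NOT_EXPR, type, c) with an INTEGER_CST
   c folds to the constant ~c via fold_not_const; for a VECTOR_CST it
   complements each encoded element, which is safe even for stepped
   encodings because ~x == -1 - x preserves the step.  */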
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
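
/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds immediately to the sizetype constant 12, and adding a zero
   constant simply returns the other operand; only non-constant
   operands fall through to fold_build2_loc.  */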
1987 /* Given two values, either both of sizetype or both of bitsizetype,
1988 compute the difference between the two values. Return the value
1989 in the signed type corresponding to the type of the operands. */
1991 tree
1992 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1994 tree type = TREE_TYPE (arg0);
1995 tree ctype;
1997 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1998 TREE_TYPE (arg1)));
2000 /* If the type is already signed, just do the simple thing. */
2001 if (!TYPE_UNSIGNED (type))
2002 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2004 if (type == sizetype)
2005 ctype = ssizetype;
2006 else if (type == bitsizetype)
2007 ctype = sbitsizetype;
2008 else
2009 ctype = signed_type_for (type);
2011 /* If either operand is not a constant, do the conversions to the signed
2012 type and subtract. The hardware will do the right thing with any
2013 overflow in the subtraction. */
2014 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2015 return size_binop_loc (loc, MINUS_EXPR,
2016 fold_convert_loc (loc, ctype, arg0),
2017 fold_convert_loc (loc, ctype, arg1));
2019 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2020 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2021 overflow) and negate (which can't either). Special-case a result
2022 of zero while we're here. */
2023 if (tree_int_cst_equal (arg0, arg1))
2024 return build_int_cst (ctype, 0);
2025 else if (tree_int_cst_lt (arg1, arg0))
2026 return fold_convert_loc (loc, ctype,
2027 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2028 else
2029 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2030 fold_convert_loc (loc, ctype,
2031 size_binop_loc (loc,
2032 MINUS_EXPR,
2033 arg1, arg0)));
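/* Worked example (a sketch): with sizetype constants ARG0 = 2 and
   ARG1 = 5, ARG0 < ARG1, so the final arm above computes 5 - 2 = 3
   in the unsigned type, converts that to ssizetype and subtracts it
   from zero, yielding ssizetype -3 with no spurious overflow.  */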
2036 /* A subroutine of fold_convert_const handling conversions of an
2037 INTEGER_CST to another integer type. */
2039 static tree
2040 fold_convert_const_int_from_int (tree type, const_tree arg1)
2042 /* Given an integer constant, make new constant with new type,
2043 appropriately sign-extended or truncated. Use widest_int
2044 so that any extension is done according to ARG1's type. */
2045 return force_fit_type (type, wi::to_widest (arg1),
2046 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2047 TREE_OVERFLOW (arg1));
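/* For example: converting the int constant 300 to unsigned char
   yields 44 (300 truncated to 8 bits), while converting int -1 to a
   64-bit unsigned type yields all-ones, because the widest_int
   extension above follows the signedness of ARG1's type before the
   result is fitted to TYPE.  */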
2050 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2051 to an integer type. */
2053 static tree
2054 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2056 bool overflow = false;
2057 tree t;
2059 /* The following code implements the floating point to integer
2060 conversion rules required by the Java Language Specification,
2061 namely that IEEE NaNs are mapped to zero and values that overflow
2062 the target precision saturate, i.e. values greater than
2063 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2064 are mapped to INT_MIN. These semantics are allowed by the
2065 C and C++ standards that simply state that the behavior of
2066 FP-to-integer conversion is unspecified upon overflow. */
2068 wide_int val;
2069 REAL_VALUE_TYPE r;
2070 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2072 switch (code)
2074 case FIX_TRUNC_EXPR:
2075 real_trunc (&r, VOIDmode, &x);
2076 break;
2078 default:
2079 gcc_unreachable ();
2082 /* If R is NaN, return zero and show we have an overflow. */
2083 if (REAL_VALUE_ISNAN (r))
2085 overflow = true;
2086 val = wi::zero (TYPE_PRECISION (type));
2089 /* See if R is less than the lower bound or greater than the
2090 upper bound. */
2092 if (! overflow)
2094 tree lt = TYPE_MIN_VALUE (type);
2095 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2096 if (real_less (&r, &l))
2098 overflow = true;
2099 val = wi::to_wide (lt);
2103 if (! overflow)
2105 tree ut = TYPE_MAX_VALUE (type);
2106 if (ut)
2108 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2109 if (real_less (&u, &r))
2111 overflow = true;
2112 val = wi::to_wide (ut);
2117 if (! overflow)
2118 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2120 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2121 return t;
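/* The saturating semantics above, modeled as self-contained C (a
   behavioral sketch, not the wide-int implementation;
   fp_to_int_saturating is a hypothetical name):

     #include <limits.h>
     #include <math.h>

     static int fp_to_int_saturating (double r, int *overflow)
     {
       double t = trunc (r);           // FIX_TRUNC_EXPR
       *overflow = 1;
       if (isnan (t))
         return 0;                     // NaN maps to zero
       if (t < (double) INT_MIN)
         return INT_MIN;               // saturate at the lower bound
       if (t > (double) INT_MAX)
         return INT_MAX;               // saturate at the upper bound
       *overflow = 0;
       return (int) t;
     }
*/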
2124 /* A subroutine of fold_convert_const handling conversions of a
2125 FIXED_CST to an integer type. */
2127 static tree
2128 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2130 tree t;
2131 double_int temp, temp_trunc;
2132 scalar_mode mode;
2134 /* Right shift FIXED_CST to temp by fbit. */
2135 temp = TREE_FIXED_CST (arg1).data;
2136 mode = TREE_FIXED_CST (arg1).mode;
2137 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2139 temp = temp.rshift (GET_MODE_FBIT (mode),
2140 HOST_BITS_PER_DOUBLE_INT,
2141 SIGNED_FIXED_POINT_MODE_P (mode));
2143 /* Left shift temp to temp_trunc by fbit. */
2144 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2145 HOST_BITS_PER_DOUBLE_INT,
2146 SIGNED_FIXED_POINT_MODE_P (mode));
2148 else
2150 temp = double_int_zero;
2151 temp_trunc = double_int_zero;
2154 /* If FIXED_CST is negative, we need to round the value toward 0:
2155 if the fractional bits are not zero, add 1 to temp. */
2156 if (SIGNED_FIXED_POINT_MODE_P (mode)
2157 && temp_trunc.is_negative ()
2158 && TREE_FIXED_CST (arg1).data != temp_trunc)
2159 temp += double_int_one;
2161 /* Given a fixed-point constant, make new constant with new type,
2162 appropriately sign-extended or truncated. */
2163 t = force_fit_type (type, temp, -1,
2164 (temp.is_negative ()
2165 && (TYPE_UNSIGNED (type)
2166 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2167 | TREE_OVERFLOW (arg1));
2169 return t;
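/* Worked example (a sketch): for a signed fixed-point constant of
   value -2.5, the arithmetic right shift gives temp = -3 (rounded
   toward negative infinity) and temp_trunc holds -3.0 in the raw
   representation.  The constant is negative and its fractional bits
   were nonzero (data != temp_trunc), so 1 is added, giving -2,
   i.e. rounding toward zero.  */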
2172 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2173 to another floating point type. */
2175 static tree
2176 fold_convert_const_real_from_real (tree type, const_tree arg1)
2178 REAL_VALUE_TYPE value;
2179 tree t;
2181 /* If the underlying modes are the same, simply treat it as
2182 copy and rebuild with TREE_REAL_CST information and the
2183 given type. */
2184 if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2186 t = build_real (type, TREE_REAL_CST (arg1));
2187 return t;
2190 /* Don't perform the operation if flag_signaling_nans is on
2191 and the operand is a signaling NaN. */
2192 if (HONOR_SNANS (arg1)
2193 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2194 return NULL_TREE;
2196 /* With flag_rounding_math we should respect the current rounding mode
2197 unless the conversion is exact. */
2198 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2199 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2200 return NULL_TREE;
2202 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2203 t = build_real (type, value);
2205 /* If converting an infinity or NAN to a representation that doesn't
2206 have one, set the overflow bit so that we can produce some kind of
2207 error message at the appropriate point if necessary. It's not the
2208 most user-friendly message, but it's better than nothing. */
2209 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2210 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2211 TREE_OVERFLOW (t) = 1;
2212 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2213 && !MODE_HAS_NANS (TYPE_MODE (type)))
2214 TREE_OVERFLOW (t) = 1;
2215 /* Regular overflow, conversion produced an infinity in a mode that
2216 can't represent them. */
2217 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2218 && REAL_VALUE_ISINF (value)
2219 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2220 TREE_OVERFLOW (t) = 1;
2221 else
2222 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2223 return t;
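/* For instance: narrowing a double NaN to a type whose mode lacks
   NaNs, or a double infinity to a mode without infinities, flags
   TREE_OVERFLOW on the result, as does a narrowing that itself
   overflows to infinity in a mode that cannot represent one.  */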
2226 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2227 to a floating point type. */
2229 static tree
2230 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2232 REAL_VALUE_TYPE value;
2233 tree t;
2235 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2236 &TREE_FIXED_CST (arg1));
2237 t = build_real (type, value);
2239 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2240 return t;
2243 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2244 to another fixed-point type. */
2246 static tree
2247 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2249 FIXED_VALUE_TYPE value;
2250 tree t;
2251 bool overflow_p;
2253 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2254 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2255 t = build_fixed (type, value);
2257 /* Propagate overflow flags. */
2258 if (overflow_p | TREE_OVERFLOW (arg1))
2259 TREE_OVERFLOW (t) = 1;
2260 return t;
2263 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2264 to a fixed-point type. */
2266 static tree
2267 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2269 FIXED_VALUE_TYPE value;
2270 tree t;
2271 bool overflow_p;
2272 double_int di;
2274 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2276 di.low = TREE_INT_CST_ELT (arg1, 0);
2277 if (TREE_INT_CST_NUNITS (arg1) == 1)
2278 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2279 else
2280 di.high = TREE_INT_CST_ELT (arg1, 1);
2282 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2283 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2284 TYPE_SATURATING (type));
2285 t = build_fixed (type, value);
2287 /* Propagate overflow flags. */
2288 if (overflow_p | TREE_OVERFLOW (arg1))
2289 TREE_OVERFLOW (t) = 1;
2290 return t;
2293 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2294 to a fixed-point type. */
2296 static tree
2297 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2299 FIXED_VALUE_TYPE value;
2300 tree t;
2301 bool overflow_p;
2303 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2304 &TREE_REAL_CST (arg1),
2305 TYPE_SATURATING (type));
2306 t = build_fixed (type, value);
2308 /* Propagate overflow flags. */
2309 if (overflow_p | TREE_OVERFLOW (arg1))
2310 TREE_OVERFLOW (t) = 1;
2311 return t;
2314 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2315 type TYPE. If no simplification can be done return NULL_TREE. */
2317 static tree
2318 fold_convert_const (enum tree_code code, tree type, tree arg1)
2320 tree arg_type = TREE_TYPE (arg1);
2321 if (arg_type == type)
2322 return arg1;
2324 /* We can't widen types, since the runtime value could overflow the
2325 original type before being extended to the new type. */
2326 if (POLY_INT_CST_P (arg1)
2327 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2328 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2329 return build_poly_int_cst (type,
2330 poly_wide_int::from (poly_int_cst_value (arg1),
2331 TYPE_PRECISION (type),
2332 TYPE_SIGN (arg_type)));
2334 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2335 || TREE_CODE (type) == OFFSET_TYPE)
2337 if (TREE_CODE (arg1) == INTEGER_CST)
2338 return fold_convert_const_int_from_int (type, arg1);
2339 else if (TREE_CODE (arg1) == REAL_CST)
2340 return fold_convert_const_int_from_real (code, type, arg1);
2341 else if (TREE_CODE (arg1) == FIXED_CST)
2342 return fold_convert_const_int_from_fixed (type, arg1);
2344 else if (TREE_CODE (type) == REAL_TYPE)
2346 if (TREE_CODE (arg1) == INTEGER_CST)
2348 tree res = build_real_from_int_cst (type, arg1);
2349 /* Avoid the folding if flag_rounding_math is on and the
2350 conversion is not exact. */
2351 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2353 bool fail = false;
2354 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2355 TYPE_PRECISION (TREE_TYPE (arg1)));
2356 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2357 return NULL_TREE;
2359 return res;
2361 else if (TREE_CODE (arg1) == REAL_CST)
2362 return fold_convert_const_real_from_real (type, arg1);
2363 else if (TREE_CODE (arg1) == FIXED_CST)
2364 return fold_convert_const_real_from_fixed (type, arg1);
2366 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2368 if (TREE_CODE (arg1) == FIXED_CST)
2369 return fold_convert_const_fixed_from_fixed (type, arg1);
2370 else if (TREE_CODE (arg1) == INTEGER_CST)
2371 return fold_convert_const_fixed_from_int (type, arg1);
2372 else if (TREE_CODE (arg1) == REAL_CST)
2373 return fold_convert_const_fixed_from_real (type, arg1);
2375 else if (TREE_CODE (type) == VECTOR_TYPE)
2377 if (TREE_CODE (arg1) == VECTOR_CST
2378 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2380 tree elttype = TREE_TYPE (type);
2381 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2382 /* We can't handle steps directly when extending, since the
2383 values need to wrap at the original precision first. */
2384 bool step_ok_p
2385 = (INTEGRAL_TYPE_P (elttype)
2386 && INTEGRAL_TYPE_P (arg1_elttype)
2387 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2388 tree_vector_builder v;
2389 if (!v.new_unary_operation (type, arg1, step_ok_p))
2390 return NULL_TREE;
2391 unsigned int len = v.encoded_nelts ();
2392 for (unsigned int i = 0; i < len; ++i)
2394 tree elt = VECTOR_CST_ELT (arg1, i);
2395 tree cvt = fold_convert_const (code, elttype, elt);
2396 if (cvt == NULL_TREE)
2397 return NULL_TREE;
2398 v.quick_push (cvt);
2400 return v.build ();
2403 return NULL_TREE;
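/* Minimal sketch of the dispatcher above (dconst2 is the global
   REAL_VALUE_TYPE for 2.0):

     tree two = build_real (double_type_node, dconst2);
     tree t = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, two);

   T is the integer constant 2; any conversion that cannot be folded
   at compile time comes back as NULL_TREE.  */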
2406 /* Construct a vector of zero elements of vector type TYPE. */
2408 static tree
2409 build_zero_vector (tree type)
2411 tree t;
2413 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2414 return build_vector_from_val (type, t);
2417 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2419 bool
2420 fold_convertible_p (const_tree type, const_tree arg)
2422 const_tree orig = TREE_TYPE (arg);
2424 if (type == orig)
2425 return true;
2427 if (TREE_CODE (arg) == ERROR_MARK
2428 || TREE_CODE (type) == ERROR_MARK
2429 || TREE_CODE (orig) == ERROR_MARK)
2430 return false;
2432 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2433 return true;
2435 switch (TREE_CODE (type))
2437 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2438 case POINTER_TYPE: case REFERENCE_TYPE:
2439 case OFFSET_TYPE:
2440 return (INTEGRAL_TYPE_P (orig)
2441 || (POINTER_TYPE_P (orig)
2442 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2443 || TREE_CODE (orig) == OFFSET_TYPE);
2445 case REAL_TYPE:
2446 case FIXED_POINT_TYPE:
2447 case VOID_TYPE:
2448 return TREE_CODE (type) == TREE_CODE (orig);
2450 case VECTOR_TYPE:
2451 return (VECTOR_TYPE_P (orig)
2452 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2453 TYPE_VECTOR_SUBPARTS (orig))
2454 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2456 default:
2457 return false;
2461 /* Convert expression ARG to type TYPE. Used by the middle-end for
2462 simple conversions in preference to calling the front-end's convert. */
2464 tree
2465 fold_convert_loc (location_t loc, tree type, tree arg)
2467 tree orig = TREE_TYPE (arg);
2468 tree tem;
2470 if (type == orig)
2471 return arg;
2473 if (TREE_CODE (arg) == ERROR_MARK
2474 || TREE_CODE (type) == ERROR_MARK
2475 || TREE_CODE (orig) == ERROR_MARK)
2476 return error_mark_node;
2478 switch (TREE_CODE (type))
2480 case POINTER_TYPE:
2481 case REFERENCE_TYPE:
2482 /* Handle conversions between pointers to different address spaces. */
2483 if (POINTER_TYPE_P (orig)
2484 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2485 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2486 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2487 /* fall through */
2489 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2490 case OFFSET_TYPE:
2491 if (TREE_CODE (arg) == INTEGER_CST)
2493 tem = fold_convert_const (NOP_EXPR, type, arg);
2494 if (tem != NULL_TREE)
2495 return tem;
2497 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2498 || TREE_CODE (orig) == OFFSET_TYPE)
2499 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2500 if (TREE_CODE (orig) == COMPLEX_TYPE)
2501 return fold_convert_loc (loc, type,
2502 fold_build1_loc (loc, REALPART_EXPR,
2503 TREE_TYPE (orig), arg));
2504 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2505 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2506 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2508 case REAL_TYPE:
2509 if (TREE_CODE (arg) == INTEGER_CST)
2511 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2512 if (tem != NULL_TREE)
2513 return tem;
2515 else if (TREE_CODE (arg) == REAL_CST)
2517 tem = fold_convert_const (NOP_EXPR, type, arg);
2518 if (tem != NULL_TREE)
2519 return tem;
2521 else if (TREE_CODE (arg) == FIXED_CST)
2523 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2524 if (tem != NULL_TREE)
2525 return tem;
2528 switch (TREE_CODE (orig))
2530 case INTEGER_TYPE:
2531 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2532 case POINTER_TYPE: case REFERENCE_TYPE:
2533 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2535 case REAL_TYPE:
2536 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2538 case FIXED_POINT_TYPE:
2539 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2541 case COMPLEX_TYPE:
2542 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2543 return fold_convert_loc (loc, type, tem);
2545 default:
2546 gcc_unreachable ();
2549 case FIXED_POINT_TYPE:
2550 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2551 || TREE_CODE (arg) == REAL_CST)
2553 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2554 if (tem != NULL_TREE)
2555 goto fold_convert_exit;
2558 switch (TREE_CODE (orig))
2560 case FIXED_POINT_TYPE:
2561 case INTEGER_TYPE:
2562 case ENUMERAL_TYPE:
2563 case BOOLEAN_TYPE:
2564 case REAL_TYPE:
2565 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2567 case COMPLEX_TYPE:
2568 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2569 return fold_convert_loc (loc, type, tem);
2571 default:
2572 gcc_unreachable ();
2575 case COMPLEX_TYPE:
2576 switch (TREE_CODE (orig))
2578 case INTEGER_TYPE:
2579 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2580 case POINTER_TYPE: case REFERENCE_TYPE:
2581 case REAL_TYPE:
2582 case FIXED_POINT_TYPE:
2583 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2584 fold_convert_loc (loc, TREE_TYPE (type), arg),
2585 fold_convert_loc (loc, TREE_TYPE (type),
2586 integer_zero_node));
2587 case COMPLEX_TYPE:
2589 tree rpart, ipart;
2591 if (TREE_CODE (arg) == COMPLEX_EXPR)
2593 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2594 TREE_OPERAND (arg, 0));
2595 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2596 TREE_OPERAND (arg, 1));
2597 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2600 arg = save_expr (arg);
2601 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2602 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2603 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2604 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2605 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2608 default:
2609 gcc_unreachable ();
2612 case VECTOR_TYPE:
2613 if (integer_zerop (arg))
2614 return build_zero_vector (type);
2615 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2616 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2617 || TREE_CODE (orig) == VECTOR_TYPE);
2618 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2620 case VOID_TYPE:
2621 tem = fold_ignored_result (arg);
2622 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2624 default:
2625 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2626 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2627 gcc_unreachable ();
2629 fold_convert_exit:
2630 tem = protected_set_expr_location_unshare (tem, loc);
2631 return tem;
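/* Sketch: for a scalar double X,
   fold_convert_loc (loc, complex_double_type_node, x) takes the
   COMPLEX_TYPE arm above and builds COMPLEX_EXPR <x, 0.0>, while
   converting an existing COMPLEX_EXPR to another complex type simply
   converts the real and imaginary parts individually.  */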
2634 /* Return false if expr can be assumed not to be an lvalue, true
2635 otherwise. */
2637 static bool
2638 maybe_lvalue_p (const_tree x)
2640 /* We only need to wrap lvalue tree codes. */
2641 switch (TREE_CODE (x))
2643 case VAR_DECL:
2644 case PARM_DECL:
2645 case RESULT_DECL:
2646 case LABEL_DECL:
2647 case FUNCTION_DECL:
2648 case SSA_NAME:
2649 case COMPOUND_LITERAL_EXPR:
2651 case COMPONENT_REF:
2652 case MEM_REF:
2653 case INDIRECT_REF:
2654 case ARRAY_REF:
2655 case ARRAY_RANGE_REF:
2656 case BIT_FIELD_REF:
2657 case OBJ_TYPE_REF:
2659 case REALPART_EXPR:
2660 case IMAGPART_EXPR:
2661 case PREINCREMENT_EXPR:
2662 case PREDECREMENT_EXPR:
2663 case SAVE_EXPR:
2664 case TRY_CATCH_EXPR:
2665 case WITH_CLEANUP_EXPR:
2666 case COMPOUND_EXPR:
2667 case MODIFY_EXPR:
2668 case TARGET_EXPR:
2669 case COND_EXPR:
2670 case BIND_EXPR:
2671 case VIEW_CONVERT_EXPR:
2672 break;
2674 default:
2675 /* Assume the worst for front-end tree codes. */
2676 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2677 break;
2678 return false;
2681 return true;
2684 /* Return an expr equal to X but certainly not valid as an lvalue. */
2686 tree
2687 non_lvalue_loc (location_t loc, tree x)
2689 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2690 us. */
2691 if (in_gimple_form)
2692 return x;
2694 if (! maybe_lvalue_p (x))
2695 return x;
2696 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2699 /* Given a tree comparison code, return the code that is the logical inverse.
2700 It is generally not safe to do this for floating-point comparisons, except
2701 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2702 ERROR_MARK in this case. */
2704 enum tree_code
2705 invert_tree_comparison (enum tree_code code, bool honor_nans)
2707 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2708 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2709 return ERROR_MARK;
2711 switch (code)
2713 case EQ_EXPR:
2714 return NE_EXPR;
2715 case NE_EXPR:
2716 return EQ_EXPR;
2717 case GT_EXPR:
2718 return honor_nans ? UNLE_EXPR : LE_EXPR;
2719 case GE_EXPR:
2720 return honor_nans ? UNLT_EXPR : LT_EXPR;
2721 case LT_EXPR:
2722 return honor_nans ? UNGE_EXPR : GE_EXPR;
2723 case LE_EXPR:
2724 return honor_nans ? UNGT_EXPR : GT_EXPR;
2725 case LTGT_EXPR:
2726 return UNEQ_EXPR;
2727 case UNEQ_EXPR:
2728 return LTGT_EXPR;
2729 case UNGT_EXPR:
2730 return LE_EXPR;
2731 case UNGE_EXPR:
2732 return LT_EXPR;
2733 case UNLT_EXPR:
2734 return GE_EXPR;
2735 case UNLE_EXPR:
2736 return GT_EXPR;
2737 case ORDERED_EXPR:
2738 return UNORDERED_EXPR;
2739 case UNORDERED_EXPR:
2740 return ORDERED_EXPR;
2741 default:
2742 gcc_unreachable ();
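/* Example: with NaNs honored, LT_EXPR inverts to UNGE_EXPR, since
   !(x < y) also holds when x and y are unordered.  Under
   flag_trapping_math the inversion is refused (ERROR_MARK) for all
   but EQ/NE/ORDERED/UNORDERED, because replacing a signaling
   comparison such as LT_EXPR with a quiet one such as UNGE_EXPR
   would drop the trap on unordered operands.  */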
2746 /* Similar, but return the comparison that results if the operands are
2747 swapped. This is safe for floating-point. */
2749 enum tree_code
2750 swap_tree_comparison (enum tree_code code)
2752 switch (code)
2754 case EQ_EXPR:
2755 case NE_EXPR:
2756 case ORDERED_EXPR:
2757 case UNORDERED_EXPR:
2758 case LTGT_EXPR:
2759 case UNEQ_EXPR:
2760 return code;
2761 case GT_EXPR:
2762 return LT_EXPR;
2763 case GE_EXPR:
2764 return LE_EXPR;
2765 case LT_EXPR:
2766 return GT_EXPR;
2767 case LE_EXPR:
2768 return GE_EXPR;
2769 case UNGT_EXPR:
2770 return UNLT_EXPR;
2771 case UNGE_EXPR:
2772 return UNLE_EXPR;
2773 case UNLT_EXPR:
2774 return UNGT_EXPR;
2775 case UNLE_EXPR:
2776 return UNGE_EXPR;
2777 default:
2778 gcc_unreachable ();
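/* Example: swap_tree_comparison (LT_EXPR) == GT_EXPR, since x < y
   iff y > x.  Swapping the operands never changes whether they
   compare unordered, which is why this is safe for floating point
   while inversion is not.  */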
2783 /* Convert a comparison tree code from an enum tree_code representation
2784 into a compcode bit-based encoding. This function is the inverse of
2785 compcode_to_comparison. */
2787 static enum comparison_code
2788 comparison_to_compcode (enum tree_code code)
2790 switch (code)
2792 case LT_EXPR:
2793 return COMPCODE_LT;
2794 case EQ_EXPR:
2795 return COMPCODE_EQ;
2796 case LE_EXPR:
2797 return COMPCODE_LE;
2798 case GT_EXPR:
2799 return COMPCODE_GT;
2800 case NE_EXPR:
2801 return COMPCODE_NE;
2802 case GE_EXPR:
2803 return COMPCODE_GE;
2804 case ORDERED_EXPR:
2805 return COMPCODE_ORD;
2806 case UNORDERED_EXPR:
2807 return COMPCODE_UNORD;
2808 case UNLT_EXPR:
2809 return COMPCODE_UNLT;
2810 case UNEQ_EXPR:
2811 return COMPCODE_UNEQ;
2812 case UNLE_EXPR:
2813 return COMPCODE_UNLE;
2814 case UNGT_EXPR:
2815 return COMPCODE_UNGT;
2816 case LTGT_EXPR:
2817 return COMPCODE_LTGT;
2818 case UNGE_EXPR:
2819 return COMPCODE_UNGE;
2820 default:
2821 gcc_unreachable ();
2825 /* Convert a compcode bit-based encoding of a comparison operator back
2826 to GCC's enum tree_code representation. This function is the
2827 inverse of comparison_to_compcode. */
2829 static enum tree_code
2830 compcode_to_comparison (enum comparison_code code)
2832 switch (code)
2834 case COMPCODE_LT:
2835 return LT_EXPR;
2836 case COMPCODE_EQ:
2837 return EQ_EXPR;
2838 case COMPCODE_LE:
2839 return LE_EXPR;
2840 case COMPCODE_GT:
2841 return GT_EXPR;
2842 case COMPCODE_NE:
2843 return NE_EXPR;
2844 case COMPCODE_GE:
2845 return GE_EXPR;
2846 case COMPCODE_ORD:
2847 return ORDERED_EXPR;
2848 case COMPCODE_UNORD:
2849 return UNORDERED_EXPR;
2850 case COMPCODE_UNLT:
2851 return UNLT_EXPR;
2852 case COMPCODE_UNEQ:
2853 return UNEQ_EXPR;
2854 case COMPCODE_UNLE:
2855 return UNLE_EXPR;
2856 case COMPCODE_UNGT:
2857 return UNGT_EXPR;
2858 case COMPCODE_LTGT:
2859 return LTGT_EXPR;
2860 case COMPCODE_UNGE:
2861 return UNGE_EXPR;
2862 default:
2863 gcc_unreachable ();
2867 /* Return true if COND1 tests the opposite condition of COND2. */
2869 bool
2870 inverse_conditions_p (const_tree cond1, const_tree cond2)
2872 return (COMPARISON_CLASS_P (cond1)
2873 && COMPARISON_CLASS_P (cond2)
2874 && (invert_tree_comparison
2875 (TREE_CODE (cond1),
2876 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2877 && operand_equal_p (TREE_OPERAND (cond1, 0),
2878 TREE_OPERAND (cond2, 0), 0)
2879 && operand_equal_p (TREE_OPERAND (cond1, 1),
2880 TREE_OPERAND (cond2, 1), 0));
2883 /* Return a tree for the comparison which is the combination of
2884 doing the AND or OR (depending on CODE) of the two operations LCODE
2885 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2886 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2887 if this makes the transformation invalid. */
2889 tree
2890 combine_comparisons (location_t loc,
2891 enum tree_code code, enum tree_code lcode,
2892 enum tree_code rcode, tree truth_type,
2893 tree ll_arg, tree lr_arg)
2895 bool honor_nans = HONOR_NANS (ll_arg);
2896 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2897 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2898 int compcode;
2900 switch (code)
2902 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2903 compcode = lcompcode & rcompcode;
2904 break;
2906 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2907 compcode = lcompcode | rcompcode;
2908 break;
2910 default:
2911 return NULL_TREE;
2914 if (!honor_nans)
2916 /* Eliminate unordered comparisons, as well as LTGT and ORD
2917 which are not used unless the mode has NaNs. */
2918 compcode &= ~COMPCODE_UNORD;
2919 if (compcode == COMPCODE_LTGT)
2920 compcode = COMPCODE_NE;
2921 else if (compcode == COMPCODE_ORD)
2922 compcode = COMPCODE_TRUE;
2924 else if (flag_trapping_math)
2926 /* Check that the original operation and the optimized ones will trap
2927 under the same condition. */
2928 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2929 && (lcompcode != COMPCODE_EQ)
2930 && (lcompcode != COMPCODE_ORD);
2931 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2932 && (rcompcode != COMPCODE_EQ)
2933 && (rcompcode != COMPCODE_ORD);
2934 bool trap = (compcode & COMPCODE_UNORD) == 0
2935 && (compcode != COMPCODE_EQ)
2936 && (compcode != COMPCODE_ORD);
2938 /* In a short-circuited boolean expression the LHS might be
2939 such that the RHS, if evaluated, will never trap. For
2940 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2941 if neither x nor y is NaN. (This is a mixed blessing: for
2942 example, the expression above will never trap, hence
2943 optimizing it to x < y would be invalid). */
2944 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2945 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2946 rtrap = false;
2948 /* If the comparison was short-circuited, and only the RHS
2949 trapped, we may now generate a spurious trap. */
2950 if (rtrap && !ltrap
2951 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2952 return NULL_TREE;
2954 /* If we changed the conditions that cause a trap, we lose. */
2955 if ((ltrap || rtrap) != trap)
2956 return NULL_TREE;
2959 if (compcode == COMPCODE_TRUE)
2960 return constant_boolean_node (true, truth_type);
2961 else if (compcode == COMPCODE_FALSE)
2962 return constant_boolean_node (false, truth_type);
2963 else
2965 enum tree_code tcode;
2967 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2968 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
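/* The bit encoding makes the combination a single mask operation.
   A sketch, assuming the usual fold-const.h encoding (bit 0 =
   "less", bit 1 = "equal", bit 2 = "greater", bit 3 = "unordered",
   so COMPCODE_LE == 3 and COMPCODE_GE == 6):

     (x <= y) && (x >= y)  -->  3 & 6 == 2 == COMPCODE_EQ,  i.e. x == y
     (x <= y) || (x >= y)  -->  3 | 6 == 7 == COMPCODE_ORD, folded
                                further to true when the mode has
                                no NaNs.  */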
2972 /* Return nonzero if two operands (typically of the same tree node)
2973 are necessarily equal. FLAGS modifies behavior as follows:
2975 If OEP_ONLY_CONST is set, only return nonzero for constants.
2976 This function tests whether the operands are indistinguishable;
2977 it does not test whether they are equal using C's == operation.
2978 The distinction is important for IEEE floating point, because
2979 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2980 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2982 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2983 even though it may hold multiple values during a function.
2984 This is because a GCC tree node guarantees that nothing else is
2985 executed between the evaluation of its "operands" (which may often
2986 be evaluated in arbitrary order). Hence if the operands themselves
2987 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2988 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2989 unset means assuming isochronic (or instantaneous) tree equivalence.
2990 Unless comparing arbitrary expression trees, such as from different
2991 statements, this flag can usually be left unset.
2993 If OEP_PURE_SAME is set, then pure functions with identical arguments
2994 are considered the same. It is used when the caller has other ways
2995 to ensure that global memory is unchanged in between.
2997 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2998 not values of expressions.
3000 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3001 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3003 If OEP_BITWISE is set, then require the values to be bitwise identical
3004 rather than simply numerically equal. Do not take advantage of things
3005 like math-related flags or undefined behavior; only return true for
3006 values that are provably bitwise identical in all circumstances.
3008 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3009 any operand with side effects. This is unnecessarily conservative in the
3010 case we know that arg0 and arg1 are in disjoint code paths (such as in
3011 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3012 addresses with TREE_CONSTANT flag set so we know that &var == &var
3013 even if var is volatile. */
3015 bool
3016 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3017 unsigned int flags)
3019 bool r;
3020 if (verify_hash_value (arg0, arg1, flags, &r))
3021 return r;
3023 STRIP_ANY_LOCATION_WRAPPER (arg0);
3024 STRIP_ANY_LOCATION_WRAPPER (arg1);
3026 /* If either is ERROR_MARK, they aren't equal. */
3027 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3028 || TREE_TYPE (arg0) == error_mark_node
3029 || TREE_TYPE (arg1) == error_mark_node)
3030 return false;
3032 /* Similarly, if either does not have a type (like a template id),
3033 they aren't equal. */
3034 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3035 return false;
3037 /* Bitwise identity makes no sense if the values have different layouts. */
3038 if ((flags & OEP_BITWISE)
3039 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3040 return false;
3042 /* We cannot consider pointers to different address spaces equal. */
3043 if (POINTER_TYPE_P (TREE_TYPE (arg0))
3044 && POINTER_TYPE_P (TREE_TYPE (arg1))
3045 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3046 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3047 return false;
3049 /* Check equality of integer constants before bailing out due to
3050 precision differences. */
3051 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3053 /* Address of INTEGER_CST is not defined; check that we did not forget
3054 to drop the OEP_ADDRESS_OF flags. */
3055 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3056 return tree_int_cst_equal (arg0, arg1);
3059 if (!(flags & OEP_ADDRESS_OF))
3061 /* If the two types don't have the same signedness, then we can't consider
3062 them equal. We must check this before the STRIP_NOPS calls
3063 because they may change the signedness of the arguments. As pointers
3064 strictly don't have a signedness, require either two pointers or
3065 two non-pointers as well. */
3066 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3067 || POINTER_TYPE_P (TREE_TYPE (arg0))
3068 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3069 return false;
3071 /* If the two types don't have the same precision, then it is not safe
3072 to strip NOPs. */
3073 if (element_precision (TREE_TYPE (arg0))
3074 != element_precision (TREE_TYPE (arg1)))
3075 return false;
3077 STRIP_NOPS (arg0);
3078 STRIP_NOPS (arg1);
3080 #if 0
3081 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3082 sanity check once the issue is solved. */
3083 else
3084 /* Addresses of conversions and SSA_NAMEs (and many other things)
3085 are not defined. Check that we did not forget to drop the
3086 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3087 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3088 && TREE_CODE (arg0) != SSA_NAME);
3089 #endif
3091 /* In case both args are comparisons but with different comparison
3092 code, try to swap the comparison operands of one arg to produce
3093 a match and compare that variant. */
3094 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3095 && COMPARISON_CLASS_P (arg0)
3096 && COMPARISON_CLASS_P (arg1))
3098 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3100 if (TREE_CODE (arg0) == swap_code)
3101 return operand_equal_p (TREE_OPERAND (arg0, 0),
3102 TREE_OPERAND (arg1, 1), flags)
3103 && operand_equal_p (TREE_OPERAND (arg0, 1),
3104 TREE_OPERAND (arg1, 0), flags);
3107 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3109 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3110 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3112 else if (flags & OEP_ADDRESS_OF)
3114 /* If we are interested in comparing addresses, ignore
3115 MEM_REF wrappings of the base that can appear just for
3116 TBAA reasons. */
3117 if (TREE_CODE (arg0) == MEM_REF
3118 && DECL_P (arg1)
3119 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3120 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3121 && integer_zerop (TREE_OPERAND (arg0, 1)))
3122 return true;
3123 else if (TREE_CODE (arg1) == MEM_REF
3124 && DECL_P (arg0)
3125 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3126 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3127 && integer_zerop (TREE_OPERAND (arg1, 1)))
3128 return true;
3129 return false;
3131 else
3132 return false;
3135 /* When not checking addresses, this is needed for conversions and for
3136 COMPONENT_REF. Might as well play it safe and always test this. */
3137 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3138 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3139 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3140 && !(flags & OEP_ADDRESS_OF)))
3141 return false;
3143 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3144 We don't care about side effects in that case because the SAVE_EXPR
3145 takes care of that for us. In all other cases, two expressions are
3146 equal if they have no side effects. If we have two identical
3147 expressions with side effects that should be treated the same due
3148 to the only side effects being identical SAVE_EXPR's, that will
3149 be detected in the recursive calls below.
3150 If we are taking an invariant address of two identical objects
3151 they are necessarily equal as well. */
3152 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3153 && (TREE_CODE (arg0) == SAVE_EXPR
3154 || (flags & OEP_MATCH_SIDE_EFFECTS)
3155 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3156 return true;
3158 /* Next handle constant cases, those for which we can return 1 even
3159 if ONLY_CONST is set. */
3160 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3161 switch (TREE_CODE (arg0))
3163 case INTEGER_CST:
3164 return tree_int_cst_equal (arg0, arg1);
3166 case FIXED_CST:
3167 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3168 TREE_FIXED_CST (arg1));
3170 case REAL_CST:
3171 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3172 return true;
3174 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3176 /* If we do not distinguish between signed and unsigned zero,
3177 consider them equal. */
3178 if (real_zerop (arg0) && real_zerop (arg1))
3179 return true;
3181 return false;
3183 case VECTOR_CST:
3185 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3186 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3187 return false;
3189 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3190 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3191 return false;
3193 unsigned int count = vector_cst_encoded_nelts (arg0);
3194 for (unsigned int i = 0; i < count; ++i)
3195 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3196 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3197 return false;
3198 return true;
3201 case COMPLEX_CST:
3202 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3203 flags)
3204 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3205 flags));
3207 case STRING_CST:
3208 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3209 && ! memcmp (TREE_STRING_POINTER (arg0),
3210 TREE_STRING_POINTER (arg1),
3211 TREE_STRING_LENGTH (arg0)));
3213 case ADDR_EXPR:
3214 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3215 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3216 flags | OEP_ADDRESS_OF
3217 | OEP_MATCH_SIDE_EFFECTS);
3218 case CONSTRUCTOR:
3219 /* In GIMPLE empty constructors are allowed in initializers of
3220 aggregates. */
3221 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3222 default:
3223 break;
3226 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3227 two instances of undefined behavior will give identical results. */
3228 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3229 return false;
3231 /* Define macros to test an operand from arg0 and arg1 for equality and a
3232 variant that allows null and views null as being different from any
3233 non-null value. In the latter case, if either is null, then both
3234 must be; otherwise, do the normal comparison. */
3235 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3236 TREE_OPERAND (arg1, N), flags)
3238 #define OP_SAME_WITH_NULL(N) \
3239 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3240 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3242 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3244 case tcc_unary:
3245 /* Two conversions are equal only if signedness and modes match. */
3246 switch (TREE_CODE (arg0))
3248 CASE_CONVERT:
3249 case FIX_TRUNC_EXPR:
3250 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3251 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3252 return false;
3253 break;
3254 default:
3255 break;
3258 return OP_SAME (0);
3261 case tcc_comparison:
3262 case tcc_binary:
3263 if (OP_SAME (0) && OP_SAME (1))
3264 return true;
3266 /* For commutative ops, allow the other order. */
3267 return (commutative_tree_code (TREE_CODE (arg0))
3268 && operand_equal_p (TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 1), flags)
3270 && operand_equal_p (TREE_OPERAND (arg0, 1),
3271 TREE_OPERAND (arg1, 0), flags));
3273 case tcc_reference:
3274 /* If either of the pointer (or reference) expressions we are
3275 dereferencing contain a side effect, these cannot be equal,
3276 but their addresses can be. */
3277 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3278 && (TREE_SIDE_EFFECTS (arg0)
3279 || TREE_SIDE_EFFECTS (arg1)))
3280 return false;
3282 switch (TREE_CODE (arg0))
3284 case INDIRECT_REF:
3285 if (!(flags & OEP_ADDRESS_OF))
3287 if (TYPE_ALIGN (TREE_TYPE (arg0))
3288 != TYPE_ALIGN (TREE_TYPE (arg1)))
3289 return false;
3290 /* Verify that the access types are compatible. */
3291 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3292 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3293 return false;
3295 flags &= ~OEP_ADDRESS_OF;
3296 return OP_SAME (0);
3298 case IMAGPART_EXPR:
3299 /* Require the same offset. */
3300 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3301 TYPE_SIZE (TREE_TYPE (arg1)),
3302 flags & ~OEP_ADDRESS_OF))
3303 return false;
3305 /* Fallthru. */
3306 case REALPART_EXPR:
3307 case VIEW_CONVERT_EXPR:
3308 return OP_SAME (0);
3310 case TARGET_MEM_REF:
3311 case MEM_REF:
3312 if (!(flags & OEP_ADDRESS_OF))
3314 /* Require equal access sizes */
3315 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3316 && (!TYPE_SIZE (TREE_TYPE (arg0))
3317 || !TYPE_SIZE (TREE_TYPE (arg1))
3318 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3319 TYPE_SIZE (TREE_TYPE (arg1)),
3320 flags)))
3321 return false;
3322 /* Verify that access happens in similar types. */
3323 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3324 return false;
3325 /* Verify that accesses are TBAA compatible. */
3326 if (!alias_ptr_types_compatible_p
3327 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3328 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3329 || (MR_DEPENDENCE_CLIQUE (arg0)
3330 != MR_DEPENDENCE_CLIQUE (arg1))
3331 || (MR_DEPENDENCE_BASE (arg0)
3332 != MR_DEPENDENCE_BASE (arg1)))
3333 return false;
3334 /* Verify that alignment is compatible. */
3335 if (TYPE_ALIGN (TREE_TYPE (arg0))
3336 != TYPE_ALIGN (TREE_TYPE (arg1)))
3337 return false;
3339 flags &= ~OEP_ADDRESS_OF;
3340 return (OP_SAME (0) && OP_SAME (1)
3341 /* TARGET_MEM_REFs require equal extra operands. */
3342 && (TREE_CODE (arg0) != TARGET_MEM_REF
3343 || (OP_SAME_WITH_NULL (2)
3344 && OP_SAME_WITH_NULL (3)
3345 && OP_SAME_WITH_NULL (4))));
3347 case ARRAY_REF:
3348 case ARRAY_RANGE_REF:
3349 if (!OP_SAME (0))
3350 return false;
3351 flags &= ~OEP_ADDRESS_OF;
3352 /* Compare the array index by value first if it is constant, as we
3353 may have different types but the same value here. */
3354 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3355 TREE_OPERAND (arg1, 1))
3356 || OP_SAME (1))
3357 && OP_SAME_WITH_NULL (2)
3358 && OP_SAME_WITH_NULL (3)
3359 /* Compare low bound and element size as with OEP_ADDRESS_OF
3360 we have to account for the offset of the ref. */
3361 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3362 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3363 || (operand_equal_p (array_ref_low_bound
3364 (CONST_CAST_TREE (arg0)),
3365 array_ref_low_bound
3366 (CONST_CAST_TREE (arg1)), flags)
3367 && operand_equal_p (array_ref_element_size
3368 (CONST_CAST_TREE (arg0)),
3369 array_ref_element_size
3370 (CONST_CAST_TREE (arg1)),
3371 flags))));
3373 case COMPONENT_REF:
3374 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3375 may be NULL when we're called to compare MEM_EXPRs. */
3376 if (!OP_SAME_WITH_NULL (0))
3377 return false;
3379 bool compare_address = flags & OEP_ADDRESS_OF;
3381 /* Most of the time we only need to compare FIELD_DECLs for equality.
3382 However, when determining addresses, look at the actual offsets.
3383 These may match for unions and unshared record types. */
3384 flags &= ~OEP_ADDRESS_OF;
3385 if (!OP_SAME (1))
3387 if (compare_address
3388 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3390 tree field0 = TREE_OPERAND (arg0, 1);
3391 tree field1 = TREE_OPERAND (arg1, 1);
3393 /* Non-FIELD_DECL operands can appear in C++ templates. */
3394 if (TREE_CODE (field0) != FIELD_DECL
3395 || TREE_CODE (field1) != FIELD_DECL
3396 || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3397 DECL_FIELD_OFFSET (field1), flags)
3398 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3399 DECL_FIELD_BIT_OFFSET (field1),
3400 flags))
3401 return false;
3403 else
3404 return false;
3407 return OP_SAME_WITH_NULL (2);
3409 case BIT_FIELD_REF:
3410 if (!OP_SAME (0))
3411 return false;
3412 flags &= ~OEP_ADDRESS_OF;
3413 return OP_SAME (1) && OP_SAME (2);
3415 default:
3416 return false;
3419 case tcc_expression:
3420 switch (TREE_CODE (arg0))
3422 case ADDR_EXPR:
3423 /* Be sure we pass right ADDRESS_OF flag. */
3424 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3425 return operand_equal_p (TREE_OPERAND (arg0, 0),
3426 TREE_OPERAND (arg1, 0),
3427 flags | OEP_ADDRESS_OF);
3429 case TRUTH_NOT_EXPR:
3430 return OP_SAME (0);
3432 case TRUTH_ANDIF_EXPR:
3433 case TRUTH_ORIF_EXPR:
3434 return OP_SAME (0) && OP_SAME (1);
3436 case WIDEN_MULT_PLUS_EXPR:
3437 case WIDEN_MULT_MINUS_EXPR:
3438 if (!OP_SAME (2))
3439 return false;
3440 /* The multiplication operands are commutative. */
3441 /* FALLTHRU */
3443 case TRUTH_AND_EXPR:
3444 case TRUTH_OR_EXPR:
3445 case TRUTH_XOR_EXPR:
3446 if (OP_SAME (0) && OP_SAME (1))
3447 return true;
3449 /* Otherwise take into account this is a commutative operation. */
3450 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3451 TREE_OPERAND (arg1, 1), flags)
3452 && operand_equal_p (TREE_OPERAND (arg0, 1),
3453 TREE_OPERAND (arg1, 0), flags));
3455 case COND_EXPR:
3456 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3457 return false;
3458 flags &= ~OEP_ADDRESS_OF;
3459 return OP_SAME (0);
3461 case BIT_INSERT_EXPR:
3462 /* BIT_INSERT_EXPR has an implicit operand: the type precision
3463 of op1. Check to make sure they are the same. */
3464 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3465 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3466 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3467 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3468 return false;
3469 /* FALLTHRU */
3471 case VEC_COND_EXPR:
3472 case DOT_PROD_EXPR:
3473 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3475 case MODIFY_EXPR:
3476 case INIT_EXPR:
3477 case COMPOUND_EXPR:
3478 case PREDECREMENT_EXPR:
3479 case PREINCREMENT_EXPR:
3480 case POSTDECREMENT_EXPR:
3481 case POSTINCREMENT_EXPR:
3482 if (flags & OEP_LEXICOGRAPHIC)
3483 return OP_SAME (0) && OP_SAME (1);
3484 return false;
3486 case CLEANUP_POINT_EXPR:
3487 case EXPR_STMT:
3488 case SAVE_EXPR:
3489 if (flags & OEP_LEXICOGRAPHIC)
3490 return OP_SAME (0);
3491 return false;
3493 case OBJ_TYPE_REF:
3494 /* Virtual table reference. */
3495 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3496 OBJ_TYPE_REF_EXPR (arg1), flags))
3497 return false;
3498 flags &= ~OEP_ADDRESS_OF;
3499 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3500 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3501 return false;
3502 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3503 OBJ_TYPE_REF_OBJECT (arg1), flags))
3504 return false;
3505 if (virtual_method_call_p (arg0))
3507 if (!virtual_method_call_p (arg1))
3508 return false;
3509 return types_same_for_odr (obj_type_ref_class (arg0),
3510 obj_type_ref_class (arg1));
3512 return false;
3514 default:
3515 return false;
3518 case tcc_vl_exp:
3519 switch (TREE_CODE (arg0))
3521 case CALL_EXPR:
3522 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3523 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3524 /* If the two CALL_EXPRs are not both internal or both normal
3525 function calls, then they are not equal. */
3526 return false;
3527 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3529 /* If the CALL_EXPRs call different internal functions, then they
3530 are not equal. */
3531 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3532 return false;
3534 else
3536 /* If the CALL_EXPRs call different functions, then they are not
3537 equal. */
3538 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3539 flags))
3540 return false;
3543 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3545 unsigned int cef = call_expr_flags (arg0);
3546 if (flags & OEP_PURE_SAME)
3547 cef &= ECF_CONST | ECF_PURE;
3548 else
3549 cef &= ECF_CONST;
3550 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3551 return false;
3554 /* Now see if all the arguments are the same. */
3556 const_call_expr_arg_iterator iter0, iter1;
3557 const_tree a0, a1;
3558 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3559 a1 = first_const_call_expr_arg (arg1, &iter1);
3560 a0 && a1;
3561 a0 = next_const_call_expr_arg (&iter0),
3562 a1 = next_const_call_expr_arg (&iter1))
3563 if (! operand_equal_p (a0, a1, flags))
3564 return false;
3566 /* If we get here and both argument lists are exhausted
3567 then the CALL_EXPRs are equal. */
3568 return ! (a0 || a1);
3570 default:
3571 return false;
3574 case tcc_declaration:
3575 /* Consider __builtin_sqrt equal to sqrt. */
3576 if (TREE_CODE (arg0) == FUNCTION_DECL)
3577 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3578 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3579 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3580 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3582 if (DECL_P (arg0)
3583 && (flags & OEP_DECL_NAME)
3584 && (flags & OEP_LEXICOGRAPHIC))
3586 /* Consider decls with the same name equal. The caller needs
3587 to make sure they refer to the same entity (such as a function
3588 formal parameter). */
3589 tree a0name = DECL_NAME (arg0);
3590 tree a1name = DECL_NAME (arg1);
3591 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3592 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3593 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3595 return false;
3597 case tcc_exceptional:
3598 if (TREE_CODE (arg0) == CONSTRUCTOR)
3600 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3601 return false;
3603 /* In GIMPLE constructors are used only to build vectors from
3604 elements. Individual elements in the constructor must be
3605 indexed in increasing order and form an initial sequence.
3607 We make no effort to compare constructors in GENERIC.
3608 (see sem_variable::equals in ipa-icf which can do so for
3609 constants). */
3610 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3611 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3612 return false;
3614 /* Be sure that the vectors constructed have the same representation.
3615 So far we have only tested that element precision and modes match.
3616 Vectors may be BLKmode, so also check that the number of
3617 parts matches. */
3618 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3619 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3620 return false;
3622 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3623 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3624 unsigned int len = vec_safe_length (v0);
3626 if (len != vec_safe_length (v1))
3627 return false;
3629 for (unsigned int i = 0; i < len; i++)
3631 constructor_elt *c0 = &(*v0)[i];
3632 constructor_elt *c1 = &(*v1)[i];
3634 if (!operand_equal_p (c0->value, c1->value, flags)
3635 /* In GIMPLE the indexes can be either NULL or matching i.
3636 Double check this so we won't get false
3637 positives for GENERIC. */
3638 || (c0->index
3639 && (TREE_CODE (c0->index) != INTEGER_CST
3640 || compare_tree_int (c0->index, i)))
3641 || (c1->index
3642 && (TREE_CODE (c1->index) != INTEGER_CST
3643 || compare_tree_int (c1->index, i))))
3644 return false;
3646 return true;
3648 else if (TREE_CODE (arg0) == STATEMENT_LIST
3649 && (flags & OEP_LEXICOGRAPHIC))
3651 /* Compare the STATEMENT_LISTs. */
3652 tree_stmt_iterator tsi1, tsi2;
3653 tree body1 = CONST_CAST_TREE (arg0);
3654 tree body2 = CONST_CAST_TREE (arg1);
3655 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3656 tsi_next (&tsi1), tsi_next (&tsi2))
3658 /* The lists don't have the same number of statements. */
3659 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3660 return false;
3661 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3662 return true;
3663 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3664 flags & (OEP_LEXICOGRAPHIC
3665 | OEP_NO_HASH_CHECK)))
3666 return false;
3669 return false;
3671 case tcc_statement:
3672 switch (TREE_CODE (arg0))
3674 case RETURN_EXPR:
3675 if (flags & OEP_LEXICOGRAPHIC)
3676 return OP_SAME_WITH_NULL (0);
3677 return false;
3678 case DEBUG_BEGIN_STMT:
3679 if (flags & OEP_LEXICOGRAPHIC)
3680 return true;
3681 return false;
3682 default:
3683 return false;
3686 default:
3687 return false;
3690 #undef OP_SAME
3691 #undef OP_SAME_WITH_NULL
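/* Typical calls into the comparison above (a sketch; most callers
   reach it through the global operand_equal_p wrapper):

     operand_equal_p (a, b, 0)               // same value
     operand_equal_p (a, b, OEP_ONLY_CONST)  // equal constants only
     operand_equal_p (a, b, OEP_ADDRESS_OF)  // &A and &B are the
                                             // same address
*/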
3694 /* Generate a hash value for an expression. This can be used iteratively
3695 by passing a previous result as the HSTATE argument. */
3697 void
3698 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3699 unsigned int flags)
3701 int i;
3702 enum tree_code code;
3703 enum tree_code_class tclass;
3705 if (t == NULL_TREE || t == error_mark_node)
3707 hstate.merge_hash (0);
3708 return;
3711 STRIP_ANY_LOCATION_WRAPPER (t);
3713 if (!(flags & OEP_ADDRESS_OF))
3714 STRIP_NOPS (t);
3716 code = TREE_CODE (t);
3718 switch (code)
3720 /* Alas, constants aren't shared, so we can't rely on pointer
3721 identity. */
3722 case VOID_CST:
3723 hstate.merge_hash (0);
3724 return;
3725 case INTEGER_CST:
3726 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3727 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3728 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3729 return;
3730 case REAL_CST:
3732 unsigned int val2;
3733 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3734 val2 = rvc_zero;
3735 else
3736 val2 = real_hash (TREE_REAL_CST_PTR (t));
3737 hstate.merge_hash (val2);
3738 return;
3740 case FIXED_CST:
3742 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3743 hstate.merge_hash (val2);
3744 return;
3746 case STRING_CST:
3747 hstate.add ((const void *) TREE_STRING_POINTER (t),
3748 TREE_STRING_LENGTH (t));
3749 return;
3750 case COMPLEX_CST:
3751 hash_operand (TREE_REALPART (t), hstate, flags);
3752 hash_operand (TREE_IMAGPART (t), hstate, flags);
3753 return;
3754 case VECTOR_CST:
3756 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3757 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3758 unsigned int count = vector_cst_encoded_nelts (t);
3759 for (unsigned int i = 0; i < count; ++i)
3760 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3761 return;
3763 case SSA_NAME:
3764 /* We can just compare by pointer. */
3765 hstate.add_hwi (SSA_NAME_VERSION (t));
3766 return;
3767 case PLACEHOLDER_EXPR:
3768 /* The node itself doesn't matter. */
3769 return;
3770 case BLOCK:
3771 case OMP_CLAUSE:
3772 /* Ignore. */
3773 return;
3774 case TREE_LIST:
3775 /* A list of expressions, for a CALL_EXPR or as the elements of a
3776 VECTOR_CST. */
3777 for (; t; t = TREE_CHAIN (t))
3778 hash_operand (TREE_VALUE (t), hstate, flags);
3779 return;
3780 case CONSTRUCTOR:
3782 unsigned HOST_WIDE_INT idx;
3783 tree field, value;
3784 flags &= ~OEP_ADDRESS_OF;
3785 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3786 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3788 /* In GIMPLE the indexes can be either NULL or matching i. */
3789 if (field == NULL_TREE)
3790 field = bitsize_int (idx);
3791 hash_operand (field, hstate, flags);
3792 hash_operand (value, hstate, flags);
3794 return;
3796 case STATEMENT_LIST:
3798 tree_stmt_iterator i;
3799 for (i = tsi_start (CONST_CAST_TREE (t));
3800 !tsi_end_p (i); tsi_next (&i))
3801 hash_operand (tsi_stmt (i), hstate, flags);
3802 return;
3804 case TREE_VEC:
3805 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3806 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3807 return;
3808 case IDENTIFIER_NODE:
3809 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3810 return;
3811 case FUNCTION_DECL:
3812 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3813 Otherwise nodes that compare equal according to operand_equal_p might
3814 get different hash codes. However, don't do this for machine specific
3815 or front end builtins, since the function code is overloaded in those
3816 cases. */
3817 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3818 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3820 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3821 code = TREE_CODE (t);
3823 /* FALL THROUGH */
3824 default:
3825 if (POLY_INT_CST_P (t))
3827 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3828 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3829 return;
3831 tclass = TREE_CODE_CLASS (code);
3833 if (tclass == tcc_declaration)
3835 /* DECLs have a unique ID. */
3836 hstate.add_hwi (DECL_UID (t));
3838 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3840 /* For comparisons that can be swapped, use the lower
3841 tree code. */
3842 enum tree_code ccode = swap_tree_comparison (code);
3843 if (code < ccode)
3844 ccode = code;
3845 hstate.add_object (ccode);
3846 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3847 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3849 else if (CONVERT_EXPR_CODE_P (code))
3851 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3852 operand_equal_p. */
3853 enum tree_code ccode = NOP_EXPR;
3854 hstate.add_object (ccode);
3856 /* Don't hash the type, that can lead to having nodes which
3857 compare equal according to operand_equal_p, but which
3858 have different hash codes. Make sure to include signedness
3859 in the hash computation. */
3860 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3861 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3863 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3864 else if (code == MEM_REF
3865 && (flags & OEP_ADDRESS_OF) != 0
3866 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3867 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3868 && integer_zerop (TREE_OPERAND (t, 1)))
3869 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3870 hstate, flags);
3871 /* Don't ICE on FE specific trees, or their arguments etc.
3872 during operand_equal_p hash verification. */
3873 else if (!IS_EXPR_CODE_CLASS (tclass))
3874 gcc_assert (flags & OEP_HASH_CHECK);
3875 else
3877 unsigned int sflags = flags;
3879 hstate.add_object (code);
3881 switch (code)
3883 case ADDR_EXPR:
3884 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3885 flags |= OEP_ADDRESS_OF;
3886 sflags = flags;
3887 break;
3889 case INDIRECT_REF:
3890 case MEM_REF:
3891 case TARGET_MEM_REF:
3892 flags &= ~OEP_ADDRESS_OF;
3893 sflags = flags;
3894 break;
3896 case COMPONENT_REF:
3897 if (sflags & OEP_ADDRESS_OF)
3899 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3900 hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
3901 hstate, flags & ~OEP_ADDRESS_OF);
3902 hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
3903 hstate, flags & ~OEP_ADDRESS_OF);
3904 return;
3906 break;
3907 case ARRAY_REF:
3908 case ARRAY_RANGE_REF:
3909 case BIT_FIELD_REF:
3910 sflags &= ~OEP_ADDRESS_OF;
3911 break;
3913 case COND_EXPR:
3914 flags &= ~OEP_ADDRESS_OF;
3915 break;
3917 case WIDEN_MULT_PLUS_EXPR:
3918 case WIDEN_MULT_MINUS_EXPR:
3920 /* The multiplication operands are commutative. */
3921 inchash::hash one, two;
3922 hash_operand (TREE_OPERAND (t, 0), one, flags);
3923 hash_operand (TREE_OPERAND (t, 1), two, flags);
3924 hstate.add_commutative (one, two);
3925 hash_operand (TREE_OPERAND (t, 2), two, flags);
3926 return;
3929 case CALL_EXPR:
3930 if (CALL_EXPR_FN (t) == NULL_TREE)
3931 hstate.add_int (CALL_EXPR_IFN (t));
3932 break;
3934 case TARGET_EXPR:
3935 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3936 Usually, different TARGET_EXPRs should just use
3937 different temporaries in their slots. */
3938 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3939 return;
3941 case OBJ_TYPE_REF:
3942 /* Virtual table reference. */
3943 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3944 flags &= ~OEP_ADDRESS_OF;
3945 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3946 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3947 if (!virtual_method_call_p (t))
3948 return;
3949 if (tree c = obj_type_ref_class (t))
3951 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3952 /* We compute mangled names only when free_lang_data is run.
3953 In that case we can hash precisely. */
3954 if (TREE_CODE (c) == TYPE_DECL
3955 && DECL_ASSEMBLER_NAME_SET_P (c))
3956 hstate.add_object
3957 (IDENTIFIER_HASH_VALUE
3958 (DECL_ASSEMBLER_NAME (c)));
3960 return;
3961 default:
3962 break;
3965 /* Don't hash the type, that can lead to having nodes which
3966 compare equal according to operand_equal_p, but which
3967 have different hash codes. */
3968 if (code == NON_LVALUE_EXPR)
3970 /* Make sure to include signedness in the hash computation. */
3971 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3972 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3975 else if (commutative_tree_code (code))
3977 /* It's a commutative expression. We want to hash it the same
3978 however it appears. We do this by first hashing both operands
3979 and then rehashing based on the order of their independent
3980 hashes. */
3981 inchash::hash one, two;
3982 hash_operand (TREE_OPERAND (t, 0), one, flags);
3983 hash_operand (TREE_OPERAND (t, 1), two, flags);
3984 hstate.add_commutative (one, two);
3986 else
3987 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3988 hash_operand (TREE_OPERAND (t, i), hstate,
3989 i == 0 ? flags : sflags);
3991 return;
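
/* Illustration only, not part of fold-const.cc: a minimal standalone
   sketch of the commutative hashing used above.  Both operands are
   hashed independently and the two sub-hashes are then combined in an
   order-insensitive way, so A + B and B + A get the same hash.  The
   combiner below is a hypothetical stand-in for
   inchash::hash::add_commutative, not its real implementation.  */

#include <cstdint>
#include <cstdio>

static uint32_t
mix (uint32_t h, uint32_t v)
{
  /* A simple hash combiner; any reasonable mixer works here.  */
  return h ^ (v + 0x9e3779b9u + (h << 6) + (h >> 2));
}

static uint32_t
add_commutative (uint32_t seed, uint32_t one, uint32_t two)
{
  /* Feeding the smaller sub-hash first erases the operand order.  */
  uint32_t lo = one < two ? one : two;
  uint32_t hi = one < two ? two : one;
  return mix (mix (seed, lo), hi);
}

int
main ()
{
  uint32_t ha = 0x1234u, hb = 0xabcdu;  /* pretend operand hashes */
  printf ("%08x\n", (unsigned) add_commutative (1, ha, hb));
  printf ("%08x\n", (unsigned) add_commutative (1, hb, ha));  /* same */
  return 0;
}
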
3995 bool
3996 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3997 unsigned int flags, bool *ret)
3999 /* When checking and unless comparing DECL names, verify that if
4000 the outermost operand_equal_p call returns non-zero then ARG0
4001 and ARG1 have the same hash value. */
4002 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4004 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4006 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4008 inchash::hash hstate0 (0), hstate1 (0);
4009 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4010 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4011 hashval_t h0 = hstate0.end ();
4012 hashval_t h1 = hstate1.end ();
4013 gcc_assert (h0 == h1);
4015 *ret = true;
4017 else
4018 *ret = false;
4020 return true;
4023 return false;
4027 static operand_compare default_compare_instance;
4029 /* Convenience wrapper around the operand_compare class, because usually we do
4030 not need to play with the valueizer. */
4032 bool
4033 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4035 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4038 namespace inchash
4041 /* Generate a hash value for an expression. This can be used iteratively
4042 by passing a previous result as the HSTATE argument.
4044 This function is intended to produce the same hash for expressions which
4045 would compare equal using operand_equal_p. */
4046 void
4047 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4049 default_compare_instance.hash_operand (t, hstate, flags);
4054 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4055 with a different signedness or a narrower precision. */
4057 static bool
4058 operand_equal_for_comparison_p (tree arg0, tree arg1)
4060 if (operand_equal_p (arg0, arg1, 0))
4061 return true;
4063 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4064 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4065 return false;
4067 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4068 and see if the inner values are the same. This removes any
4069 signedness comparison, which doesn't matter here. */
4070 tree op0 = arg0;
4071 tree op1 = arg1;
4072 STRIP_NOPS (op0);
4073 STRIP_NOPS (op1);
4074 if (operand_equal_p (op0, op1, 0))
4075 return true;
4077 /* Discard a single widening conversion from ARG1 and see if the inner
4078 value is the same as ARG0. */
4079 if (CONVERT_EXPR_P (arg1)
4080 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4081 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4082 < TYPE_PRECISION (TREE_TYPE (arg1))
4083 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4084 return true;
4086 return false;
4089 /* See if ARG is an expression that is either a comparison or is performing
4090 arithmetic on comparisons. The comparisons must only be comparing
4091 two different values, which will be stored in *CVAL1 and *CVAL2; if
4092 they are nonzero it means that some operands have already been found.
4093 No variables may be used anywhere else in the expression except in the
4094 comparisons.
4096 If this is true, return 1. Otherwise, return zero. */
4098 static bool
4099 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4101 enum tree_code code = TREE_CODE (arg);
4102 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4104 /* We can handle some of the tcc_expression cases here. */
4105 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4106 tclass = tcc_unary;
4107 else if (tclass == tcc_expression
4108 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4109 || code == COMPOUND_EXPR))
4110 tclass = tcc_binary;
4112 switch (tclass)
4114 case tcc_unary:
4115 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4117 case tcc_binary:
4118 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4119 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4121 case tcc_constant:
4122 return true;
4124 case tcc_expression:
4125 if (code == COND_EXPR)
4126 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4127 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4128 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4129 return false;
4131 case tcc_comparison:
4132 /* First see if we can handle the first operand, then the second. For
4133 the second operand, we know *CVAL1 can't be zero. It must be that
4134 one side of the comparison is each of the values; test for the
4135 case where this isn't true by failing if the two operands
4136 are the same. */
4138 if (operand_equal_p (TREE_OPERAND (arg, 0),
4139 TREE_OPERAND (arg, 1), 0))
4140 return false;
4142 if (*cval1 == 0)
4143 *cval1 = TREE_OPERAND (arg, 0);
4144 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4146 else if (*cval2 == 0)
4147 *cval2 = TREE_OPERAND (arg, 0);
4148 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4150 else
4151 return false;
4153 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4155 else if (*cval2 == 0)
4156 *cval2 = TREE_OPERAND (arg, 1);
4157 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4159 else
4160 return false;
4162 return true;
4164 default:
4165 return false;
4169 /* ARG is a tree that is known to contain just arithmetic operations and
4170 comparisons. Evaluate the operations in the tree substituting NEW0 for
4171 any occurrence of OLD0 as an operand of a comparison and likewise for
4172 NEW1 and OLD1. */
4174 static tree
4175 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4176 tree old1, tree new1)
4178 tree type = TREE_TYPE (arg);
4179 enum tree_code code = TREE_CODE (arg);
4180 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4182 /* We can handle some of the tcc_expression cases here. */
4183 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4184 tclass = tcc_unary;
4185 else if (tclass == tcc_expression
4186 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4187 tclass = tcc_binary;
4189 switch (tclass)
4191 case tcc_unary:
4192 return fold_build1_loc (loc, code, type,
4193 eval_subst (loc, TREE_OPERAND (arg, 0),
4194 old0, new0, old1, new1));
4196 case tcc_binary:
4197 return fold_build2_loc (loc, code, type,
4198 eval_subst (loc, TREE_OPERAND (arg, 0),
4199 old0, new0, old1, new1),
4200 eval_subst (loc, TREE_OPERAND (arg, 1),
4201 old0, new0, old1, new1));
4203 case tcc_expression:
4204 switch (code)
4206 case SAVE_EXPR:
4207 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4208 old1, new1);
4210 case COMPOUND_EXPR:
4211 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4212 old1, new1);
4214 case COND_EXPR:
4215 return fold_build3_loc (loc, code, type,
4216 eval_subst (loc, TREE_OPERAND (arg, 0),
4217 old0, new0, old1, new1),
4218 eval_subst (loc, TREE_OPERAND (arg, 1),
4219 old0, new0, old1, new1),
4220 eval_subst (loc, TREE_OPERAND (arg, 2),
4221 old0, new0, old1, new1));
4222 default:
4223 break;
4225 /* Fall through - ??? */
4227 case tcc_comparison:
4229 tree arg0 = TREE_OPERAND (arg, 0);
4230 tree arg1 = TREE_OPERAND (arg, 1);
4232 /* We need to check both for exact equality and tree equality. The
4233 former will be true if the operand has a side-effect. In that
4234 case, we know the operand occurred exactly once. */
4236 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4237 arg0 = new0;
4238 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4239 arg0 = new1;
4241 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4242 arg1 = new0;
4243 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4244 arg1 = new1;
4246 return fold_build2_loc (loc, code, type, arg0, arg1);
4249 default:
4250 return arg;
4254 /* Return a tree for the case when the result of an expression is RESULT
4255 converted to TYPE and OMITTED was previously an operand of the expression
4256 but is now not needed (e.g., we folded OMITTED * 0).
4258 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4259 the conversion of RESULT to TYPE. */
4261 tree
4262 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4264 tree t = fold_convert_loc (loc, type, result);
4266 /* If the resulting operand is an empty statement, just return the omitted
4267 statement casted to void. */
4268 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4269 return build1_loc (loc, NOP_EXPR, void_type_node,
4270 fold_ignored_result (omitted));
4272 if (TREE_SIDE_EFFECTS (omitted))
4273 return build2_loc (loc, COMPOUND_EXPR, type,
4274 fold_ignored_result (omitted), t);
4276 return non_lvalue_loc (loc, t);
4279 /* Return a tree for the case when the result of an expression is RESULT
4280 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4281 of the expression but are now not needed.
4283 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4284 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4285 evaluated before OMITTED2. Otherwise, if neither has side effects,
4286 just do the conversion of RESULT to TYPE. */
4288 tree
4289 omit_two_operands_loc (location_t loc, tree type, tree result,
4290 tree omitted1, tree omitted2)
4292 tree t = fold_convert_loc (loc, type, result);
4294 if (TREE_SIDE_EFFECTS (omitted2))
4295 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4296 if (TREE_SIDE_EFFECTS (omitted1))
4297 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4299 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
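
/* Illustration only, not part of fold-const.cc: what omit_one_operand_loc
   produces at the source level.  Folding f () * 0 down to a bare 0 would
   lose the call, so the omitted operand is kept in a COMPOUND_EXPR, which
   is the C comma expression (f (), 0).  A standalone check:  */

#include <cstdio>

static int calls;

static int
f ()
{
  ++calls;
  return 42;
}

int
main ()
{
  int x = (f (), 0);  /* the folded form of f () * 0 */
  printf ("x=%d calls=%d\n", x, calls);  /* prints x=0 calls=1 */
  return 0;
}
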
4303 /* Return a simplified tree node for the truth-negation of ARG. This
4304 never alters ARG itself. We assume that ARG is an operation that
4305 returns a truth value (0 or 1).
4307 FIXME: one would think we would fold the result, but it causes
4308 problems with the dominator optimizer. */
4310 static tree
4311 fold_truth_not_expr (location_t loc, tree arg)
4313 tree type = TREE_TYPE (arg);
4314 enum tree_code code = TREE_CODE (arg);
4315 location_t loc1, loc2;
4317 /* If this is a comparison, we can simply invert it, except for
4318 floating-point non-equality comparisons, in which case we just
4319 enclose a TRUTH_NOT_EXPR around what we have. */
4321 if (TREE_CODE_CLASS (code) == tcc_comparison)
4323 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4324 if (FLOAT_TYPE_P (op_type)
4325 && flag_trapping_math
4326 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4327 && code != NE_EXPR && code != EQ_EXPR)
4328 return NULL_TREE;
4330 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4331 if (code == ERROR_MARK)
4332 return NULL_TREE;
4334 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4335 TREE_OPERAND (arg, 1));
4336 copy_warning (ret, arg);
4337 return ret;
4340 switch (code)
4342 case INTEGER_CST:
4343 return constant_boolean_node (integer_zerop (arg), type);
4345 case TRUTH_AND_EXPR:
4346 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4347 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4348 return build2_loc (loc, TRUTH_OR_EXPR, type,
4349 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4350 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4352 case TRUTH_OR_EXPR:
4353 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4354 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4355 return build2_loc (loc, TRUTH_AND_EXPR, type,
4356 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4357 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4359 case TRUTH_XOR_EXPR:
4360 /* Here we can invert either operand. We invert the first operand
4361 unless the second operand is a TRUTH_NOT_EXPR, in which case our
4362 result is the XOR of the first operand with the operand inside
4363 the negation of the second operand. */
4365 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4366 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4367 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4368 else
4369 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4370 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4371 TREE_OPERAND (arg, 1));
4373 case TRUTH_ANDIF_EXPR:
4374 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4375 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4376 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4377 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4378 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4380 case TRUTH_ORIF_EXPR:
4381 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4382 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4383 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4384 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4385 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4387 case TRUTH_NOT_EXPR:
4388 return TREE_OPERAND (arg, 0);
4390 case COND_EXPR:
4392 tree arg1 = TREE_OPERAND (arg, 1);
4393 tree arg2 = TREE_OPERAND (arg, 2);
4395 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4396 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4398 /* A COND_EXPR may have a throw as one operand, which
4399 then has void type. Just leave void operands
4400 as they are. */
4401 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4402 VOID_TYPE_P (TREE_TYPE (arg1))
4403 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4404 VOID_TYPE_P (TREE_TYPE (arg2))
4405 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4408 case COMPOUND_EXPR:
4409 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4410 return build2_loc (loc, COMPOUND_EXPR, type,
4411 TREE_OPERAND (arg, 0),
4412 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4414 case NON_LVALUE_EXPR:
4415 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4416 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4418 CASE_CONVERT:
4419 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4420 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4422 /* fall through */
4424 case FLOAT_EXPR:
4425 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4426 return build1_loc (loc, TREE_CODE (arg), type,
4427 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4429 case BIT_AND_EXPR:
4430 if (!integer_onep (TREE_OPERAND (arg, 1)))
4431 return NULL_TREE;
4432 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4434 case SAVE_EXPR:
4435 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4437 case CLEANUP_POINT_EXPR:
4438 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4439 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4440 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4442 default:
4443 return NULL_TREE;
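
/* Illustration only, not part of fold-const.cc: the inversions performed
   above at the source level.  TRUTH_AND(IF)/TRUTH_OR(IF) invert by
   De Morgan's laws, and TRUTH_XOR inverts by negating a single operand.
   A standalone exhaustive check over boolean values:  */

#include <cassert>

int
main ()
{
  for (int a = 0; a <= 1; ++a)
    for (int b = 0; b <= 1; ++b)
      {
	assert (!(a && b) == (!a || !b));  /* TRUTH_ANDIF_EXPR */
	assert (!(a || b) == (!a && !b));  /* TRUTH_ORIF_EXPR */
	assert (!(a ^ b) == ((!a) ^ b));   /* TRUTH_XOR_EXPR */
      }
  return 0;
}
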
4447 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4448 assume that ARG is an operation that returns a truth value (0 or 1
4449 for scalars, 0 or -1 for vectors). Return the folded expression if
4450 folding is successful. Otherwise, return NULL_TREE. */
4452 static tree
4453 fold_invert_truthvalue (location_t loc, tree arg)
4455 tree type = TREE_TYPE (arg);
4456 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4457 ? BIT_NOT_EXPR
4458 : TRUTH_NOT_EXPR,
4459 type, arg);
4462 /* Return a simplified tree node for the truth-negation of ARG. This
4463 never alters ARG itself. We assume that ARG is an operation that
4464 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4466 tree
4467 invert_truthvalue_loc (location_t loc, tree arg)
4469 if (TREE_CODE (arg) == ERROR_MARK)
4470 return arg;
4472 tree type = TREE_TYPE (arg);
4473 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4474 ? BIT_NOT_EXPR
4475 : TRUTH_NOT_EXPR,
4476 type, arg);
4479 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4480 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4481 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4482 is the original memory reference used to preserve the alias set of
4483 the access. */
4485 static tree
4486 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4487 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4488 int unsignedp, int reversep)
4490 tree result, bftype;
4492 /* Attempt not to lose the access path if possible. */
4493 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4495 tree ninner = TREE_OPERAND (orig_inner, 0);
4496 machine_mode nmode;
4497 poly_int64 nbitsize, nbitpos;
4498 tree noffset;
4499 int nunsignedp, nreversep, nvolatilep = 0;
4500 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4501 &noffset, &nmode, &nunsignedp,
4502 &nreversep, &nvolatilep);
4503 if (base == inner
4504 && noffset == NULL_TREE
4505 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4506 && !reversep
4507 && !nreversep
4508 && !nvolatilep)
4510 inner = ninner;
4511 bitpos -= nbitpos;
4515 alias_set_type iset = get_alias_set (orig_inner);
4516 if (iset == 0 && get_alias_set (inner) != iset)
4517 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4518 build_fold_addr_expr (inner),
4519 build_int_cst (ptr_type_node, 0));
4521 if (known_eq (bitpos, 0) && !reversep)
4523 tree size = TYPE_SIZE (TREE_TYPE (inner));
4524 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4525 || POINTER_TYPE_P (TREE_TYPE (inner)))
4526 && tree_fits_shwi_p (size)
4527 && tree_to_shwi (size) == bitsize)
4528 return fold_convert_loc (loc, type, inner);
4531 bftype = type;
4532 if (TYPE_PRECISION (bftype) != bitsize
4533 || TYPE_UNSIGNED (bftype) == !unsignedp)
4534 bftype = build_nonstandard_integer_type (bitsize, 0);
4536 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4537 bitsize_int (bitsize), bitsize_int (bitpos));
4538 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4540 if (bftype != type)
4541 result = fold_convert_loc (loc, type, result);
4543 return result;
4546 /* Optimize a bit-field compare.
4548 There are two cases: First is a compare against a constant and the
4549 second is a comparison of two items where the fields are at the same
4550 bit position relative to the start of a chunk (byte, halfword, word)
4551 large enough to contain it. In these cases we can avoid the shift
4552 implicit in bitfield extractions.
4554 For constants, we emit a compare of the shifted constant with the
4555 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4556 compared. For two fields at the same position, we do the ANDs with the
4557 similar mask and compare the result of the ANDs.
4559 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4560 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4561 are the left and right operands of the comparison, respectively.
4563 If the optimization described above can be done, we return the resulting
4564 tree. Otherwise we return zero. */
4566 static tree
4567 optimize_bit_field_compare (location_t loc, enum tree_code code,
4568 tree compare_type, tree lhs, tree rhs)
4570 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4571 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4572 tree type = TREE_TYPE (lhs);
4573 tree unsigned_type;
4574 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4575 machine_mode lmode, rmode;
4576 scalar_int_mode nmode;
4577 int lunsignedp, runsignedp;
4578 int lreversep, rreversep;
4579 int lvolatilep = 0, rvolatilep = 0;
4580 tree linner, rinner = NULL_TREE;
4581 tree mask;
4582 tree offset;
4584 /* Get all the information about the extractions being done. If the bit size
4585 is the same as the size of the underlying object, we aren't doing an
4586 extraction at all and so can do nothing. We also don't want to
4587 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4588 then will no longer be able to replace it. */
4589 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4590 &lunsignedp, &lreversep, &lvolatilep);
4591 if (linner == lhs
4592 || !known_size_p (plbitsize)
4593 || !plbitsize.is_constant (&lbitsize)
4594 || !plbitpos.is_constant (&lbitpos)
4595 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4596 || offset != 0
4597 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4598 || lvolatilep)
4599 return 0;
4601 if (const_p)
4602 rreversep = lreversep;
4603 else
4605 /* If this is not a constant, we can only do something if bit positions,
4606 sizes, signedness and storage order are the same. */
4607 rinner
4608 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4609 &runsignedp, &rreversep, &rvolatilep);
4611 if (rinner == rhs
4612 || maybe_ne (lbitpos, rbitpos)
4613 || maybe_ne (lbitsize, rbitsize)
4614 || lunsignedp != runsignedp
4615 || lreversep != rreversep
4616 || offset != 0
4617 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4618 || rvolatilep)
4619 return 0;
4622 /* Honor the C++ memory model and mimic what RTL expansion does. */
4623 poly_uint64 bitstart = 0;
4624 poly_uint64 bitend = 0;
4625 if (TREE_CODE (lhs) == COMPONENT_REF)
4627 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4628 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4629 return 0;
4632 /* See if we can find a mode to refer to this field. We should be able to,
4633 but fail if we can't. */
4634 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4635 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4636 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4637 TYPE_ALIGN (TREE_TYPE (rinner))),
4638 BITS_PER_WORD, false, &nmode))
4639 return 0;
4641 /* Set signed and unsigned types of the precision of this mode for the
4642 shifts below. */
4643 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4645 /* Compute the bit position and size for the new reference and our offset
4646 within it. If the new reference is the same size as the original, we
4647 won't optimize anything, so return zero. */
4648 nbitsize = GET_MODE_BITSIZE (nmode);
4649 nbitpos = lbitpos & ~ (nbitsize - 1);
4650 lbitpos -= nbitpos;
4651 if (nbitsize == lbitsize)
4652 return 0;
4654 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4655 lbitpos = nbitsize - lbitsize - lbitpos;
4657 /* Make the mask to be used against the extracted field. */
4658 mask = build_int_cst_type (unsigned_type, -1);
4659 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4660 mask = const_binop (RSHIFT_EXPR, mask,
4661 size_int (nbitsize - lbitsize - lbitpos));
4663 if (! const_p)
4665 if (nbitpos < 0)
4666 return 0;
4668 /* If not comparing with constant, just rework the comparison
4669 and return. */
4670 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4671 nbitsize, nbitpos, 1, lreversep);
4672 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4673 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4674 nbitsize, nbitpos, 1, rreversep);
4675 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4676 return fold_build2_loc (loc, code, compare_type, t1, t2);
4679 /* Otherwise, we are handling the constant case. See if the constant is too
4680 big for the field. Warn and return a tree for 0 (false) if so. We do
4681 this not only for its own sake, but to avoid having to test for this
4682 error case below. If we didn't, we might generate wrong code.
4684 For unsigned fields, the constant shifted right by the field length should
4685 be all zero. For signed fields, the high-order bits should agree with
4686 the sign bit. */
4688 if (lunsignedp)
4690 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4692 warning (0, "comparison is always %d due to width of bit-field",
4693 code == NE_EXPR);
4694 return constant_boolean_node (code == NE_EXPR, compare_type);
4697 else
4699 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4700 if (tem != 0 && tem != -1)
4702 warning (0, "comparison is always %d due to width of bit-field",
4703 code == NE_EXPR);
4704 return constant_boolean_node (code == NE_EXPR, compare_type);
4708 if (nbitpos < 0)
4709 return 0;
4711 /* Single-bit compares should always be against zero. */
4712 if (lbitsize == 1 && ! integer_zerop (rhs))
4714 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4715 rhs = build_int_cst (type, 0);
4718 /* Make a new bitfield reference, shift the constant over the
4719 appropriate number of bits and mask it with the computed mask
4720 (in case this was a signed field). If we changed it, make a new one. */
4721 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4722 nbitsize, nbitpos, 1, lreversep);
4724 rhs = const_binop (BIT_AND_EXPR,
4725 const_binop (LSHIFT_EXPR,
4726 fold_convert_loc (loc, unsigned_type, rhs),
4727 size_int (lbitpos)),
4728 mask);
4730 lhs = build2_loc (loc, code, compare_type,
4731 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4732 return lhs;
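
/* Illustration only, not part of fold-const.cc: the shape of the rewrite
   done by optimize_bit_field_compare, assuming a 3-bit field at bit
   position 4 of a 32-bit word.  Instead of extracting the field (a shift
   plus a mask) and comparing, compare the masked containing word against
   the constant shifted into place.  */

#include <cassert>
#include <cstdint>

int
main ()
{
  const unsigned lbitpos = 4, lbitsize = 3;
  const uint32_t mask = ((1u << lbitsize) - 1) << lbitpos;

  for (uint32_t word = 0; word < 0x1000; ++word)
    {
      uint32_t field = (word >> lbitpos) & ((1u << lbitsize) - 1);
      bool extracted = (field == 5);                     /* shift + compare */
      bool masked = ((word & mask) == (5u << lbitpos));  /* mask + compare */
      assert (extracted == masked);
    }
  return 0;
}
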
4735 /* Subroutine for fold_truth_andor_1: decode a field reference.
4737 If EXP is a comparison reference, we return the innermost reference.
4739 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4740 set to the starting bit number.
4742 If the innermost field can be completely contained in a mode-sized
4743 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4745 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4746 otherwise it is not changed.
4748 *PUNSIGNEDP is set to the signedness of the field.
4750 *PREVERSEP is set to the storage order of the field.
4752 *PMASK is set to the mask used. This is either contained in a
4753 BIT_AND_EXPR or derived from the width of the field.
4755 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4757 Return 0 if this is not a component reference or is one that we can't
4758 do anything with. */
4760 static tree
4761 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4762 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4763 int *punsignedp, int *preversep, int *pvolatilep,
4764 tree *pmask, tree *pand_mask)
4766 tree exp = *exp_;
4767 tree outer_type = 0;
4768 tree and_mask = 0;
4769 tree mask, inner, offset;
4770 tree unsigned_type;
4771 unsigned int precision;
4773 /* All the optimizations using this function assume integer fields.
4774 There are problems with FP fields since the type_for_size call
4775 below can fail for, e.g., XFmode. */
4776 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4777 return NULL_TREE;
4779 /* We are interested in the bare arrangement of bits, so strip everything
4780 that doesn't affect the machine mode. However, record the type of the
4781 outermost expression if it may matter below. */
4782 if (CONVERT_EXPR_P (exp)
4783 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4784 outer_type = TREE_TYPE (exp);
4785 STRIP_NOPS (exp);
4787 if (TREE_CODE (exp) == BIT_AND_EXPR)
4789 and_mask = TREE_OPERAND (exp, 1);
4790 exp = TREE_OPERAND (exp, 0);
4791 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4792 if (TREE_CODE (and_mask) != INTEGER_CST)
4793 return NULL_TREE;
4796 poly_int64 poly_bitsize, poly_bitpos;
4797 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4798 pmode, punsignedp, preversep, pvolatilep);
4799 if ((inner == exp && and_mask == 0)
4800 || !poly_bitsize.is_constant (pbitsize)
4801 || !poly_bitpos.is_constant (pbitpos)
4802 || *pbitsize < 0
4803 || offset != 0
4804 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4805 /* Reject out-of-bound accesses (PR79731). */
4806 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4807 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4808 *pbitpos + *pbitsize) < 0))
4809 return NULL_TREE;
4811 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4812 if (unsigned_type == NULL_TREE)
4813 return NULL_TREE;
4815 *exp_ = exp;
4817 /* If the number of bits in the reference is the same as the bitsize of
4818 the outer type, then the outer type gives the signedness. Otherwise
4819 (in case of a small bitfield) the signedness is unchanged. */
4820 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4821 *punsignedp = TYPE_UNSIGNED (outer_type);
4823 /* Compute the mask to access the bitfield. */
4824 precision = TYPE_PRECISION (unsigned_type);
4826 mask = build_int_cst_type (unsigned_type, -1);
4828 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4829 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4831 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4832 if (and_mask != 0)
4833 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4834 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4836 *pmask = mask;
4837 *pand_mask = and_mask;
4838 return inner;
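
/* Illustration only, not part of fold-const.cc: the mask construction used
   in decode_field_reference (and in optimize_bit_field_compare above), for
   an 8-bit field in a 32-bit unsigned type.  Shifting an all-ones value up
   by PRECISION - *PBITSIZE and back down again leaves exactly *PBITSIZE
   low-order one bits.  */

#include <cassert>
#include <cstdint>

int
main ()
{
  const unsigned precision = 32, bitsize = 8;
  uint32_t mask = ~(uint32_t) 0;   /* build_int_cst_type (type, -1) */
  mask <<= precision - bitsize;    /* the LSHIFT_EXPR step */
  mask >>= precision - bitsize;    /* the RSHIFT_EXPR step */
  assert (mask == 0xffu);
  return 0;
}
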
4841 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4842 bit positions and MASK is SIGNED. */
4844 static bool
4845 all_ones_mask_p (const_tree mask, unsigned int size)
4847 tree type = TREE_TYPE (mask);
4848 unsigned int precision = TYPE_PRECISION (type);
4850 /* If this function returns true when the type of the mask is
4851 UNSIGNED, then there will be errors. In particular see
4852 gcc.c-torture/execute/990326-1.c. There does not appear to be
4853 any documentation paper trail as to why this is so. But the
4854 pre-wide-int code worked with that restriction, and it has been
4855 preserved here. */
4856 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4857 return false;
4859 return wi::mask (size, false, precision) == wi::to_wide (mask);
4862 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4863 represents the sign bit of EXP's type. If EXP represents a sign
4864 or zero extension, also test VAL against the unextended type.
4865 The return value is the (sub)expression whose sign bit is VAL,
4866 or NULL_TREE otherwise. */
4868 tree
4869 sign_bit_p (tree exp, const_tree val)
4871 int width;
4872 tree t;
4874 /* Tree EXP must have an integral type. */
4875 t = TREE_TYPE (exp);
4876 if (! INTEGRAL_TYPE_P (t))
4877 return NULL_TREE;
4879 /* Tree VAL must be an integer constant. */
4880 if (TREE_CODE (val) != INTEGER_CST
4881 || TREE_OVERFLOW (val))
4882 return NULL_TREE;
4884 width = TYPE_PRECISION (t);
4885 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4886 return exp;
4888 /* Handle extension from a narrower type. */
4889 if (TREE_CODE (exp) == NOP_EXPR
4890 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4891 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4893 return NULL_TREE;
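
/* Illustration only, not part of fold-const.cc: recognizing the sign bit
   is what lets a masked test such as (x & 0x80000000) != 0 be treated as
   the sign test x < 0 on a 32-bit int.  A standalone check of that
   identity, assuming two's complement:  */

#include <cassert>
#include <cstdint>

int
main ()
{
  const int32_t probes[] = { 0, 1, -1, 1234, -1234, INT32_MIN, INT32_MAX };
  for (int32_t x : probes)
    assert (((x & INT32_MIN) != 0) == (x < 0));
  return 0;
}
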
4896 /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
4897 operand is simple enough to be evaluated unconditionally. */
4899 static bool
4900 simple_operand_p (const_tree exp)
4902 /* Strip any conversions that don't change the machine mode. */
4903 STRIP_NOPS (exp);
4905 return (CONSTANT_CLASS_P (exp)
4906 || TREE_CODE (exp) == SSA_NAME
4907 || (DECL_P (exp)
4908 && ! TREE_ADDRESSABLE (exp)
4909 && ! TREE_THIS_VOLATILE (exp)
4910 && ! DECL_NONLOCAL (exp)
4911 /* Don't regard global variables as simple. They may be
4912 allocated in ways unknown to the compiler (shared memory,
4913 #pragma weak, etc). */
4914 && ! TREE_PUBLIC (exp)
4915 && ! DECL_EXTERNAL (exp)
4916 /* Weakrefs are not safe to be read, since they can be NULL.
4917 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4918 have DECL_WEAK flag set. */
4919 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4920 /* Loading a static variable is unduly expensive, but global
4921 registers aren't expensive. */
4922 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4925 /* Determine if an operand is simple enough to be evaluated unconditionally.
4926 In addition to simple_operand_p, we assume that comparisons, conversions,
4927 and logic-not operations are simple, if their operands are simple, too. */
4929 bool
4930 simple_condition_p (tree exp)
4932 enum tree_code code;
4934 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4935 return false;
4937 while (CONVERT_EXPR_P (exp))
4938 exp = TREE_OPERAND (exp, 0);
4940 code = TREE_CODE (exp);
4942 if (TREE_CODE_CLASS (code) == tcc_comparison)
4943 return (simple_operand_p (TREE_OPERAND (exp, 0))
4944 && simple_operand_p (TREE_OPERAND (exp, 1)));
4946 if (code == TRUTH_NOT_EXPR)
4947 return simple_condition_p (TREE_OPERAND (exp, 0));
4949 return simple_operand_p (exp);
4953 /* The following functions are subroutines to fold_range_test and allow it to
4954 try to change a logical combination of comparisons into a range test.
4956 For example, both
4957 X == 2 || X == 3 || X == 4 || X == 5
4958 and
4959 X >= 2 && X <= 5
4960 are converted to
4961 (unsigned) (X - 2) <= 3
4963 We describe each set of comparisons as being either inside or outside
4964 a range, using a variable named like IN_P, and then describe the
4965 range with a lower and upper bound. If one of the bounds is omitted,
4966 it represents either the highest or lowest value of the type.
4968 In the comments below, we represent a range by two numbers in brackets
4969 preceded by a "+" to designate being inside that range, or a "-" to
4970 designate being outside that range, so the condition can be inverted by
4971 flipping the prefix. An omitted bound is represented by a "-". For
4972 example, "- [-, 10]" means being outside the range starting at the lowest
4973 possible value and ending at 10, in other words, being greater than 10.
4974 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4975 always false.
4977 We set up things so that the missing bounds are handled in a consistent
4978 manner so neither a missing bound nor "true" and "false" need to be
4979 handled using a special case. */
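
/* Illustration only, not part of fold-const.cc: the range-test identity
   that fold_range_test and its helpers implement.  All three forms below
   agree because unsigned subtraction wraps: values below 2 wrap around to
   very large numbers and fail the <= 3 test.  A standalone check:  */

#include <cassert>

int
main ()
{
  for (int x = -1000; x <= 1000; ++x)
    {
      bool ors = (x == 2 || x == 3 || x == 4 || x == 5);
      bool range = (x >= 2 && x <= 5);
      bool folded = ((unsigned) (x - 2) <= 3);
      assert (ors == range && range == folded);
    }
  return 0;
}
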
4981 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4982 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4983 and UPPER1_P are nonzero if the respective argument is an upper bound
4984 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4985 must be specified for a comparison. ARG1 will be converted to ARG0's
4986 type if both are specified. */
4988 static tree
4989 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4990 tree arg1, int upper1_p)
4992 tree tem;
4993 int result;
4994 int sgn0, sgn1;
4996 /* If neither arg represents infinity, do the normal operation.
4997 Else, if not a comparison, return infinity. Else handle the special
4998 comparison rules. Note that most of the cases below won't occur, but
4999 are handled for consistency. */
5001 if (arg0 != 0 && arg1 != 0)
5003 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5004 arg0, fold_convert (TREE_TYPE (arg0), arg1));
5005 STRIP_NOPS (tem);
5006 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5009 if (TREE_CODE_CLASS (code) != tcc_comparison)
5010 return 0;
5012 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5013 for neither. In real maths, we cannot assume open ended ranges are
5014 the same. But, this is computer arithmetic, where numbers are finite.
5015 We can therefore make the transformation of any unbounded range with
5016 the value Z, Z being greater than any representable number. This permits
5017 us to treat unbounded ranges as equal. */
5018 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5019 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5020 switch (code)
5022 case EQ_EXPR:
5023 result = sgn0 == sgn1;
5024 break;
5025 case NE_EXPR:
5026 result = sgn0 != sgn1;
5027 break;
5028 case LT_EXPR:
5029 result = sgn0 < sgn1;
5030 break;
5031 case LE_EXPR:
5032 result = sgn0 <= sgn1;
5033 break;
5034 case GT_EXPR:
5035 result = sgn0 > sgn1;
5036 break;
5037 case GE_EXPR:
5038 result = sgn0 >= sgn1;
5039 break;
5040 default:
5041 gcc_unreachable ();
5044 return constant_boolean_node (result, type);
5047 /* Helper routine for make_range. Perform one step for it, return
5048 new expression if the loop should continue or NULL_TREE if it should
5049 stop. */
5051 tree
5052 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5053 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5054 bool *strict_overflow_p)
5056 tree arg0_type = TREE_TYPE (arg0);
5057 tree n_low, n_high, low = *p_low, high = *p_high;
5058 int in_p = *p_in_p, n_in_p;
5060 switch (code)
5062 case TRUTH_NOT_EXPR:
5063 /* We can only do something if the range is testing for zero. */
5064 if (low == NULL_TREE || high == NULL_TREE
5065 || ! integer_zerop (low) || ! integer_zerop (high))
5066 return NULL_TREE;
5067 *p_in_p = ! in_p;
5068 return arg0;
5070 case EQ_EXPR: case NE_EXPR:
5071 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5072 /* We can only do something if the range is testing for zero
5073 and if the second operand is an integer constant. Note that
5074 saying something is "in" the range we make is done by
5075 complementing IN_P since it will set in the initial case of
5076 being not equal to zero; "out" is leaving it alone. */
5077 if (low == NULL_TREE || high == NULL_TREE
5078 || ! integer_zerop (low) || ! integer_zerop (high)
5079 || TREE_CODE (arg1) != INTEGER_CST)
5080 return NULL_TREE;
5082 switch (code)
5084 case NE_EXPR: /* - [c, c] */
5085 low = high = arg1;
5086 break;
5087 case EQ_EXPR: /* + [c, c] */
5088 in_p = ! in_p, low = high = arg1;
5089 break;
5090 case GT_EXPR: /* - [-, c] */
5091 low = 0, high = arg1;
5092 break;
5093 case GE_EXPR: /* + [c, -] */
5094 in_p = ! in_p, low = arg1, high = 0;
5095 break;
5096 case LT_EXPR: /* - [c, -] */
5097 low = arg1, high = 0;
5098 break;
5099 case LE_EXPR: /* + [-, c] */
5100 in_p = ! in_p, low = 0, high = arg1;
5101 break;
5102 default:
5103 gcc_unreachable ();
5106 /* If this is an unsigned comparison, we also know that EXP is
5107 greater than or equal to zero. We base the range tests we make
5108 on that fact, so we record it here so we can parse existing
5109 range tests. We test arg0_type since often the return type
5110 of, e.g. EQ_EXPR, is boolean. */
5111 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5113 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5114 in_p, low, high, 1,
5115 build_int_cst (arg0_type, 0),
5116 NULL_TREE))
5117 return NULL_TREE;
5119 in_p = n_in_p, low = n_low, high = n_high;
5121 /* If the high bound is missing, but we have a nonzero low
5122 bound, reverse the range so it goes from zero to the low bound
5123 minus 1. */
5124 if (high == 0 && low && ! integer_zerop (low))
5126 in_p = ! in_p;
5127 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5128 build_int_cst (TREE_TYPE (low), 1), 0);
5129 low = build_int_cst (arg0_type, 0);
5133 *p_low = low;
5134 *p_high = high;
5135 *p_in_p = in_p;
5136 return arg0;
5138 case NEGATE_EXPR:
5139 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5140 low and high are non-NULL, then normalize will DTRT. */
5141 if (!TYPE_UNSIGNED (arg0_type)
5142 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5144 if (low == NULL_TREE)
5145 low = TYPE_MIN_VALUE (arg0_type);
5146 if (high == NULL_TREE)
5147 high = TYPE_MAX_VALUE (arg0_type);
5150 /* (-x) IN [a,b] -> x in [-b, -a] */
5151 n_low = range_binop (MINUS_EXPR, exp_type,
5152 build_int_cst (exp_type, 0),
5153 0, high, 1);
5154 n_high = range_binop (MINUS_EXPR, exp_type,
5155 build_int_cst (exp_type, 0),
5156 0, low, 0);
5157 if (n_high != 0 && TREE_OVERFLOW (n_high))
5158 return NULL_TREE;
5159 goto normalize;
5161 case BIT_NOT_EXPR:
5162 /* ~ X -> -X - 1 */
5163 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5164 build_int_cst (exp_type, 1));
5166 case PLUS_EXPR:
5167 case MINUS_EXPR:
5168 if (TREE_CODE (arg1) != INTEGER_CST)
5169 return NULL_TREE;
5171 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5172 move a constant to the other side. */
5173 if (!TYPE_UNSIGNED (arg0_type)
5174 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5175 return NULL_TREE;
5177 /* If EXP is signed, any overflow in the computation is undefined,
5178 so we don't worry about it so long as our computations on
5179 the bounds don't overflow. For unsigned, overflow is defined
5180 and this is exactly the right thing. */
5181 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5182 arg0_type, low, 0, arg1, 0);
5183 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5184 arg0_type, high, 1, arg1, 0);
5185 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5186 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5187 return NULL_TREE;
5189 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5190 *strict_overflow_p = true;
5192 normalize:
5193 /* Check for an unsigned range which has wrapped around the maximum
5194 value thus making n_high < n_low, and normalize it. */
5195 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5197 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5198 build_int_cst (TREE_TYPE (n_high), 1), 0);
5199 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5200 build_int_cst (TREE_TYPE (n_low), 1), 0);
5202 /* If the range is of the form +/- [ x+1, x ], we won't
5203 be able to normalize it. But then, it represents the
5204 whole range or the empty set, so make it
5205 +/- [ -, - ]. */
5206 if (tree_int_cst_equal (n_low, low)
5207 && tree_int_cst_equal (n_high, high))
5208 low = high = 0;
5209 else
5210 in_p = ! in_p;
5212 else
5213 low = n_low, high = n_high;
5215 *p_low = low;
5216 *p_high = high;
5217 *p_in_p = in_p;
5218 return arg0;
5220 CASE_CONVERT:
5221 case NON_LVALUE_EXPR:
5222 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5223 return NULL_TREE;
5225 if (! INTEGRAL_TYPE_P (arg0_type)
5226 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5227 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5228 return NULL_TREE;
5230 n_low = low, n_high = high;
5232 if (n_low != 0)
5233 n_low = fold_convert_loc (loc, arg0_type, n_low);
5235 if (n_high != 0)
5236 n_high = fold_convert_loc (loc, arg0_type, n_high);
5238 /* If we're converting arg0 from an unsigned type to exp's
5239 signed type, we will be doing the comparison as unsigned.
5240 The tests above have already verified that LOW and HIGH
5241 are both positive.
5243 So we have to ensure that we will handle large unsigned
5244 values the same way that the current signed bounds treat
5245 negative values. */
5247 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5249 tree high_positive;
5250 tree equiv_type;
5251 /* For fixed-point modes, we need to pass the saturating flag
5252 as the 2nd parameter. */
5253 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5254 equiv_type
5255 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5256 TYPE_SATURATING (arg0_type));
5257 else
5258 equiv_type
5259 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5261 /* A range without an upper bound is, naturally, unbounded.
5262 Since convert would have cropped a very large value, use
5263 the max value for the destination type. */
5264 high_positive
5265 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5266 : TYPE_MAX_VALUE (arg0_type);
5268 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5269 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5270 fold_convert_loc (loc, arg0_type,
5271 high_positive),
5272 build_int_cst (arg0_type, 1));
5274 /* If the low bound is specified, "and" the range with the
5275 range for which the original unsigned value will be
5276 positive. */
5277 if (low != 0)
5279 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5280 1, fold_convert_loc (loc, arg0_type,
5281 integer_zero_node),
5282 high_positive))
5283 return NULL_TREE;
5285 in_p = (n_in_p == in_p);
5287 else
5289 /* Otherwise, "or" the range with the range of the input
5290 that will be interpreted as negative. */
5291 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5292 1, fold_convert_loc (loc, arg0_type,
5293 integer_zero_node),
5294 high_positive))
5295 return NULL_TREE;
5297 in_p = (in_p != n_in_p);
5301 /* Otherwise, if we are converting arg0 from a signed type to exp's
5302 unsigned type, we will do the comparison as signed. If
5303 high is non-NULL, we punt above if it doesn't fit in the signed
5304 type, so if we get through here, +[-, high] or +[low, high] are
5305 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5306 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5307 high is not, the +[low, -] range is equivalent to union of
5308 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5309 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5310 low being 0, which should be treated as [-, -]. */
5311 else if (TYPE_UNSIGNED (exp_type)
5312 && !TYPE_UNSIGNED (arg0_type)
5313 && low
5314 && !high)
5316 if (integer_zerop (low))
5317 n_low = NULL_TREE;
5318 else
5320 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5321 n_low, build_int_cst (arg0_type, -1));
5322 n_low = build_zero_cst (arg0_type);
5323 in_p = !in_p;
5327 *p_low = n_low;
5328 *p_high = n_high;
5329 *p_in_p = in_p;
5330 return arg0;
5332 default:
5333 return NULL_TREE;
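
/* Illustration only, not part of fold-const.cc: the "normalize" step of
   make_range_step on a concrete unsigned example.  Moving the constant in
   x + 10 <= 19 to the other side gives the wrapped bounds [0xFFFFFFF6, 9],
   i.e. n_high < n_low, which normalizes to the inverted range
   "x not in [10, 0xFFFFFFF5]".  */

#include <cassert>
#include <cstdint>

int
main ()
{
  const uint32_t probes[] = { 0u, 9u, 10u, 19u, 20u,
			      0xFFFFFFF5u, 0xFFFFFFF6u, 0xFFFFFFFFu };
  for (uint32_t x : probes)
    {
      bool orig = (x + 10u <= 19u);                       /* wraps around */
      bool normalized = !(x >= 10u && x <= 0xFFFFFFF5u);  /* - [10, ...] */
      assert (orig == normalized);
    }
  return 0;
}
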
5337 /* Given EXP, a logical expression, set the range it is testing into
5338 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5339 actually being tested. *PLOW and *PHIGH will be made of the same
5340 type as the returned expression. If EXP is not a comparison, we
5341 will most likely not be returning a useful value and range. Set
5342 *STRICT_OVERFLOW_P to true if the return value is only valid
5343 because signed overflow is undefined; otherwise, do not change
5344 *STRICT_OVERFLOW_P. */
5346 tree
5347 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5348 bool *strict_overflow_p)
5350 enum tree_code code;
5351 tree arg0, arg1 = NULL_TREE;
5352 tree exp_type, nexp;
5353 int in_p;
5354 tree low, high;
5355 location_t loc = EXPR_LOCATION (exp);
5357 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5358 and see if we can refine the range. Some of the cases below may not
5359 happen, but it doesn't seem worth worrying about this. We "continue"
5360 the outer loop when we've changed something; otherwise we "break"
5361 the switch, which will "break" the while. */
5363 in_p = 0;
5364 low = high = build_int_cst (TREE_TYPE (exp), 0);
5366 while (1)
5368 code = TREE_CODE (exp);
5369 exp_type = TREE_TYPE (exp);
5370 arg0 = NULL_TREE;
5372 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5374 if (TREE_OPERAND_LENGTH (exp) > 0)
5375 arg0 = TREE_OPERAND (exp, 0);
5376 if (TREE_CODE_CLASS (code) == tcc_binary
5377 || TREE_CODE_CLASS (code) == tcc_comparison
5378 || (TREE_CODE_CLASS (code) == tcc_expression
5379 && TREE_OPERAND_LENGTH (exp) > 1))
5380 arg1 = TREE_OPERAND (exp, 1);
5382 if (arg0 == NULL_TREE)
5383 break;
5385 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5386 &high, &in_p, strict_overflow_p);
5387 if (nexp == NULL_TREE)
5388 break;
5389 exp = nexp;
5392 /* If EXP is a constant, we can evaluate whether this is true or false. */
5393 if (TREE_CODE (exp) == INTEGER_CST)
5395 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5396 exp, 0, low, 0))
5397 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5398 exp, 1, high, 1)));
5399 low = high = 0;
5400 exp = 0;
5403 *pin_p = in_p, *plow = low, *phigh = high;
5404 return exp;
5407 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5408 a bitwise check i.e. when
5409 LOW == 0xXX...X00...0
5410 HIGH == 0xXX...X11...1
5411 Return corresponding mask in MASK and stem in VALUE. */
5413 static bool
5414 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5415 tree *value)
5417 if (TREE_CODE (low) != INTEGER_CST
5418 || TREE_CODE (high) != INTEGER_CST)
5419 return false;
5421 unsigned prec = TYPE_PRECISION (type);
5422 wide_int lo = wi::to_wide (low, prec);
5423 wide_int hi = wi::to_wide (high, prec);
5425 wide_int end_mask = lo ^ hi;
5426 if ((end_mask & (end_mask + 1)) != 0
5427 || (lo & end_mask) != 0)
5428 return false;
5430 wide_int stem_mask = ~end_mask;
5431 wide_int stem = lo & stem_mask;
5432 if (stem != (hi & stem_mask))
5433 return false;
5435 *mask = wide_int_to_tree (type, stem_mask);
5436 *value = wide_int_to_tree (type, stem);
5438 return true;
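
/* Illustration only, not part of fold-const.cc: maskable_range_p on a
   concrete pair.  LOW = 0x20 and HIGH = 0x3f differ only in a block of
   trailing bits, so LOW <= X && X <= HIGH collapses to the bitwise check
   (X & ~0x1f) == 0x20.  A standalone verification:  */

#include <cassert>
#include <cstdint>

int
main ()
{
  const uint32_t lo = 0x20, hi = 0x3f;
  const uint32_t end_mask = lo ^ hi;           /* 0x1f: trailing ones */
  assert ((end_mask & (end_mask + 1)) == 0);   /* contiguous low bits */
  assert ((lo & end_mask) == 0);               /* LOW ends in zeros */
  const uint32_t stem_mask = ~end_mask;
  const uint32_t stem = lo & stem_mask;
  assert (stem == (hi & stem_mask));

  for (uint32_t x = 0; x < 0x100; ++x)
    assert ((x >= lo && x <= hi) == ((x & stem_mask) == stem));
  return 0;
}
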
5441 /* Helper routine for build_range_check and match.pd. Return the type to
5442 perform the check or NULL if it shouldn't be optimized. */
5444 tree
5445 range_check_type (tree etype)
5447 /* First make sure that arithmetic in this type is valid, then make sure
5448 that it wraps around. */
5449 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5450 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5452 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5454 tree utype, minv, maxv;
5456 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5457 for the type in question, as we rely on this here. */
5458 utype = unsigned_type_for (etype);
5459 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5460 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5461 build_int_cst (TREE_TYPE (maxv), 1), 1);
5462 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5464 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5465 minv, 1, maxv, 1)))
5466 etype = utype;
5467 else
5468 return NULL_TREE;
5470 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5471 etype = unsigned_type_for (etype);
5472 return etype;
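
/* Illustration only, not part of fold-const.cc: the property that
   range_check_type verifies before using the unsigned counterpart of a
   signed type, checked directly for int:  */

#include <cassert>
#include <climits>

int
main ()
{
  /* (unsigned) INT_MAX + 1 must wrap to (unsigned) INT_MIN, so that
     subtracting the low bound is order-preserving over the whole type.  */
  assert ((unsigned) INT_MAX + 1u == (unsigned) INT_MIN);
  return 0;
}
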
5475 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5476 type, TYPE, return an expression to test if EXP is in (or out of, depending
5477 on IN_P) the range. Return 0 if the test couldn't be created. */
5479 tree
5480 build_range_check (location_t loc, tree type, tree exp, int in_p,
5481 tree low, tree high)
5483 tree etype = TREE_TYPE (exp), mask, value;
5485 /* Disable this optimization for function pointer expressions
5486 on targets that require function pointer canonicalization. */
5487 if (targetm.have_canonicalize_funcptr_for_compare ()
5488 && POINTER_TYPE_P (etype)
5489 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5490 return NULL_TREE;
5492 if (! in_p)
5494 value = build_range_check (loc, type, exp, 1, low, high);
5495 if (value != 0)
5496 return invert_truthvalue_loc (loc, value);
5498 return 0;
5501 if (low == 0 && high == 0)
5502 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5504 if (low == 0)
5505 return fold_build2_loc (loc, LE_EXPR, type, exp,
5506 fold_convert_loc (loc, etype, high));
5508 if (high == 0)
5509 return fold_build2_loc (loc, GE_EXPR, type, exp,
5510 fold_convert_loc (loc, etype, low));
5512 if (operand_equal_p (low, high, 0))
5513 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5514 fold_convert_loc (loc, etype, low));
5516 if (TREE_CODE (exp) == BIT_AND_EXPR
5517 && maskable_range_p (low, high, etype, &mask, &value))
5518 return fold_build2_loc (loc, EQ_EXPR, type,
5519 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5520 exp, mask),
5521 value);
5523 if (integer_zerop (low))
5525 if (! TYPE_UNSIGNED (etype))
5527 etype = unsigned_type_for (etype);
5528 high = fold_convert_loc (loc, etype, high);
5529 exp = fold_convert_loc (loc, etype, exp);
5531 return build_range_check (loc, type, exp, 1, 0, high);
5534 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5535 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5537 int prec = TYPE_PRECISION (etype);
5539 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5541 if (TYPE_UNSIGNED (etype))
5543 tree signed_etype = signed_type_for (etype);
5544 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5545 etype
5546 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5547 else
5548 etype = signed_etype;
5549 exp = fold_convert_loc (loc, etype, exp);
5551 return fold_build2_loc (loc, GT_EXPR, type, exp,
5552 build_int_cst (etype, 0));
5556 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5557 This requires wrap-around arithmetic for the type of the expression. */
5558 etype = range_check_type (etype);
5559 if (etype == NULL_TREE)
5560 return NULL_TREE;
5562 high = fold_convert_loc (loc, etype, high);
5563 low = fold_convert_loc (loc, etype, low);
5564 exp = fold_convert_loc (loc, etype, exp);
5566 value = const_binop (MINUS_EXPR, high, low);
5568 if (value != 0 && !TREE_OVERFLOW (value))
5569 return build_range_check (loc, type,
5570 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5571 1, build_int_cst (etype, 0), value);
5573 return 0;
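
/* Illustration only, not part of fold-const.cc: the special case in
   build_range_check that turns c >= 1 && c <= 127 on an 8-bit unsigned
   value into a single signed comparison.  Reinterpreting the byte as
   signed maps 128..255 to negative numbers while 0 stays 0, so one
   > 0 test covers both bounds.  Assumes two's complement:  */

#include <cassert>

int
main ()
{
  for (unsigned c = 0; c < 256; ++c)
    {
      bool range = (c >= 1 && c <= 127);
      bool folded = ((signed char) c > 0);
      assert (range == folded);
    }
  return 0;
}
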

/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
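
/* A hedged sketch (not part of GCC) of the "infinite case" these two
   helpers punt on: within a fixed-width type there is no predecessor of
   the minimum value, so the caller must treat that bound as open-ended.
   The names below are hypothetical; illustrative only, not compiled.  */
#if 0
static bool
int_predecessor_sketch (int val, int *pred)
{
  if (val == INT_MIN)	/* No predecessor exists within int: punt.  */
    return false;
  *pred = val - 1;
  return true;
}
#endif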

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* If the second range is != high1 where high1 is the type maximum of
     the type, try first merging with < high1 range.  */
  if (low1
      && high1
      && TREE_CODE (low1) == INTEGER_CST
      && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
      && operand_equal_p (low1, high1, 0))
    {
      if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
			   !in1_p, NULL_TREE, range_predecessor (low1)))
	return true;
      /* Similarly for the second range != low1 where low1 is the type minimum
	 of the type, try first merging with > low1 range.  */
      if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
			   !in1_p, range_successor (low1), NULL_TREE))
	return true;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (low0)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (high1)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
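
/* A hedged, standalone sketch (not part of GCC) of the simplest case
   handled above: both ranges included (in0_p && in1_p) with range 0
   already known to start first.  The result is then empty, the second
   range, or the intersection [lo1, hi0], mirroring the first branch of
   the four-way split.  All names are illustrative; not compiled.  */
#if 0
struct irange_sketch { int lo, hi; bool valid; };

static irange_sketch
merge_in_in_sketch (int lo0, int hi0, int lo1, int hi1)
{
  irange_sketch r = { 0, 0, true };
  if (hi0 < lo1)		/* no_overlap: the conjunction is false.  */
    r.valid = false;
  else if (hi1 <= hi0)		/* subset: the second range is the result.  */
    r.lo = lo1, r.hi = hi1;
  else				/* Otherwise the intersection [lo1, hi0].  */
    r.lo = lo1, r.hi = hi0;
  return r;
}
#endif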

/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
   are the three operands of the COND_EXPR.  This function is
   being used also to optimize A op B ? C : A, by reversing the
   comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				enum tree_code comp_code,
				tree arg00, tree arg01, tree arg1, tree arg2)
{
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (type)
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return fold_convert_loc (loc, type, negate_expr (tem));
      case NE_EXPR:
      case LTGT_EXPR:
	return fold_convert_loc (loc, type, arg1);
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return fold_convert_loc (loc, type, tem);
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* FALLTHRU */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	    && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
	  {
	    /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
	       is not, invokes UB both in abs and in the negation of it.
	       So, use ABSU_EXPR instead.  */
	    tree utype = unsigned_type_for (TREE_TYPE (arg1));
	    tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
	    tem = negate_expr (tem);
	    return fold_convert_loc (loc, type, tem);
	  }
	else
	  {
	    tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	    return negate_expr (fold_convert_loc (loc, type, tem));
	  }
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (type)
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return fold_convert_loc (loc, type, arg1);
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (type)
      && operand_equal_for_comparison_p (arg01, arg2)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (! lang_GNU_CXX ()
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      switch (comp_code)
	{
	case EQ_EXPR:
	  return fold_convert_loc (loc, type, arg2);
	case NE_EXPR:
	  return fold_convert_loc (loc, type, arg1);
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg2);
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg1);
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  return NULL_TREE;
}
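
/* A hedged sketch (not part of GCC) of the ABSU_EXPR rationale above:
   for 32-bit int, -abs (INT_MIN) overflows inside abs, but computing
   the absolute value in the unsigned type first is fully defined, and
   the final conversion back is merely implementation-defined (and
   defined by GCC).  Illustrative only, not compiled.  */
#if 0
static int
neg_abs_sketch (int a)	/* Computes A <= 0 ? A : -A, i.e. -abs (a).  */
{
  unsigned int ua = a < 0 ? 0u - (unsigned int) a : (unsigned int) a;
  return (int) (0u - ua);	/* Correct even for a == INT_MIN.  */
}
#endif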

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  /* If op0 is known true or false and this is a short-circuiting
     operation we must not merge with op1 since that makes side-effects
     unconditional.  So special-case this.  */
  if (!lhs
      && ((code == TRUTH_ORIF_EXPR && in0_p)
	  || (code == TRUTH_ANDIF_EXPR && !in0_p)))
    return op0;
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && (tem = (build_range_check (loc, type,
				    lhs != 0 ? lhs
				    : rhs != 0 ? rhs : integer_zero_node,
				    in_p, low, high))) != 0)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  if (param_logical_op_non_short_circuit != -1)
    logical_op_non_short_circuit
      = param_logical_op_non_short_circuit;
  if (logical_op_non_short_circuit
      && !sanitize_coverage_p ()
      && lhs != 0 && rhs != 0
      && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
      && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if ((lhs = build_range_check (loc, type, common,
					or_op ? ! in0_p : in0_p,
					low0, high0)) != 0
	      && (rhs = build_range_check (loc, type, common,
					   or_op ? ! in1_p : in1_p,
					   low1, high1)) != 0)
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
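
/* A hedged sketch (not part of GCC) of the rewrite fold_range_test aims
   for: both operands test the same object, so the two short-circuit
   branches merge into the single unsigned range check produced by
   build_range_check.  Illustrative only, not compiled.  */
#if 0
static int
is_digit_sketch (int ch)
{
  /* ch >= '0' && ch <= '9' becomes one comparison and no branch.  */
  return (unsigned int) ch - '0' <= 9u;
}
#endif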

/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c),
			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
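
/* A hedged sketch (not part of GCC) of the shifting game unextend plays,
   specialized to a 32-bit mode: extract bit P-1 of C, move it to the
   mode's sign bit, then arithmetic-right-shift so it smears across the
   bits above the field.  XORing that with C clears the high bits exactly
   when C was sign-extended from P bits.  Relies on GCC-defined behavior
   for the narrowing cast and arithmetic shift; illustrative only.  */
#if 0
static unsigned int
unextend_sketch (unsigned int c, int p)		/* Assumes 0 < p < 32.  */
{
  unsigned int sign = (c >> (p - 1)) & 1u;	/* The field's sign bit.  */
  int smear = (int) (sign << 31) >> (32 - p - 1); /* Bits p..31 if set.  */
  return c ^ (unsigned int) smear;
}
#endif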

/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
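
/* A hedged sketch (not part of GCC) of the simplification above on plain
   integer comparisons: in (a && b) || !b the inner b is redundant, since
   whenever !b is false the disjunction can only hold if a does.
   Illustrative only, not compiled.  */
#if 0
static bool
merged_sketch (bool a, int x, int y)
{
  /* (a && x < y) || x >= y simplifies to a || x >= y.  */
  return a || x >= y;
}
#endif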

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  scalar_int_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, &ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &ll_reversep, &volatilep,
				     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, &lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &lr_reversep, &volatilep,
				     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, &rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &rl_reversep, &volatilep,
				     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, &rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &rr_reversep, &volatilep,
				     &rr_mask, &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
	   || lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
		      volatilep, &lnmode))
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));
  if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
    return 0;

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (l_const == NULL_TREE)
	return 0;
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (r_const == NULL_TREE)
	return 0;
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size, signedness or storage order
     mismatch occurs between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  || ll_reversep != lr_reversep
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
			  volatilep, &rnmode))
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));
      if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
	return 0;

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize
	  && xll_bitpos == xlr_bitpos
	  && lnbitpos >= 0
	  && rnbitpos >= 0)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
				    lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp, ll_reversep);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
				    rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp, lr_reversep);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if (((ll_bitsize + ll_bitpos == rl_bitpos
	    && lr_bitsize + lr_bitpos == rr_bitpos)
	   || (ll_bitpos == rl_bitpos + rl_bitsize
	       && lr_bitpos == rr_bitpos + rr_bitsize))
	  && ll_bitpos >= 0
	  && rl_bitpos >= 0
	  && lr_bitpos >= 0
	  && rr_bitpos >= 0)
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos),
				    ll_unsignedp, ll_reversep);
	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos),
				    lr_unsignedp, lr_reversep);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
	  if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
	    return 0;

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  if (lnbitpos < 0)
    return 0;

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, ll_arg,
			       lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
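
/* A hedged sketch (not part of GCC) of the payoff described above: two
   adjacent narrow field tests become one wider masked compare.  Layout,
   alignment, endianness and padding are exactly what the code above
   checks; the constant below assumes a little-endian layout.  All names
   are illustrative; not compiled.  */
#if 0
struct s_sketch { unsigned char a, b; };

static int
two_tests_sketch (const struct s_sketch *p)
{
  /* p->a == 2 && p->b == 4 done as one 16-bit load and compare.  */
  unsigned short v;
  __builtin_memcpy (&v, p, sizeof v);
  return v == (unsigned short) ((4 << 8) | 2);
}
#endif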

/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0
		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
				TYPE_SIGN (type)))
	{
	  tree tem = const_binop (code, fold_convert (ctype, t),
				  fold_convert (ctype, c));
	  /* If the multiplication overflowed, we lost information on it.
	     See PR68142 and PR69845.  */
	  if (TREE_OVERFLOW (tem))
	    return NULL_TREE;
	  return tem;
	}
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
	break;
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (t1 = extract_muldiv (op0, t2, code,
				   code == MULT_EXPR ? ctype : NULL_TREE,
				   strict_overflow_p)) != 0)
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
			wi::to_wide (op1))
	  && (t1 = fold_convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1))) != 0
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && TYPE_OVERFLOW_WRAPS (ctype)
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      std::swap (op0, op1);
	      std::swap (t1, t2);
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
				TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
	 But fold_plusminus_mult_expr would factor back any power-of-two
	 value so do not distribute in the first place in this case.  */
      if (code == MULT_EXPR
	  && TYPE_OVERFLOW_WRAPS (ctype)
	  && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
				TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && TYPE_OVERFLOW_WRAPS (ctype)
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && TYPE_OVERFLOW_WRAPS (ctype)
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  wi::overflow_type overflow_mul;
	  signop sign = TYPE_SIGN (ctype);
	  unsigned prec = TYPE_PRECISION (ctype);
	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
				  wi::to_wide (c, prec),
				  sign, &overflow_mul);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && !TYPE_OVERFLOW_SANITIZED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
				 TYPE_SIGN (type)))
	    {
	      *strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
				      TYPE_SIGN (type)))
	    {
	      *strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
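
/* A hedged sketch (not part of GCC) of the headline transformation from
   the comment above extract_muldiv: a division distributed onto a sum of
   multiples of the divisor.  Valid only when the original expression
   cannot overflow or overflow is undefined anyway; illustrative only.  */
#if 0
static int
extract_muldiv_sketch (int x, int y)
{
  /* (x * 8 + y * 16) / 4 folds to x * 2 + y * 4.  */
  return x * 2 + y * 4;
}
#endif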

/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
7152 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7153 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7154 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7155 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7156 COND is the first argument to CODE; otherwise (as in the example
7157 given here), it is the second argument. TYPE is the type of the
7158 original expression. Return NULL_TREE if no simplification is
7159 possible. */
7161 static tree
7162 fold_binary_op_with_conditional_arg (location_t loc,
7163 enum tree_code code,
7164 tree type, tree op0, tree op1,
7165 tree cond, tree arg, int cond_first_p)
7167 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7168 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7169 tree test, true_value, false_value;
7170 tree lhs = NULL_TREE;
7171 tree rhs = NULL_TREE;
7172 enum tree_code cond_code = COND_EXPR;
7174 /* Do not move possibly trapping operations into the conditional as this
7175 pessimizes code and causes gimplification issues when applied late. */
7176 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7177 ANY_INTEGRAL_TYPE_P (type)
7178 && TYPE_OVERFLOW_TRAPS (type), op1))
7179 return NULL_TREE;
7181 if (TREE_CODE (cond) == COND_EXPR
7182 || TREE_CODE (cond) == VEC_COND_EXPR)
7184 test = TREE_OPERAND (cond, 0);
7185 true_value = TREE_OPERAND (cond, 1);
7186 false_value = TREE_OPERAND (cond, 2);
7187 /* If this operand throws an expression, then it does not make
7188 sense to try to perform a logical or arithmetic operation
7189 involving it. */
7190 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7191 lhs = true_value;
7192 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7193 rhs = false_value;
7195 else if (!(TREE_CODE (type) != VECTOR_TYPE
7196 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7198 tree testtype = TREE_TYPE (cond);
7199 test = cond;
7200 true_value = constant_boolean_node (true, testtype);
7201 false_value = constant_boolean_node (false, testtype);
7203 else
7204 /* Detect the case of mixing vector and scalar types - bail out. */
7205 return NULL_TREE;
7207 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7208 cond_code = VEC_COND_EXPR;
7210 /* This transformation is only worthwhile if we don't have to wrap ARG
7211 in a SAVE_EXPR and the operation can be simplified without recursing
7212 on at least one of the branches once its pushed inside the COND_EXPR. */
7213 if (!TREE_CONSTANT (arg)
7214 && (TREE_SIDE_EFFECTS (arg)
7215 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7216 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7217 return NULL_TREE;
7219 arg = fold_convert_loc (loc, arg_type, arg);
7220 if (lhs == 0)
7222 true_value = fold_convert_loc (loc, cond_type, true_value);
7223 if (cond_first_p)
7224 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7225 else
7226 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7228 if (rhs == 0)
7230 false_value = fold_convert_loc (loc, cond_type, false_value);
7231 if (cond_first_p)
7232 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7233 else
7234 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7237 /* Check that we have simplified at least one of the branches. */
7238 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7239 return NULL_TREE;
7241 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
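As an illustration of the transform implemented above, here is a minimal standalone C sketch (not part of fold-const.cc; the function name is invented):

/* Before folding: a + (b ? x : y).  After folding, the addition is
   distributed into both arms, so that at least one arm can simplify
   further (e.g. when X or Y is constant).  */
int
distribute_into_cond (int b, int a, int x, int y)
{
  return b ? (a + x) : (a + y);
}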
7245 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7247 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7248 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7249 if ARG - ZERO_ARG is the same as ARG.
7251 If ARG is NULL, check for any value of type TYPE.
7253 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7254 and finite. The problematic cases are when X is zero, and its mode
7255 has signed zeros. In the case of rounding towards -infinity,
7256 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7257 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7259 bool
7260 fold_real_zero_addition_p (const_tree type, const_tree arg,
7261 const_tree zero_arg, int negate)
7263 if (!real_zerop (zero_arg))
7264 return false;
7266 /* Don't allow the fold with -fsignaling-nans. */
7267 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7268 return false;
7270 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7271 if (!HONOR_SIGNED_ZEROS (type))
7272 return true;
7274 /* There is no case that is safe for all rounding modes. */
7275 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7276 return false;
7278 /* In a vector or complex, we would need to check the sign of all zeros. */
7279 if (TREE_CODE (zero_arg) == VECTOR_CST)
7280 zero_arg = uniform_vector_p (zero_arg);
7281 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7282 return false;
7284 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7285 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7286 negate = !negate;
7288 /* The mode has signed zeros, and we have to honor their sign.
7289 In this situation, there are only two cases we can return true for.
7290 (i) X - 0 is the same as X with default rounding.
7291 (ii) X + 0 is X when X can't possibly be -0.0. */
7292 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
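A small host-side sketch (hypothetical, not GCC code) shows why the signed-zero reasoning above matters; it assumes the default round-to-nearest mode:

#include <stdio.h>

int
main (void)
{
  double x = -0.0;
  /* Prints "0 -0": x + 0.0 changes the sign of a zero, so it cannot
     be folded to x, while x - 0.0 preserves it.  */
  printf ("%g %g\n", x + 0.0, x - 0.0);
  return 0;
}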
7295 /* Subroutine of match.pd that optimizes comparisons of a division by
7296 a nonzero integer constant against an integer constant, i.e.
7297 X/C1 op C2.
7299 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7300 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7302 enum tree_code
7303 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7304 tree *hi, bool *neg_overflow)
7306 tree prod, tmp, type = TREE_TYPE (c1);
7307 signop sign = TYPE_SIGN (type);
7308 wi::overflow_type overflow;
7310 /* We have to do this the hard way to detect unsigned overflow.
7311 prod = int_const_binop (MULT_EXPR, c1, c2); */
7312 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7313 prod = force_fit_type (type, val, -1, overflow);
7314 *neg_overflow = false;
7316 if (sign == UNSIGNED)
7318 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7319 *lo = prod;
7321 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7322 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7323 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7325 else if (tree_int_cst_sgn (c1) >= 0)
7327 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7328 switch (tree_int_cst_sgn (c2))
7330 case -1:
7331 *neg_overflow = true;
7332 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7333 *hi = prod;
7334 break;
7336 case 0:
7337 *lo = fold_negate_const (tmp, type);
7338 *hi = tmp;
7339 break;
7341 case 1:
7342 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7343 *lo = prod;
7344 break;
7346 default:
7347 gcc_unreachable ();
7350 else
7352 /* A negative divisor reverses the relational operators. */
7353 code = swap_tree_comparison (code);
7355 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7356 switch (tree_int_cst_sgn (c2))
7358 case -1:
7359 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7360 *lo = prod;
7361 break;
7363 case 0:
7364 *hi = fold_negate_const (tmp, type);
7365 *lo = tmp;
7366 break;
7368 case 1:
7369 *neg_overflow = true;
7370 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7371 *hi = prod;
7372 break;
7374 default:
7375 gcc_unreachable ();
7379 if (code != EQ_EXPR && code != NE_EXPR)
7380 return code;
7382 if (TREE_OVERFLOW (*lo)
7383 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7384 *lo = NULL_TREE;
7385 if (TREE_OVERFLOW (*hi)
7386 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7387 *hi = NULL_TREE;
7389 return code;
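For a concrete instance of the bounds computed above (an illustrative sketch, not fold-const.cc code): with unsigned X and X / 3 == 2, the folder derives *lo = 3 * 2 = 6 and *hi = 6 + (3 - 1) = 8:

/* Always returns 1: for unsigned x, x / 3 == 2 holds exactly when
   x lies in the range [6, 8].  */
int
div_compare_example (unsigned int x)
{
  return (x / 3 == 2) == (x >= 6 && x <= 8);
}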
7393 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7394 equality/inequality test, then return a simplified form of the test
7395 using a sign test. Otherwise return NULL. TYPE is the desired
7396 result type. */
7398 static tree
7399 fold_single_bit_test_into_sign_test (location_t loc,
7400 enum tree_code code, tree arg0, tree arg1,
7401 tree result_type)
7403 /* If this is testing a single bit, we can optimize the test. */
7404 if ((code == NE_EXPR || code == EQ_EXPR)
7405 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7406 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7408 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7409 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7410 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7412 if (arg00 != NULL_TREE
7413 /* This is only a win if casting to a signed type is cheap,
7414 i.e. when arg00's type is not a partial mode. */
7415 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7417 tree stype = signed_type_for (TREE_TYPE (arg00));
7418 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7419 result_type,
7420 fold_convert_loc (loc, stype, arg00),
7421 build_int_cst (stype, 0));
7425 return NULL_TREE;
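The rewrite performed here corresponds to the following source-level identity (a sketch with an invented name, assuming a 32-bit two's-complement int):

/* 0x80000000 is the sign bit of a 32-bit int, so (a & 0x80000000) != 0
   is folded to a < 0, and the == 0 form to a >= 0.  */
int
sign_bit_test (int a)
{
  return (a & 0x80000000u) != 0;  /* folds to: a < 0 */
}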
7428 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7429 equality/inequality test, then return a simplified form of
7430 the test using shifts and logical operations. Otherwise return
7431 NULL. TYPE is the desired result type. */
7433 tree
7434 fold_single_bit_test (location_t loc, enum tree_code code,
7435 tree arg0, tree arg1, tree result_type)
7437 /* If this is testing a single bit, we can optimize the test. */
7438 if ((code == NE_EXPR || code == EQ_EXPR)
7439 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7440 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7442 tree inner = TREE_OPERAND (arg0, 0);
7443 tree type = TREE_TYPE (arg0);
7444 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7445 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7446 int ops_unsigned;
7447 tree signed_type, unsigned_type, intermediate_type;
7448 tree tem, one;
7450 /* First, see if we can fold the single bit test into a sign-bit
7451 test. */
7452 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7453 result_type);
7454 if (tem)
7455 return tem;
7457 /* Otherwise we have (A & C) != 0 where C is a single bit;
7458 convert that into ((A >> C2) & 1), where C2 = log2(C).
7459 Similarly for (A & C) == 0. */
7461 /* If INNER is a right shift of a constant and it plus BITNUM does
7462 not overflow, adjust BITNUM and INNER. */
7463 if (TREE_CODE (inner) == RSHIFT_EXPR
7464 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7465 && bitnum < TYPE_PRECISION (type)
7466 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7467 TYPE_PRECISION (type) - bitnum))
7469 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7470 inner = TREE_OPERAND (inner, 0);
7473 /* If we are going to be able to omit the AND below, we must do our
7474 operations as unsigned. If we must use the AND, we have a choice.
7475 Normally unsigned is faster, but for some machines signed is. */
7476 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7477 && !flag_syntax_only) ? 0 : 1;
7479 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7480 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7481 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7482 inner = fold_convert_loc (loc, intermediate_type, inner);
7484 if (bitnum != 0)
7485 inner = build2 (RSHIFT_EXPR, intermediate_type,
7486 inner, size_int (bitnum));
7488 one = build_int_cst (intermediate_type, 1);
7490 if (code == EQ_EXPR)
7491 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7493 /* Put the AND last so it can combine with more things. */
7494 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7496 /* Make sure to return the proper type. */
7497 inner = fold_convert_loc (loc, result_type, inner);
7499 return inner;
7501 return NULL_TREE;
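Illustratively (a standalone sketch, not GCC code), the shift-and-mask form produced for a non-sign bit looks like this:

/* For (a & 8) != 0 we have C = 8 and C2 = log2 (8) = 3, so the test
   becomes (a >> 3) & 1; the == 0 form XORs the shifted value with 1
   before the final AND.  */
unsigned int
single_bit_test (unsigned int a)
{
  return (a >> 3) & 1;
}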
7504 /* Test whether it is preferable to swap two operands, ARG0 and
7505 ARG1, for example because ARG0 is an integer constant and ARG1
7506 isn't. */
7508 bool
7509 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7511 if (CONSTANT_CLASS_P (arg1))
7512 return 0;
7513 if (CONSTANT_CLASS_P (arg0))
7514 return 1;
7516 STRIP_NOPS (arg0);
7517 STRIP_NOPS (arg1);
7519 if (TREE_CONSTANT (arg1))
7520 return 0;
7521 if (TREE_CONSTANT (arg0))
7522 return 1;
7524 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7525 for commutative and comparison operators. Ensuring a canonical
7526 form allows the optimizers to find additional redundancies without
7527 having to explicitly check for both orderings. */
7528 if (TREE_CODE (arg0) == SSA_NAME
7529 && TREE_CODE (arg1) == SSA_NAME
7530 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7531 return 1;
7533 /* Put SSA_NAMEs last. */
7534 if (TREE_CODE (arg1) == SSA_NAME)
7535 return 0;
7536 if (TREE_CODE (arg0) == SSA_NAME)
7537 return 1;
7539 /* Put variables last. */
7540 if (DECL_P (arg1))
7541 return 0;
7542 if (DECL_P (arg0))
7543 return 1;
7545 return 0;
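The practical effect of this predicate is canonicalization, as in the following sketch (invented example, not GCC code):

/* Both bodies fold to the same canonical tree, x + 1, because the
   INTEGER_CST is moved to the second operand of the commutative '+'.  */
int f1 (int x) { return 1 + x; }
int f2 (int x) { return x + 1; }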
7549 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7550 means A >= Y && A != MAX, but in this case we know that
7551 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7553 static tree
7554 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7556 tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7558 if (TREE_CODE (bound) == LT_EXPR)
7559 a = TREE_OPERAND (bound, 0);
7560 else if (TREE_CODE (bound) == GT_EXPR)
7561 a = TREE_OPERAND (bound, 1);
7562 else
7563 return NULL_TREE;
7565 typea = TREE_TYPE (a);
7566 if (!INTEGRAL_TYPE_P (typea)
7567 && !POINTER_TYPE_P (typea))
7568 return NULL_TREE;
7570 if (TREE_CODE (ineq) == LT_EXPR)
7572 a1 = TREE_OPERAND (ineq, 1);
7573 y = TREE_OPERAND (ineq, 0);
7575 else if (TREE_CODE (ineq) == GT_EXPR)
7577 a1 = TREE_OPERAND (ineq, 0);
7578 y = TREE_OPERAND (ineq, 1);
7580 else
7581 return NULL_TREE;
7583 if (TREE_TYPE (a1) != typea)
7584 return NULL_TREE;
7586 if (POINTER_TYPE_P (typea))
7588 /* Convert the pointer values to integers before taking the difference. */
7589 tree ta = fold_convert_loc (loc, ssizetype, a);
7590 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7591 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7593 else
7594 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7596 if (!diff || !integer_onep (diff))
7597 return NULL_TREE;
7599 return fold_build2_loc (loc, GE_EXPR, type, a, y);
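Restating the correctness argument as a sketch (not fold-const.cc code; unsigned is chosen for definiteness): A + 1 > Y differs from A >= Y only when A + 1 wraps, i.e. when A is the maximum value, and the conjunct A < X rules that case out:

/* Under a < x, a cannot be UINT_MAX, so a + 1 never wraps and
   a + 1 > y is exactly a >= y.  */
int
nonsharp_example (unsigned int a, unsigned int x, unsigned int y)
{
  return a < x && a >= y;  /* folded form of: a < x && a + 1 > y */
}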
7602 /* Fold a sum or difference of at least one multiplication.
7603 Returns the folded tree or NULL if no simplification could be made. */
7605 static tree
7606 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7607 tree arg0, tree arg1)
7609 tree arg00, arg01, arg10, arg11;
7610 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7612 /* (A * C) +- (B * C) -> (A+-B) * C.
7613 (A * C) +- A -> A * (C+-1).
7614 We are most concerned about the case where C is a constant,
7615 but other combinations show up during loop reduction. Since
7616 it is not difficult, try all four possibilities. */
7618 if (TREE_CODE (arg0) == MULT_EXPR)
7620 arg00 = TREE_OPERAND (arg0, 0);
7621 arg01 = TREE_OPERAND (arg0, 1);
7623 else if (TREE_CODE (arg0) == INTEGER_CST)
7625 arg00 = build_one_cst (type);
7626 arg01 = arg0;
7628 else
7630 /* We cannot generate constant 1 for fract. */
7631 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7632 return NULL_TREE;
7633 arg00 = arg0;
7634 arg01 = build_one_cst (type);
7636 if (TREE_CODE (arg1) == MULT_EXPR)
7638 arg10 = TREE_OPERAND (arg1, 0);
7639 arg11 = TREE_OPERAND (arg1, 1);
7641 else if (TREE_CODE (arg1) == INTEGER_CST)
7643 arg10 = build_one_cst (type);
7644 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7645 the purpose of this canonicalization. */
7646 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7647 && negate_expr_p (arg1)
7648 && code == PLUS_EXPR)
7650 arg11 = negate_expr (arg1);
7651 code = MINUS_EXPR;
7653 else
7654 arg11 = arg1;
7656 else
7658 /* We cannot generate constant 1 for fract. */
7659 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7660 return NULL_TREE;
7661 arg10 = arg1;
7662 arg11 = build_one_cst (type);
7664 same = NULL_TREE;
7666 /* Prefer factoring a common non-constant. */
7667 if (operand_equal_p (arg00, arg10, 0))
7668 same = arg00, alt0 = arg01, alt1 = arg11;
7669 else if (operand_equal_p (arg01, arg11, 0))
7670 same = arg01, alt0 = arg00, alt1 = arg10;
7671 else if (operand_equal_p (arg00, arg11, 0))
7672 same = arg00, alt0 = arg01, alt1 = arg10;
7673 else if (operand_equal_p (arg01, arg10, 0))
7674 same = arg01, alt0 = arg00, alt1 = arg11;
7676 /* No identical multiplicands; see if we can find a common
7677 power-of-two factor in non-power-of-two multiplies. This
7678 can help in multi-dimensional array access. */
7679 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7681 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7682 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7683 HOST_WIDE_INT tmp;
7684 bool swap = false;
7685 tree maybe_same;
7687 /* Move min of absolute values to int11. */
7688 if (absu_hwi (int01) < absu_hwi (int11))
7690 tmp = int01, int01 = int11, int11 = tmp;
7691 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7692 maybe_same = arg01;
7693 swap = true;
7695 else
7696 maybe_same = arg11;
7698 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7699 if (factor > 1
7700 && pow2p_hwi (factor)
7701 && (int01 & (factor - 1)) == 0
7702 /* The remainder should not be a constant, otherwise we
7703 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7704 would increase the number of multiplications necessary. */
7705 && TREE_CODE (arg10) != INTEGER_CST)
7707 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7708 build_int_cst (TREE_TYPE (arg00),
7709 int01 / int11));
7710 alt1 = arg10;
7711 same = maybe_same;
7712 if (swap)
7713 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7717 if (!same)
7718 return NULL_TREE;
7720 if (! ANY_INTEGRAL_TYPE_P (type)
7721 || TYPE_OVERFLOW_WRAPS (type)
7722 /* We are neither factoring zero nor minus one. */
7723 || TREE_CODE (same) == INTEGER_CST)
7724 return fold_build2_loc (loc, MULT_EXPR, type,
7725 fold_build2_loc (loc, code, type,
7726 fold_convert_loc (loc, type, alt0),
7727 fold_convert_loc (loc, type, alt1)),
7728 fold_convert_loc (loc, type, same));
7730 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7731 same may be minus one and thus the multiplication may overflow. Perform
7732 the sum operation in an unsigned type. */
7733 tree utype = unsigned_type_for (type);
7734 tree tem = fold_build2_loc (loc, code, utype,
7735 fold_convert_loc (loc, utype, alt0),
7736 fold_convert_loc (loc, utype, alt1));
7737 /* If the sum evaluated to a constant that is not -INF, the multiplication
7738 cannot overflow. */
7739 if (TREE_CODE (tem) == INTEGER_CST
7740 && (wi::to_wide (tem)
7741 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7742 return fold_build2_loc (loc, MULT_EXPR, type,
7743 fold_convert (type, tem), same);
7745 /* Do not resort to unsigned multiplication because
7746 we lose the no-overflow property of the expression. */
7747 return NULL_TREE;
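A concrete instance of the common power-of-two factoring above (illustrative sketch only):

/* a * 8 + b * 4 is rewritten as (a * 2 + b) * 4: the smaller absolute
   constant 4 is factored out and 8 / 4 = 2 scales the other operand.  */
int
factor_example (int a, int b)
{
  return (a * 2 + b) * 4;  /* folded form of: a * 8 + b * 4 */
}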
7750 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7751 specified by EXPR into the buffer PTR of length LEN bytes.
7752 Return the number of bytes placed in the buffer, or zero
7753 upon failure. */
7755 static int
7756 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7758 tree type = TREE_TYPE (expr);
7759 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7760 int byte, offset, word, words;
7761 unsigned char value;
7763 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7764 return 0;
7765 if (off == -1)
7766 off = 0;
7768 if (ptr == NULL)
7769 /* Dry run. */
7770 return MIN (len, total_bytes - off);
7772 words = total_bytes / UNITS_PER_WORD;
7774 for (byte = 0; byte < total_bytes; byte++)
7776 int bitpos = byte * BITS_PER_UNIT;
7777 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7778 number of bytes. */
7779 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7781 if (total_bytes > UNITS_PER_WORD)
7783 word = byte / UNITS_PER_WORD;
7784 if (WORDS_BIG_ENDIAN)
7785 word = (words - 1) - word;
7786 offset = word * UNITS_PER_WORD;
7787 if (BYTES_BIG_ENDIAN)
7788 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7789 else
7790 offset += byte % UNITS_PER_WORD;
7792 else
7793 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7794 if (offset >= off && offset - off < len)
7795 ptr[offset - off] = value;
7797 return MIN (len, total_bytes - off);
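For orientation, a host-side sketch of the byte image this produces for a 32-bit constant (hypothetical, with a little-endian host standing in for the target):

#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned int v = 0x01020304;
  unsigned char buf[4];
  /* A little-endian target stores the least significant byte first;
     this prints "04 03 02 01".  */
  memcpy (buf, &v, sizeof buf);
  printf ("%02x %02x %02x %02x\n", buf[0], buf[1], buf[2], buf[3]);
  return 0;
}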
7801 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7802 specified by EXPR into the buffer PTR of length LEN bytes.
7803 Return the number of bytes placed in the buffer, or zero
7804 upon failure. */
7806 static int
7807 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7809 tree type = TREE_TYPE (expr);
7810 scalar_mode mode = SCALAR_TYPE_MODE (type);
7811 int total_bytes = GET_MODE_SIZE (mode);
7812 FIXED_VALUE_TYPE value;
7813 tree i_value, i_type;
7815 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7816 return 0;
7818 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7820 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7821 return 0;
7823 value = TREE_FIXED_CST (expr);
7824 i_value = double_int_to_tree (i_type, value.data);
7826 return native_encode_int (i_value, ptr, len, off);
7830 /* Subroutine of native_encode_expr. Encode the REAL_CST
7831 specified by EXPR into the buffer PTR of length LEN bytes.
7832 Return the number of bytes placed in the buffer, or zero
7833 upon failure. */
7835 static int
7836 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7838 tree type = TREE_TYPE (expr);
7839 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7840 int byte, offset, word, words, bitpos;
7841 unsigned char value;
7843 /* There are always 32 bits in each long, no matter the size of
7844 the host's long. We handle floating point representations with
7845 up to 192 bits. */
7846 long tmp[6];
7848 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7849 return 0;
7850 if (off == -1)
7851 off = 0;
7853 if (ptr == NULL)
7854 /* Dry run. */
7855 return MIN (len, total_bytes - off);
7857 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7859 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7861 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7862 bitpos += BITS_PER_UNIT)
7864 byte = (bitpos / BITS_PER_UNIT) & 3;
7865 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7867 if (UNITS_PER_WORD < 4)
7869 word = byte / UNITS_PER_WORD;
7870 if (WORDS_BIG_ENDIAN)
7871 word = (words - 1) - word;
7872 offset = word * UNITS_PER_WORD;
7873 if (BYTES_BIG_ENDIAN)
7874 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7875 else
7876 offset += byte % UNITS_PER_WORD;
7878 else
7880 offset = byte;
7881 if (BYTES_BIG_ENDIAN)
7883 /* Reverse bytes within each long, or within the entire float
7884 if it's smaller than a long (for HFmode). */
7885 offset = MIN (3, total_bytes - 1) - offset;
7886 gcc_assert (offset >= 0);
7889 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7890 if (offset >= off
7891 && offset - off < len)
7892 ptr[offset - off] = value;
7894 return MIN (len, total_bytes - off);
7897 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7898 specified by EXPR into the buffer PTR of length LEN bytes.
7899 Return the number of bytes placed in the buffer, or zero
7900 upon failure. */
7902 static int
7903 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7905 int rsize, isize;
7906 tree part;
7908 part = TREE_REALPART (expr);
7909 rsize = native_encode_expr (part, ptr, len, off);
7910 if (off == -1 && rsize == 0)
7911 return 0;
7912 part = TREE_IMAGPART (expr);
7913 if (off != -1)
7914 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7915 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7916 len - rsize, off);
7917 if (off == -1 && isize != rsize)
7918 return 0;
7919 return rsize + isize;
7922 /* Like native_encode_vector, but only encode the first COUNT elements.
7923 The other arguments are as for native_encode_vector. */
7925 static int
7926 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7927 int off, unsigned HOST_WIDE_INT count)
7929 tree itype = TREE_TYPE (TREE_TYPE (expr));
7930 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7931 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7933 /* This is the only case in which elements can be smaller than a byte.
7934 Element 0 is always in the lsb of the containing byte. */
7935 unsigned int elt_bits = TYPE_PRECISION (itype);
7936 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7937 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7938 return 0;
7940 if (off == -1)
7941 off = 0;
7943 /* Zero the buffer and then set bits later where necessary. */
7944 int extract_bytes = MIN (len, total_bytes - off);
7945 if (ptr)
7946 memset (ptr, 0, extract_bytes);
7948 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7949 unsigned int first_elt = off * elts_per_byte;
7950 unsigned int extract_elts = extract_bytes * elts_per_byte;
7951 for (unsigned int i = 0; i < extract_elts; ++i)
7953 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7954 if (TREE_CODE (elt) != INTEGER_CST)
7955 return 0;
7957 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7959 unsigned int bit = i * elt_bits;
7960 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7963 return extract_bytes;
7966 int offset = 0;
7967 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7968 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7970 if (off >= size)
7972 off -= size;
7973 continue;
7975 tree elem = VECTOR_CST_ELT (expr, i);
7976 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7977 len - offset, off);
7978 if ((off == -1 && res != size) || res == 0)
7979 return 0;
7980 offset += res;
7981 if (offset >= len)
7982 return (off == -1 && i < count - 1) ? 0 : offset;
7983 if (off != -1)
7984 off = 0;
7986 return offset;
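The sub-byte boolean-vector branch above packs element 0 into the least significant bit of each byte; a standalone sketch of that packing for eight 1-bit elements (illustration only):

/* Mirrors the elt_bits == 1 case: bit i of the output byte holds
   vector element i.  */
unsigned char
pack_bool_vec8 (const unsigned char elt[8])
{
  unsigned char byte = 0;
  for (int i = 0; i < 8; i++)
    if (elt[i])
      byte |= (unsigned char) (1 << i);
  return byte;
}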
7989 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7990 specified by EXPR into the buffer PTR of length LEN bytes.
7991 Return the number of bytes placed in the buffer, or zero
7992 upon failure. */
7994 static int
7995 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7997 unsigned HOST_WIDE_INT count;
7998 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7999 return 0;
8000 return native_encode_vector_part (expr, ptr, len, off, count);
8004 /* Subroutine of native_encode_expr. Encode the STRING_CST
8005 specified by EXPR into the buffer PTR of length LEN bytes.
8006 Return the number of bytes placed in the buffer, or zero
8007 upon failure. */
8009 static int
8010 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8012 tree type = TREE_TYPE (expr);
8014 /* Wide-char strings are encoded in target byte order, so natively
8015 encoding them is trivial. */
8016 if (BITS_PER_UNIT != CHAR_BIT
8017 || TREE_CODE (type) != ARRAY_TYPE
8018 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8019 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8020 return 0;
8022 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8023 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8024 return 0;
8025 if (off == -1)
8026 off = 0;
8027 len = MIN (total_bytes - off, len);
8028 if (ptr == NULL)
8029 /* Dry run. */;
8030 else
8032 int written = 0;
8033 if (off < TREE_STRING_LENGTH (expr))
8035 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8036 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8038 memset (ptr + written, 0, len - written);
8040 return len;
8044 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8045 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8046 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8047 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8048 sufficient to encode the entire EXPR, or if OFF is out of bounds.
8049 Otherwise, start at byte offset OFF and encode at most LEN bytes.
8050 Return the number of bytes placed in the buffer, or zero upon failure. */
8052 int
8053 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8055 /* We don't support starting at a negative offset, and -1 is special. */
8056 if (off < -1)
8057 return 0;
8059 switch (TREE_CODE (expr))
8061 case INTEGER_CST:
8062 return native_encode_int (expr, ptr, len, off);
8064 case REAL_CST:
8065 return native_encode_real (expr, ptr, len, off);
8067 case FIXED_CST:
8068 return native_encode_fixed (expr, ptr, len, off);
8070 case COMPLEX_CST:
8071 return native_encode_complex (expr, ptr, len, off);
8073 case VECTOR_CST:
8074 return native_encode_vector (expr, ptr, len, off);
8076 case STRING_CST:
8077 return native_encode_string (expr, ptr, len, off);
8079 default:
8080 return 0;
8084 /* Try to find a type whose byte size is smaller than or equal to LEN
8085 bytes and larger than or equal to FIELDSIZE bytes, with underlying mode
8086 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8087 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8089 tree
8090 find_bitfield_repr_type (int fieldsize, int len)
8092 machine_mode mode;
8093 for (int pass = 0; pass < 2; pass++)
8095 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8096 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8097 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8098 && known_eq (GET_MODE_PRECISION (mode),
8099 GET_MODE_BITSIZE (mode))
8100 && known_le (GET_MODE_SIZE (mode), len))
8102 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8103 if (ret && TYPE_MODE (ret) == mode)
8104 return ret;
8108 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8109 if (int_n_enabled_p[i]
8110 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8111 && int_n_trees[i].unsigned_type)
8113 tree ret = int_n_trees[i].unsigned_type;
8114 mode = TYPE_MODE (ret);
8115 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8116 && known_eq (GET_MODE_PRECISION (mode),
8117 GET_MODE_BITSIZE (mode))
8118 && known_le (GET_MODE_SIZE (mode), len))
8119 return ret;
8122 return NULL_TREE;
8125 /* Similar to native_encode_expr, but also handles CONSTRUCTORs, VCEs,
8126 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8127 has to be non-NULL and OFF zero), then in addition to filling the
8128 bytes pointed to by PTR with the value, also clear any bits pointed
8129 to by MASK that are known to be initialized; bits for e.g.
8130 uninitialized padding or uninitialized fields are kept as-is. */
8132 int
8133 native_encode_initializer (tree init, unsigned char *ptr, int len,
8134 int off, unsigned char *mask)
8136 int r;
8138 /* We don't support starting at a negative offset, and -1 is special. */
8139 if (off < -1 || init == NULL_TREE)
8140 return 0;
8142 gcc_assert (mask == NULL || (off == 0 && ptr));
8144 STRIP_NOPS (init);
8145 switch (TREE_CODE (init))
8147 case VIEW_CONVERT_EXPR:
8148 case NON_LVALUE_EXPR:
8149 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8150 mask);
8151 default:
8152 r = native_encode_expr (init, ptr, len, off);
8153 if (mask)
8154 memset (mask, 0, r);
8155 return r;
8156 case CONSTRUCTOR:
8157 tree type = TREE_TYPE (init);
8158 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8159 if (total_bytes < 0)
8160 return 0;
8161 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8162 return 0;
8163 int o = off == -1 ? 0 : off;
8164 if (TREE_CODE (type) == ARRAY_TYPE)
8166 tree min_index;
8167 unsigned HOST_WIDE_INT cnt;
8168 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8169 constructor_elt *ce;
8171 if (!TYPE_DOMAIN (type)
8172 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8173 return 0;
8175 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8176 if (fieldsize <= 0)
8177 return 0;
8179 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8180 if (ptr)
8181 memset (ptr, '\0', MIN (total_bytes - off, len));
8183 for (cnt = 0; ; cnt++)
8185 tree val = NULL_TREE, index = NULL_TREE;
8186 HOST_WIDE_INT pos = curpos, count = 0;
8187 bool full = false;
8188 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8190 val = ce->value;
8191 index = ce->index;
8193 else if (mask == NULL
8194 || CONSTRUCTOR_NO_CLEARING (init)
8195 || curpos >= total_bytes)
8196 break;
8197 else
8198 pos = total_bytes;
8200 if (index && TREE_CODE (index) == RANGE_EXPR)
8202 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8203 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8204 return 0;
8205 offset_int wpos
8206 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8207 - wi::to_offset (min_index),
8208 TYPE_PRECISION (sizetype));
8209 wpos *= fieldsize;
8210 if (!wi::fits_shwi_p (wpos))
8211 return 0;
8212 pos = wpos.to_shwi ();
8213 offset_int wcount
8214 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8215 - wi::to_offset (TREE_OPERAND (index, 0)),
8216 TYPE_PRECISION (sizetype));
8217 if (!wi::fits_shwi_p (wcount))
8218 return 0;
8219 count = wcount.to_shwi ();
8221 else if (index)
8223 if (TREE_CODE (index) != INTEGER_CST)
8224 return 0;
8225 offset_int wpos
8226 = wi::sext (wi::to_offset (index)
8227 - wi::to_offset (min_index),
8228 TYPE_PRECISION (sizetype));
8229 wpos *= fieldsize;
8230 if (!wi::fits_shwi_p (wpos))
8231 return 0;
8232 pos = wpos.to_shwi ();
8235 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8237 if (valueinit == -1)
8239 tree zero = build_zero_cst (TREE_TYPE (type));
8240 r = native_encode_initializer (zero, ptr + curpos,
8241 fieldsize, 0,
8242 mask + curpos);
8243 if (TREE_CODE (zero) == CONSTRUCTOR)
8244 ggc_free (zero);
8245 if (!r)
8246 return 0;
8247 valueinit = curpos;
8248 curpos += fieldsize;
8250 while (curpos != pos)
8252 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8253 memcpy (mask + curpos, mask + valueinit, fieldsize);
8254 curpos += fieldsize;
8258 curpos = pos;
8259 if (val)
8260 do
8262 if (off == -1
8263 || (curpos >= off
8264 && (curpos + fieldsize
8265 <= (HOST_WIDE_INT) off + len)))
8267 if (full)
8269 if (ptr)
8270 memcpy (ptr + (curpos - o), ptr + (pos - o),
8271 fieldsize);
8272 if (mask)
8273 memcpy (mask + curpos, mask + pos, fieldsize);
8275 else if (!native_encode_initializer (val,
8276 ptr
8277 ? ptr + curpos - o
8278 : NULL,
8279 fieldsize,
8280 off == -1 ? -1
8281 : 0,
8282 mask
8283 ? mask + curpos
8284 : NULL))
8285 return 0;
8286 else
8288 full = true;
8289 pos = curpos;
8292 else if (curpos + fieldsize > off
8293 && curpos < (HOST_WIDE_INT) off + len)
8295 /* Partial overlap. */
8296 unsigned char *p = NULL;
8297 int no = 0;
8298 int l;
8299 gcc_assert (mask == NULL);
8300 if (curpos >= off)
8302 if (ptr)
8303 p = ptr + curpos - off;
8304 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8305 fieldsize);
8307 else
8309 p = ptr;
8310 no = off - curpos;
8311 l = len;
8313 if (!native_encode_initializer (val, p, l, no, NULL))
8314 return 0;
8316 curpos += fieldsize;
8318 while (count-- != 0);
8320 return MIN (total_bytes - off, len);
8322 else if (TREE_CODE (type) == RECORD_TYPE
8323 || TREE_CODE (type) == UNION_TYPE)
8325 unsigned HOST_WIDE_INT cnt;
8326 constructor_elt *ce;
8327 tree fld_base = TYPE_FIELDS (type);
8328 tree to_free = NULL_TREE;
8330 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8331 if (ptr != NULL)
8332 memset (ptr, '\0', MIN (total_bytes - o, len));
8333 for (cnt = 0; ; cnt++)
8335 tree val = NULL_TREE, field = NULL_TREE;
8336 HOST_WIDE_INT pos = 0, fieldsize;
8337 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8339 if (to_free)
8341 ggc_free (to_free);
8342 to_free = NULL_TREE;
8345 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8347 val = ce->value;
8348 field = ce->index;
8349 if (field == NULL_TREE)
8350 return 0;
8352 pos = int_byte_position (field);
8353 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8354 continue;
8356 else if (mask == NULL
8357 || CONSTRUCTOR_NO_CLEARING (init))
8358 break;
8359 else
8360 pos = total_bytes;
8362 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8364 tree fld;
8365 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8367 if (TREE_CODE (fld) != FIELD_DECL)
8368 continue;
8369 if (fld == field)
8370 break;
8371 if (DECL_PADDING_P (fld))
8372 continue;
8373 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8374 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8375 return 0;
8376 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8377 continue;
8378 break;
8380 if (fld == NULL_TREE)
8382 if (ce == NULL)
8383 break;
8384 return 0;
8386 fld_base = DECL_CHAIN (fld);
8387 if (fld != field)
8389 cnt--;
8390 field = fld;
8391 pos = int_byte_position (field);
8392 val = build_zero_cst (TREE_TYPE (fld));
8393 if (TREE_CODE (val) == CONSTRUCTOR)
8394 to_free = val;
8398 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8399 && TYPE_DOMAIN (TREE_TYPE (field))
8400 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8402 if (mask || off != -1)
8403 return 0;
8404 if (val == NULL_TREE)
8405 continue;
8406 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8407 return 0;
8408 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8409 if (fieldsize < 0
8410 || (int) fieldsize != fieldsize
8411 || (pos + fieldsize) > INT_MAX)
8412 return 0;
8413 if (pos + fieldsize > total_bytes)
8415 if (ptr != NULL && total_bytes < len)
8416 memset (ptr + total_bytes, '\0',
8417 MIN (pos + fieldsize, len) - total_bytes);
8418 total_bytes = pos + fieldsize;
8421 else
8423 if (DECL_SIZE_UNIT (field) == NULL_TREE
8424 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8425 return 0;
8426 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8428 if (fieldsize == 0)
8429 continue;
8431 if (DECL_BIT_FIELD (field))
8433 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8434 return 0;
8435 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8436 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8437 bpos %= BITS_PER_UNIT;
8441 fieldsize += bpos;
8442 epos = fieldsize % BITS_PER_UNIT;
8443 fieldsize += BITS_PER_UNIT - 1;
8444 fieldsize /= BITS_PER_UNIT;
8447 if (off != -1 && pos + fieldsize <= off)
8448 continue;
8450 if (val == NULL_TREE)
8451 continue;
8453 if (DECL_BIT_FIELD (field))
8455 /* FIXME: Handle PDP endian. */
8456 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8457 return 0;
8459 if (TREE_CODE (val) != INTEGER_CST)
8460 return 0;
8462 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8463 tree repr_type = NULL_TREE;
8464 HOST_WIDE_INT rpos = 0;
8465 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8467 rpos = int_byte_position (repr);
8468 repr_type = TREE_TYPE (repr);
8470 else
8472 repr_type = find_bitfield_repr_type (fieldsize, len);
8473 if (repr_type == NULL_TREE)
8474 return 0;
8475 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8476 gcc_assert (repr_size > 0 && repr_size <= len);
8477 if (pos + repr_size <= o + len)
8478 rpos = pos;
8479 else
8481 rpos = o + len - repr_size;
8482 gcc_assert (rpos <= pos);
8486 if (rpos > pos)
8487 return 0;
8488 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8489 int diff = (TYPE_PRECISION (repr_type)
8490 - TYPE_PRECISION (TREE_TYPE (field)));
8491 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8492 if (!BYTES_BIG_ENDIAN)
8493 w = wi::lshift (w, bitoff);
8494 else
8495 w = wi::lshift (w, diff - bitoff);
8496 val = wide_int_to_tree (repr_type, w);
8498 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8499 / BITS_PER_UNIT + 1];
8500 int l = native_encode_int (val, buf, sizeof buf, 0);
8501 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8502 return 0;
8504 if (ptr == NULL)
8505 continue;
8507 /* If the bitfield does not start at byte boundary, handle
8508 the partial byte at the start. */
8509 if (bpos
8510 && (off == -1 || (pos >= off && len >= 1)))
8512 if (!BYTES_BIG_ENDIAN)
8514 int msk = (1 << bpos) - 1;
8515 buf[pos - rpos] &= ~msk;
8516 buf[pos - rpos] |= ptr[pos - o] & msk;
8517 if (mask)
8519 if (fieldsize > 1 || epos == 0)
8520 mask[pos] &= msk;
8521 else
8522 mask[pos] &= (msk | ~((1 << epos) - 1));
8525 else
8527 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8528 buf[pos - rpos] &= msk;
8529 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8530 if (mask)
8532 if (fieldsize > 1 || epos == 0)
8533 mask[pos] &= ~msk;
8534 else
8535 mask[pos] &= (~msk
8536 | ((1 << (BITS_PER_UNIT - epos))
8537 - 1));
8541 /* If the bitfield does not end at byte boundary, handle
8542 the partial byte at the end. */
8543 if (epos
8544 && (off == -1
8545 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8547 if (!BYTES_BIG_ENDIAN)
8549 int msk = (1 << epos) - 1;
8550 buf[pos - rpos + fieldsize - 1] &= msk;
8551 buf[pos - rpos + fieldsize - 1]
8552 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8553 if (mask && (fieldsize > 1 || bpos == 0))
8554 mask[pos + fieldsize - 1] &= ~msk;
8556 else
8558 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8559 buf[pos - rpos + fieldsize - 1] &= ~msk;
8560 buf[pos - rpos + fieldsize - 1]
8561 |= ptr[pos + fieldsize - 1 - o] & msk;
8562 if (mask && (fieldsize > 1 || bpos == 0))
8563 mask[pos + fieldsize - 1] &= msk;
8566 if (off == -1
8567 || (pos >= off
8568 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8570 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8571 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8572 memset (mask + pos + (bpos != 0), 0,
8573 fieldsize - (bpos != 0) - (epos != 0));
8575 else
8577 /* Partial overlap. */
8578 HOST_WIDE_INT fsz = fieldsize;
8579 gcc_assert (mask == NULL);
8580 if (pos < off)
8582 fsz -= (off - pos);
8583 pos = off;
8585 if (pos + fsz > (HOST_WIDE_INT) off + len)
8586 fsz = (HOST_WIDE_INT) off + len - pos;
8587 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8589 continue;
8592 if (off == -1
8593 || (pos >= off
8594 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8596 int fldsize = fieldsize;
8597 if (off == -1)
8599 tree fld = DECL_CHAIN (field);
8600 while (fld)
8602 if (TREE_CODE (fld) == FIELD_DECL)
8603 break;
8604 fld = DECL_CHAIN (fld);
8606 if (fld == NULL_TREE)
8607 fldsize = len - pos;
8609 r = native_encode_initializer (val, ptr ? ptr + pos - o
8610 : NULL,
8611 fldsize,
8612 off == -1 ? -1 : 0,
8613 mask ? mask + pos : NULL);
8614 if (!r)
8615 return 0;
8616 if (off == -1
8617 && fldsize != fieldsize
8618 && r > fieldsize
8619 && pos + r > total_bytes)
8620 total_bytes = pos + r;
8622 else
8624 /* Partial overlap. */
8625 unsigned char *p = NULL;
8626 int no = 0;
8627 int l;
8628 gcc_assert (mask == NULL);
8629 if (pos >= off)
8631 if (ptr)
8632 p = ptr + pos - off;
8633 l = MIN ((HOST_WIDE_INT) off + len - pos,
8634 fieldsize);
8636 else
8638 p = ptr;
8639 no = off - pos;
8640 l = len;
8642 if (!native_encode_initializer (val, p, l, no, NULL))
8643 return 0;
8646 return MIN (total_bytes - off, len);
8648 return 0;
8653 /* Subroutine of native_interpret_expr. Interpret the contents of
8654 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8655 If the buffer cannot be interpreted, return NULL_TREE. */
8657 static tree
8658 native_interpret_int (tree type, const unsigned char *ptr, int len)
8660 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8662 if (total_bytes > len
8663 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8664 return NULL_TREE;
8666 wide_int result = wi::from_buffer (ptr, total_bytes);
8668 return wide_int_to_tree (type, result);
8672 /* Subroutine of native_interpret_expr. Interpret the contents of
8673 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8674 If the buffer cannot be interpreted, return NULL_TREE. */
8676 static tree
8677 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8679 scalar_mode mode = SCALAR_TYPE_MODE (type);
8680 int total_bytes = GET_MODE_SIZE (mode);
8681 double_int result;
8682 FIXED_VALUE_TYPE fixed_value;
8684 if (total_bytes > len
8685 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8686 return NULL_TREE;
8688 result = double_int::from_buffer (ptr, total_bytes);
8689 fixed_value = fixed_from_double_int (result, mode);
8691 return build_fixed (type, fixed_value);
8695 /* Subroutine of native_interpret_expr. Interpret the contents of
8696 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8697 If the buffer cannot be interpreted, return NULL_TREE. */
8699 tree
8700 native_interpret_real (tree type, const unsigned char *ptr, int len)
8702 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8703 int total_bytes = GET_MODE_SIZE (mode);
8704 unsigned char value;
8705 /* There are always 32 bits in each long, no matter the size of
8706 the host's long. We handle floating point representations with
8707 up to 192 bits. */
8708 REAL_VALUE_TYPE r;
8709 long tmp[6];
8711 if (total_bytes > len || total_bytes > 24)
8712 return NULL_TREE;
8713 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8715 memset (tmp, 0, sizeof (tmp));
8716 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8717 bitpos += BITS_PER_UNIT)
8719 /* Both OFFSET and BYTE index within a long;
8720 bitpos indexes the whole float. */
8721 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8722 if (UNITS_PER_WORD < 4)
8724 int word = byte / UNITS_PER_WORD;
8725 if (WORDS_BIG_ENDIAN)
8726 word = (words - 1) - word;
8727 offset = word * UNITS_PER_WORD;
8728 if (BYTES_BIG_ENDIAN)
8729 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8730 else
8731 offset += byte % UNITS_PER_WORD;
8733 else
8735 offset = byte;
8736 if (BYTES_BIG_ENDIAN)
8738 /* Reverse bytes within each long, or within the entire float
8739 if it's smaller than a long (for HFmode). */
8740 offset = MIN (3, total_bytes - 1) - offset;
8741 gcc_assert (offset >= 0);
8744 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8746 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8749 real_from_target (&r, tmp, mode);
8750 return build_real (type, r);
8754 /* Subroutine of native_interpret_expr. Interpret the contents of
8755 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8756 If the buffer cannot be interpreted, return NULL_TREE. */
8758 static tree
8759 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8761 tree etype, rpart, ipart;
8762 int size;
8764 etype = TREE_TYPE (type);
8765 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8766 if (size * 2 > len)
8767 return NULL_TREE;
8768 rpart = native_interpret_expr (etype, ptr, size);
8769 if (!rpart)
8770 return NULL_TREE;
8771 ipart = native_interpret_expr (etype, ptr+size, size);
8772 if (!ipart)
8773 return NULL_TREE;
8774 return build_complex (type, rpart, ipart);
8777 /* Read a vector of type TYPE from the target memory image given by BYTES,
8778 which contains LEN bytes. The vector is known to be encodable using
8779 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8781 Return the vector on success, otherwise return null. */
8783 static tree
8784 native_interpret_vector_part (tree type, const unsigned char *bytes,
8785 unsigned int len, unsigned int npatterns,
8786 unsigned int nelts_per_pattern)
8788 tree elt_type = TREE_TYPE (type);
8789 if (VECTOR_BOOLEAN_TYPE_P (type)
8790 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8792 /* This is the only case in which elements can be smaller than a byte.
8793 Element 0 is always in the lsb of the containing byte. */
8794 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8795 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8796 return NULL_TREE;
8798 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8799 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8801 unsigned int bit_index = i * elt_bits;
8802 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8803 unsigned int lsb = bit_index % BITS_PER_UNIT;
8804 builder.quick_push (bytes[byte_index] & (1 << lsb)
8805 ? build_all_ones_cst (elt_type)
8806 : build_zero_cst (elt_type));
8808 return builder.build ();
8811 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8812 if (elt_bytes * npatterns * nelts_per_pattern > len)
8813 return NULL_TREE;
8815 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8816 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8818 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8819 if (!elt)
8820 return NULL_TREE;
8821 builder.quick_push (elt);
8822 bytes += elt_bytes;
8824 return builder.build ();
8827 /* Subroutine of native_interpret_expr. Interpret the contents of
8828 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8829 If the buffer cannot be interpreted, return NULL_TREE. */
8831 static tree
8832 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8834 tree etype;
8835 unsigned int size;
8836 unsigned HOST_WIDE_INT count;
8838 etype = TREE_TYPE (type);
8839 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8840 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8841 || size * count > len)
8842 return NULL_TREE;
8844 return native_interpret_vector_part (type, ptr, len, count, 1);
8848 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8849 the buffer PTR of length LEN as a constant of type TYPE. For
8850 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8851 we return a REAL_CST, etc. If the buffer cannot be interpreted,
8852 return NULL_TREE. */
8854 tree
8855 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8857 switch (TREE_CODE (type))
8859 case INTEGER_TYPE:
8860 case ENUMERAL_TYPE:
8861 case BOOLEAN_TYPE:
8862 case POINTER_TYPE:
8863 case REFERENCE_TYPE:
8864 case OFFSET_TYPE:
8865 return native_interpret_int (type, ptr, len);
8867 case REAL_TYPE:
8868 if (tree ret = native_interpret_real (type, ptr, len))
8870 /* For floating point values in composite modes, punt if this
8871 folding doesn't preserve bit representation. As the mode doesn't
8872 have fixed precision while GCC pretends it does, there could be
8873 valid values that GCC can't really represent accurately.
8874 See PR95450. Even for other modes, e.g. x86 XFmode can have some
8875 bit combinations which GCC doesn't preserve. */
8876 unsigned char buf[24 * 2];
8877 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8878 int total_bytes = GET_MODE_SIZE (mode);
8879 memcpy (buf + 24, ptr, total_bytes);
8880 clear_type_padding_in_mask (type, buf + 24);
8881 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8882 || memcmp (buf + 24, buf, total_bytes) != 0)
8883 return NULL_TREE;
8884 return ret;
8886 return NULL_TREE;
8888 case FIXED_POINT_TYPE:
8889 return native_interpret_fixed (type, ptr, len);
8891 case COMPLEX_TYPE:
8892 return native_interpret_complex (type, ptr, len);
8894 case VECTOR_TYPE:
8895 return native_interpret_vector (type, ptr, len);
8897 default:
8898 return NULL_TREE;
8902 /* Returns true if we can interpret the contents of a native encoding
8903 as TYPE. */
8905 bool
8906 can_native_interpret_type_p (tree type)
8908 switch (TREE_CODE (type))
8910 case INTEGER_TYPE:
8911 case ENUMERAL_TYPE:
8912 case BOOLEAN_TYPE:
8913 case POINTER_TYPE:
8914 case REFERENCE_TYPE:
8915 case FIXED_POINT_TYPE:
8916 case REAL_TYPE:
8917 case COMPLEX_TYPE:
8918 case VECTOR_TYPE:
8919 case OFFSET_TYPE:
8920 return true;
8921 default:
8922 return false;
8926 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8927 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8929 tree
8930 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8931 int len)
8933 vec<constructor_elt, va_gc> *elts = NULL;
8934 if (TREE_CODE (type) == ARRAY_TYPE)
8936 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8937 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8938 return NULL_TREE;
8940 HOST_WIDE_INT cnt = 0;
8941 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8943 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8944 return NULL_TREE;
8945 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8947 if (eltsz == 0)
8948 cnt = 0;
8949 HOST_WIDE_INT pos = 0;
8950 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8952 tree v = NULL_TREE;
8953 if (pos >= len || pos + eltsz > len)
8954 return NULL_TREE;
8955 if (can_native_interpret_type_p (TREE_TYPE (type)))
8957 v = native_interpret_expr (TREE_TYPE (type),
8958 ptr + off + pos, eltsz);
8959 if (v == NULL_TREE)
8960 return NULL_TREE;
8962 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8963 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8964 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8965 eltsz);
8966 if (v == NULL_TREE)
8967 return NULL_TREE;
8968 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8970 return build_constructor (type, elts);
8972 if (TREE_CODE (type) != RECORD_TYPE)
8973 return NULL_TREE;
8974 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8976 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8977 continue;
8978 tree fld = field;
8979 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8980 int diff = 0;
8981 tree v = NULL_TREE;
8982 if (DECL_BIT_FIELD (field))
8984 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8985 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8987 poly_int64 bitoffset;
8988 poly_uint64 field_offset, fld_offset;
8989 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8990 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8991 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8992 else
8993 bitoffset = 0;
8994 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8995 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8996 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8997 - TYPE_PRECISION (TREE_TYPE (field)));
8998 if (!bitoffset.is_constant (&bitoff)
8999 || bitoff < 0
9000 || bitoff > diff)
9001 return NULL_TREE;
9003 else
9005 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9006 return NULL_TREE;
9007 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9008 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9009 bpos %= BITS_PER_UNIT;
9010 fieldsize += bpos;
9011 fieldsize += BITS_PER_UNIT - 1;
9012 fieldsize /= BITS_PER_UNIT;
9013 tree repr_type = find_bitfield_repr_type (fieldsize, len);
9014 if (repr_type == NULL_TREE)
9015 return NULL_TREE;
9016 sz = int_size_in_bytes (repr_type);
9017 if (sz < 0 || sz > len)
9018 return NULL_TREE;
9019 pos = int_byte_position (field);
9020 if (pos < 0 || pos > len || pos + fieldsize > len)
9021 return NULL_TREE;
9022 HOST_WIDE_INT rpos;
9023 if (pos + sz <= len)
9024 rpos = pos;
9025 else
9027 rpos = len - sz;
9028 gcc_assert (rpos <= pos);
9030 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9031 pos = rpos;
9032 diff = (TYPE_PRECISION (repr_type)
9033 - TYPE_PRECISION (TREE_TYPE (field)));
9034 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9035 if (v == NULL_TREE)
9036 return NULL_TREE;
9037 fld = NULL_TREE;
9041 if (fld)
9043 sz = int_size_in_bytes (TREE_TYPE (fld));
9044 if (sz < 0 || sz > len)
9045 return NULL_TREE;
9046 tree byte_pos = byte_position (fld);
9047 if (!tree_fits_shwi_p (byte_pos))
9048 return NULL_TREE;
9049 pos = tree_to_shwi (byte_pos);
9050 if (pos < 0 || pos > len || pos + sz > len)
9051 return NULL_TREE;
9053 if (fld == NULL_TREE)
9054 /* Already handled above. */;
9055 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9057 v = native_interpret_expr (TREE_TYPE (fld),
9058 ptr + off + pos, sz);
9059 if (v == NULL_TREE)
9060 return NULL_TREE;
9062 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9063 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9064 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9065 if (v == NULL_TREE)
9066 return NULL_TREE;
9067 if (fld != field)
9069 if (TREE_CODE (v) != INTEGER_CST)
9070 return NULL_TREE;
9072 /* FIXME: Figure out how to handle PDP endian bitfields. */
9073 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9074 return NULL_TREE;
9075 if (!BYTES_BIG_ENDIAN)
9076 v = wide_int_to_tree (TREE_TYPE (field),
9077 wi::lrshift (wi::to_wide (v), bitoff));
9078 else
9079 v = wide_int_to_tree (TREE_TYPE (field),
9080 wi::lrshift (wi::to_wide (v),
9081 diff - bitoff));
9083 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9085 return build_constructor (type, elts);
9088 /* Routines for manipulating native_encode_expr encoded data when the encoded
9089 or extracted constant positions and/or sizes aren't byte-aligned. */
9091 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9092 bits between adjacent elements. AMNT should be within
9093 [0, BITS_PER_UNIT).
9094 Example, AMNT = 2:
9095 00011111|11100000 << 2 = 01111111|10000000
9096 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9098 void
9099 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9100 unsigned int amnt)
9102 if (amnt == 0)
9103 return;
9105 unsigned char carry_over = 0U;
9106 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9107 unsigned char clear_mask = (~0U) << amnt;
9109 for (unsigned int i = 0; i < sz; i++)
9111 unsigned prev_carry_over = carry_over;
9112 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9114 ptr[i] <<= amnt;
9115 if (i != 0)
9117 ptr[i] &= clear_mask;
9118 ptr[i] |= prev_carry_over;
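Reproducing the comment's example in a freestanding sketch (not GCC code): the bytes below are PTR[0] = 0xe0 and PTR[1] = 0x1f, shifted left by AMNT = 2:

#include <stdio.h>

int
main (void)
{
  unsigned char p[2] = { 0xe0, 0x1f };  /* PTR[1]|PTR[0] = 00011111|11100000 */
  unsigned char carry = 0;
  for (unsigned int i = 0; i < 2; i++)
    {
      unsigned char next = p[i] >> (8 - 2);
      p[i] = (unsigned char) ((p[i] << 2) | carry);
      carry = next;
    }
  /* Prints "7f 80", i.e. 01111111|10000000 as in the comment.  */
  printf ("%02x %02x\n", p[1], p[0]);
  return 0;
}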
9123 /* Like shift_bytes_in_array_left but for big-endian.
9124 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9125 bits between adjacent elements. AMNT should be within
9126 [0, BITS_PER_UNIT).
9127 Example, AMNT = 2:
9128 00011111|11100000 >> 2 = 00000111|11111000
9129 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9131 void
9132 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9133 unsigned int amnt)
9135 if (amnt == 0)
9136 return;
9138 unsigned char carry_over = 0U;
9139 unsigned char carry_mask = ~(~0U << amnt);
9141 for (unsigned int i = 0; i < sz; i++)
9143 unsigned prev_carry_over = carry_over;
9144 carry_over = ptr[i] & carry_mask;
9146 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9147 ptr[i] >>= amnt;
9148 ptr[i] |= prev_carry_over;
9152 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9153 directly on the VECTOR_CST encoding, in a way that works for variable-
9154 length vectors. Return the resulting VECTOR_CST on success or null
9155 on failure. */
9157 static tree
9158 fold_view_convert_vector_encoding (tree type, tree expr)
9160 tree expr_type = TREE_TYPE (expr);
9161 poly_uint64 type_bits, expr_bits;
9162 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9163 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9164 return NULL_TREE;
9166 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9167 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9168 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9169 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9171 /* We can only preserve the semantics of a stepped pattern if the new
9172 vector element is an integer of the same size. */
9173 if (VECTOR_CST_STEPPED_P (expr)
9174 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9175 return NULL_TREE;
9177 /* The number of bits needed to encode one element from every pattern
9178 of the original vector. */
9179 unsigned int expr_sequence_bits
9180 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9182 /* The number of bits needed to encode one element from every pattern
9183 of the result. */
9184 unsigned int type_sequence_bits
9185 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9187 /* Don't try to read more bytes than are available, which can happen
9188 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9189 The general VIEW_CONVERT handling can cope with that case, so there's
9190 no point complicating things here. */
9191 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9192 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9193 BITS_PER_UNIT);
9194 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9195 if (known_gt (buffer_bits, expr_bits))
9196 return NULL_TREE;
9198 /* Get enough bytes of EXPR to form the new encoding. */
9199 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9200 buffer.quick_grow (buffer_bytes);
9201 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9202 buffer_bits / expr_elt_bits)
9203 != (int) buffer_bytes)
9204 return NULL_TREE;
9206 /* Reencode the bytes as TYPE. */
9207 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9208 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9209 type_npatterns, nelts_per_pattern);
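/* Editor's worked instance of the encoding arithmetic above, assuming a
   VECTOR_CST with 2 patterns of 32-bit elements and 1 element per
   pattern, reinterpreted as a vector of 16-bit elements:
     expr_sequence_bits = 2 * 32            = 64
     type_sequence_bits = lcm (64, 16)      = 64
     buffer_bytes       = CEIL (1 * 64, 8)  = 8
     type_npatterns     = 64 / 16           = 4
   so one 8-byte sample of the encoding is re-read as 4 patterns of
   16-bit elements with the same number of elements per pattern.  */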
9212 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9213 TYPE at compile-time. If we're unable to perform the conversion
9214 return NULL_TREE. */
9216 static tree
9217 fold_view_convert_expr (tree type, tree expr)
9219 /* We support up to 512-bit values (for V8DFmode). */
9220 unsigned char buffer[64];
9221 int len;
9223 /* Check that the host and target are sane. */
9224 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9225 return NULL_TREE;
9227 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9228 if (tree res = fold_view_convert_vector_encoding (type, expr))
9229 return res;
9231 len = native_encode_expr (expr, buffer, sizeof (buffer));
9232 if (len == 0)
9233 return NULL_TREE;
9235 return native_interpret_expr (type, buffer, len);
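/* Editor's sketch in portable C++ (not GCC internals): for same-sized
   scalars a VIEW_CONVERT_EXPR is a pure bit reinterpretation, and memcpy
   is the standard spelling of the encode/interpret pair used above.  The
   function name is hypothetical.  */
#include <cstdint>
#include <cstring>

static std::uint32_t
sketch_view_convert (float f)
{
  std::uint32_t bits;
  static_assert (sizeof bits == sizeof f, "same-size view conversion");
  std::memcpy (&bits, &f, sizeof bits);  /* native_encode + native_interpret */
  return bits;
}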
9238 /* Build an expression for the address of T. Folds away INDIRECT_REF
9239 to avoid confusing the gimplify process. */
9241 tree
9242 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9244 /* The size of the object is not relevant when talking about its address. */
9245 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9246 t = TREE_OPERAND (t, 0);
9248 if (TREE_CODE (t) == INDIRECT_REF)
9250 t = TREE_OPERAND (t, 0);
9252 if (TREE_TYPE (t) != ptrtype)
9253 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9255 else if (TREE_CODE (t) == MEM_REF
9256 && integer_zerop (TREE_OPERAND (t, 1)))
9258 t = TREE_OPERAND (t, 0);
9260 if (TREE_TYPE (t) != ptrtype)
9261 t = fold_convert_loc (loc, ptrtype, t);
9263 else if (TREE_CODE (t) == MEM_REF
9264 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9265 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9266 TREE_OPERAND (t, 0),
9267 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9268 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9270 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9272 if (TREE_TYPE (t) != ptrtype)
9273 t = fold_convert_loc (loc, ptrtype, t);
9275 else
9276 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9278 return t;
9281 /* Build an expression for the address of T. */
9283 tree
9284 build_fold_addr_expr_loc (location_t loc, tree t)
9286 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9288 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9291 /* Fold a unary expression of code CODE and type TYPE with operand
9292 OP0. Return the folded expression if folding is successful.
9293 Otherwise, return NULL_TREE. */
9295 tree
9296 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9298 tree tem;
9299 tree arg0;
9300 enum tree_code_class kind = TREE_CODE_CLASS (code);
9302 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9303 && TREE_CODE_LENGTH (code) == 1);
9305 arg0 = op0;
9306 if (arg0)
9308 if (CONVERT_EXPR_CODE_P (code)
9309 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9311 /* Don't use STRIP_NOPS, because signedness of argument type
9312 matters. */
9313 STRIP_SIGN_NOPS (arg0);
9315 else
9317 /* Strip any conversions that don't change the mode. This
9318 is safe for every expression, except for a comparison
9319 expression because its signedness is derived from its
9320 operands.
9322 Note that this is done as an internal manipulation within
9323 the constant folder, in order to find the simplest
9324 representation of the arguments so that their form can be
9325 studied. In any case, the appropriate type conversions
9326 should be put back in the tree that will get out of the
9327 constant folder. */
9328 STRIP_NOPS (arg0);
9331 if (CONSTANT_CLASS_P (arg0))
9333 tree tem = const_unop (code, type, arg0);
9334 if (tem)
9336 if (TREE_TYPE (tem) != type)
9337 tem = fold_convert_loc (loc, type, tem);
9338 return tem;
9343 tem = generic_simplify (loc, code, type, op0);
9344 if (tem)
9345 return tem;
9347 if (TREE_CODE_CLASS (code) == tcc_unary)
9349 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9350 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9351 fold_build1_loc (loc, code, type,
9352 fold_convert_loc (loc, TREE_TYPE (op0),
9353 TREE_OPERAND (arg0, 1))));
9354 else if (TREE_CODE (arg0) == COND_EXPR)
9356 tree arg01 = TREE_OPERAND (arg0, 1);
9357 tree arg02 = TREE_OPERAND (arg0, 2);
9358 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9359 arg01 = fold_build1_loc (loc, code, type,
9360 fold_convert_loc (loc,
9361 TREE_TYPE (op0), arg01));
9362 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9363 arg02 = fold_build1_loc (loc, code, type,
9364 fold_convert_loc (loc,
9365 TREE_TYPE (op0), arg02));
9366 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9367 arg01, arg02);
9369 /* If this was a conversion, and all we did was to move into
9370 inside the COND_EXPR, bring it back out. But leave it if
9371 it is a conversion from integer to integer and the
9372 result precision is no wider than a word since such a
9373 conversion is cheap and may be optimized away by combine,
9374 while it couldn't if it were outside the COND_EXPR. Then return
9375 so we don't get into an infinite recursion loop taking the
9376 conversion out and then back in. */
9378 if ((CONVERT_EXPR_CODE_P (code)
9379 || code == NON_LVALUE_EXPR)
9380 && TREE_CODE (tem) == COND_EXPR
9381 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9382 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9383 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9384 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9385 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9386 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9387 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9388 && (INTEGRAL_TYPE_P
9389 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9390 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9391 || flag_syntax_only))
9392 tem = build1_loc (loc, code, type,
9393 build3 (COND_EXPR,
9394 TREE_TYPE (TREE_OPERAND
9395 (TREE_OPERAND (tem, 1), 0)),
9396 TREE_OPERAND (tem, 0),
9397 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9398 TREE_OPERAND (TREE_OPERAND (tem, 2),
9399 0)));
9400 return tem;
9404 switch (code)
9406 case NON_LVALUE_EXPR:
9407 if (!maybe_lvalue_p (op0))
9408 return fold_convert_loc (loc, type, op0);
9409 return NULL_TREE;
9411 CASE_CONVERT:
9412 case FLOAT_EXPR:
9413 case FIX_TRUNC_EXPR:
9414 if (COMPARISON_CLASS_P (op0))
9416 /* If we have (type) (a CMP b) and type is an integral type, return
9417 new expression involving the new type. Canonicalize
9418 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9419 non-integral type.
9420 Do not fold the result, as that would not simplify further; also,
9421 folding again results in recursion. */
9422 if (TREE_CODE (type) == BOOLEAN_TYPE)
9423 return build2_loc (loc, TREE_CODE (op0), type,
9424 TREE_OPERAND (op0, 0),
9425 TREE_OPERAND (op0, 1));
9426 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9427 && TREE_CODE (type) != VECTOR_TYPE)
9428 return build3_loc (loc, COND_EXPR, type, op0,
9429 constant_boolean_node (true, type),
9430 constant_boolean_node (false, type));
9433 /* Handle (T *)&A.B.C for A being of type T and B and C
9434 living at offset zero. This occurs frequently in
9435 C++ upcasting and then accessing the base. */
9436 if (TREE_CODE (op0) == ADDR_EXPR
9437 && POINTER_TYPE_P (type)
9438 && handled_component_p (TREE_OPERAND (op0, 0)))
9440 poly_int64 bitsize, bitpos;
9441 tree offset;
9442 machine_mode mode;
9443 int unsignedp, reversep, volatilep;
9444 tree base
9445 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9446 &offset, &mode, &unsignedp, &reversep,
9447 &volatilep);
9448 /* If the reference was to a (constant) zero offset, we can use
9449 the address of the base if it has the same base type
9450 as the result type and the pointer type is unqualified. */
9451 if (!offset
9452 && known_eq (bitpos, 0)
9453 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9454 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9455 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9456 return fold_convert_loc (loc, type,
9457 build_fold_addr_expr_loc (loc, base));
9460 if (TREE_CODE (op0) == MODIFY_EXPR
9461 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9462 /* Detect assigning a bitfield. */
9463 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9464 && DECL_BIT_FIELD
9465 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9467 /* Don't leave an assignment inside a conversion
9468 unless assigning a bitfield. */
9469 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9470 /* First do the assignment, then return converted constant. */
9471 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9472 suppress_warning (tem /* What warning? */);
9473 TREE_USED (tem) = 1;
9474 return tem;
9477 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9478 constants (if x has signed type, the sign bit cannot be set
9479 in c). This folds extension into the BIT_AND_EXPR.
9480 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9481 very likely don't have maximal range for their precision and this
9482 transformation effectively doesn't preserve non-maximal ranges. */
9483 if (TREE_CODE (type) == INTEGER_TYPE
9484 && TREE_CODE (op0) == BIT_AND_EXPR
9485 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9487 tree and_expr = op0;
9488 tree and0 = TREE_OPERAND (and_expr, 0);
9489 tree and1 = TREE_OPERAND (and_expr, 1);
9490 int change = 0;
9492 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9493 || (TYPE_PRECISION (type)
9494 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9495 change = 1;
9496 else if (TYPE_PRECISION (TREE_TYPE (and1))
9497 <= HOST_BITS_PER_WIDE_INT
9498 && tree_fits_uhwi_p (and1))
9500 unsigned HOST_WIDE_INT cst;
9502 cst = tree_to_uhwi (and1);
9503 cst &= HOST_WIDE_INT_M1U
9504 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9505 change = (cst == 0);
9506 if (change
9507 && !flag_syntax_only
9508 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9509 == ZERO_EXTEND))
9511 tree uns = unsigned_type_for (TREE_TYPE (and0));
9512 and0 = fold_convert_loc (loc, uns, and0);
9513 and1 = fold_convert_loc (loc, uns, and1);
9516 if (change)
9518 tem = force_fit_type (type, wi::to_widest (and1), 0,
9519 TREE_OVERFLOW (and1));
9520 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9521 fold_convert_loc (loc, type, and0), tem);
9525 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9526 cast (T1)X will fold away. We assume that this happens when X itself
9527 is a cast. */
9528 if (POINTER_TYPE_P (type)
9529 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9530 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9532 tree arg00 = TREE_OPERAND (arg0, 0);
9533 tree arg01 = TREE_OPERAND (arg0, 1);
9535 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9536 when the pointed type needs higher alignment than
9537 the p+ first operand's pointed type. */
9538 if (!in_gimple_form
9539 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9540 && (min_align_of_type (TREE_TYPE (type))
9541 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9542 return NULL_TREE;
9544 /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9545 when type is a reference type and arg00's type is not,
9546 because arg00 could be validly nullptr and if arg01 doesn't return,
9547 we don't want false positive binding of reference to nullptr. */
9548 if (TREE_CODE (type) == REFERENCE_TYPE
9549 && !in_gimple_form
9550 && sanitize_flags_p (SANITIZE_NULL)
9551 && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9552 return NULL_TREE;
9554 arg00 = fold_convert_loc (loc, type, arg00);
9555 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9558 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9559 of the same precision, and X is an integer type not narrower than
9560 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9561 if (INTEGRAL_TYPE_P (type)
9562 && TREE_CODE (op0) == BIT_NOT_EXPR
9563 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9564 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9565 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9567 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9568 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9569 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9570 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9571 fold_convert_loc (loc, type, tem));
9574 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9575 type of X and Y (integer types only). */
9576 if (INTEGRAL_TYPE_P (type)
9577 && TREE_CODE (op0) == MULT_EXPR
9578 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9579 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9580 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9581 || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9583 /* Be careful not to introduce new overflows. */
9584 tree mult_type;
9585 if (TYPE_OVERFLOW_WRAPS (type))
9586 mult_type = type;
9587 else
9588 mult_type = unsigned_type_for (type);
9590 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9592 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9593 fold_convert_loc (loc, mult_type,
9594 TREE_OPERAND (op0, 0)),
9595 fold_convert_loc (loc, mult_type,
9596 TREE_OPERAND (op0, 1)));
9597 return fold_convert_loc (loc, type, tem);
9601 return NULL_TREE;
9603 case VIEW_CONVERT_EXPR:
9604 if (TREE_CODE (op0) == MEM_REF)
9606 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9607 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9608 tem = fold_build2_loc (loc, MEM_REF, type,
9609 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9610 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9611 return tem;
9614 return NULL_TREE;
9616 case NEGATE_EXPR:
9617 tem = fold_negate_expr (loc, arg0);
9618 if (tem)
9619 return fold_convert_loc (loc, type, tem);
9620 return NULL_TREE;
9622 case ABS_EXPR:
9623 /* Convert fabs((double)float) into (double)fabsf(float). */
9624 if (TREE_CODE (arg0) == NOP_EXPR
9625 && TREE_CODE (type) == REAL_TYPE)
9627 tree targ0 = strip_float_extensions (arg0);
9628 if (targ0 != arg0)
9629 return fold_convert_loc (loc, type,
9630 fold_build1_loc (loc, ABS_EXPR,
9631 TREE_TYPE (targ0),
9632 targ0));
9634 return NULL_TREE;
9636 case BIT_NOT_EXPR:
9637 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9638 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9639 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9640 fold_convert_loc (loc, type,
9641 TREE_OPERAND (arg0, 0)))))
9642 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9643 fold_convert_loc (loc, type,
9644 TREE_OPERAND (arg0, 1)));
9645 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9646 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9647 fold_convert_loc (loc, type,
9648 TREE_OPERAND (arg0, 1)))))
9649 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9650 fold_convert_loc (loc, type,
9651 TREE_OPERAND (arg0, 0)), tem);
9653 return NULL_TREE;
9655 case TRUTH_NOT_EXPR:
9656 /* Note that the operand of this must be an int
9657 and its values must be 0 or 1.
9658 ("true" is a fixed value perhaps depending on the language,
9659 but we don't handle values other than 1 correctly yet.) */
9660 tem = fold_truth_not_expr (loc, arg0);
9661 if (!tem)
9662 return NULL_TREE;
9663 return fold_convert_loc (loc, type, tem);
9665 case INDIRECT_REF:
9666 /* Fold *&X to X if X is an lvalue. */
9667 if (TREE_CODE (op0) == ADDR_EXPR)
9669 tree op00 = TREE_OPERAND (op0, 0);
9670 if ((VAR_P (op00)
9671 || TREE_CODE (op00) == PARM_DECL
9672 || TREE_CODE (op00) == RESULT_DECL)
9673 && !TREE_READONLY (op00))
9674 return op00;
9676 return NULL_TREE;
9678 default:
9679 return NULL_TREE;
9680 } /* switch (code) */
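/* Editor's compile-time spot check, not part of GCC: the BIT_NOT_EXPR
   case above rewrites ~(X ^ Y) as ~X ^ Y when ~X simplifies; the
   identity it relies on, checked here on sample 8-bit values:  */
static_assert ((unsigned char) ~(0x5au ^ 0x33u)
               == (unsigned char) (~0x5au ^ 0x33u),
               "~(x ^ y) == ~x ^ y");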
9684 /* If the operation was a conversion do _not_ mark a resulting constant
9685 with TREE_OVERFLOW if the original constant was not. These conversions
9686 have implementation defined behavior and retaining the TREE_OVERFLOW
9687 flag here would confuse later passes such as VRP. */
9688 tree
9689 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9690 tree type, tree op0)
9692 tree res = fold_unary_loc (loc, code, type, op0);
9693 if (res
9694 && TREE_CODE (res) == INTEGER_CST
9695 && TREE_CODE (op0) == INTEGER_CST
9696 && CONVERT_EXPR_CODE_P (code))
9697 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9699 return res;
9702 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9703 operands OP0 and OP1. LOC is the location of the resulting expression.
9704 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
9705 Return the folded expression if folding is successful. Otherwise,
9706 return NULL_TREE. */
9707 static tree
9708 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9709 tree arg0, tree arg1, tree op0, tree op1)
9711 tree tem;
9713 /* We only do these simplifications if we are optimizing. */
9714 if (!optimize)
9715 return NULL_TREE;
9717 /* Check for things like (A || B) && (A || C). We can convert this
9718 to A || (B && C). Note that either operator can be any of the four
9719 truth and/or operations and the transformation will still be
9720 valid. Also note that we only care about order for the
9721 ANDIF and ORIF operators. If B contains side effects, this
9722 might change the truth-value of A. */
9723 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9724 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9725 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9726 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9727 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9728 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9730 tree a00 = TREE_OPERAND (arg0, 0);
9731 tree a01 = TREE_OPERAND (arg0, 1);
9732 tree a10 = TREE_OPERAND (arg1, 0);
9733 tree a11 = TREE_OPERAND (arg1, 1);
9734 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9735 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9736 && (code == TRUTH_AND_EXPR
9737 || code == TRUTH_OR_EXPR));
9739 if (operand_equal_p (a00, a10, 0))
9740 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9741 fold_build2_loc (loc, code, type, a01, a11));
9742 else if (commutative && operand_equal_p (a00, a11, 0))
9743 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9744 fold_build2_loc (loc, code, type, a01, a10));
9745 else if (commutative && operand_equal_p (a01, a10, 0))
9746 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9747 fold_build2_loc (loc, code, type, a00, a11));
9749 /* This case is tricky because we must either have commutative
9750 operators or else A10 must not have side-effects. */
9752 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9753 && operand_equal_p (a01, a11, 0))
9754 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9755 fold_build2_loc (loc, code, type, a00, a10),
9756 a01);
9759 /* See if we can build a range comparison. */
9760 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9761 return tem;
9763 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9764 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9766 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9767 if (tem)
9768 return fold_build2_loc (loc, code, type, tem, arg1);
9771 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9772 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9774 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9775 if (tem)
9776 return fold_build2_loc (loc, code, type, arg0, tem);
9779 /* Check for the possibility of merging component references. If our
9780 lhs is another similar operation, try to merge its rhs with our
9781 rhs. Then try to merge our lhs and rhs. */
9782 if (TREE_CODE (arg0) == code
9783 && (tem = fold_truth_andor_1 (loc, code, type,
9784 TREE_OPERAND (arg0, 1), arg1)) != 0)
9785 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9787 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9788 return tem;
9790 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9791 if (param_logical_op_non_short_circuit != -1)
9792 logical_op_non_short_circuit
9793 = param_logical_op_non_short_circuit;
9794 if (logical_op_non_short_circuit
9795 && !sanitize_coverage_p ()
9796 && (code == TRUTH_AND_EXPR
9797 || code == TRUTH_ANDIF_EXPR
9798 || code == TRUTH_OR_EXPR
9799 || code == TRUTH_ORIF_EXPR))
9801 enum tree_code ncode, icode;
9803 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9804 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9805 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9807 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9808 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9809 We don't want to pack more than two leafs to a non-IF AND/OR
9810 expression.
9811 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9812 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9813 If the inner right-hand side of the left-hand operand has
9814 side effects, or isn't simple, then we can't add to it,
9815 as otherwise we might destroy the if-sequence. */
9816 if (TREE_CODE (arg0) == icode
9817 && simple_condition_p (arg1)
9818 /* Needed for sequence points to handle trapping and
9819 side effects. */
9820 && simple_condition_p (TREE_OPERAND (arg0, 1)))
9822 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9823 arg1);
9824 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9825 tem);
9827 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9828 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9829 else if (TREE_CODE (arg1) == icode
9830 && simple_condition_p (arg0)
9831 /* Needed for sequence points to handle trapping and
9832 side effects. */
9833 && simple_condition_p (TREE_OPERAND (arg1, 0)))
9835 tem = fold_build2_loc (loc, ncode, type,
9836 arg0, TREE_OPERAND (arg1, 0));
9837 return fold_build2_loc (loc, icode, type, tem,
9838 TREE_OPERAND (arg1, 1));
9840 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9841 into (A OR B).
9842 For sequence point consistency, we need to check for trapping
9843 and side effects. */
9844 else if (code == icode && simple_condition_p (arg0)
9845 && simple_condition_p (arg1))
9846 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9849 return NULL_TREE;
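/* Editor's sketch, not part of GCC: an exhaustive boolean check of the
   first transform above, (A || B) && (A || C) == A || (B && C).  Assumes
   C++14 constexpr; the helper name is hypothetical.  */
static constexpr bool
sketch_truth_andor_identity ()
{
  for (int a = 0; a <= 1; ++a)
    for (int b = 0; b <= 1; ++b)
      for (int c = 0; c <= 1; ++c)
        if (((a || b) && (a || c)) != (a || (b && c)))
          return false;
  return true;
}
static_assert (sketch_truth_andor_identity (),
               "(A || B) && (A || C) == A || (B && C)");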
9852 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9853 by changing CODE to reduce the magnitude of constants involved in
9854 ARG0 of the comparison.
9855 Returns a canonicalized comparison tree if a simplification was
9856 possible, otherwise returns NULL_TREE.
9857 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9858 valid if signed overflow is undefined. */
9860 static tree
9861 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9862 tree arg0, tree arg1,
9863 bool *strict_overflow_p)
9865 enum tree_code code0 = TREE_CODE (arg0);
9866 tree t, cst0 = NULL_TREE;
9867 int sgn0;
9869 /* Match A +- CST code arg1. We can change this only if overflow
9870 is undefined. */
9871 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9872 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9873 /* In principle pointers also have undefined overflow behavior,
9874 but that causes problems elsewhere. */
9875 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9876 && (code0 == MINUS_EXPR
9877 || code0 == PLUS_EXPR)
9878 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9879 return NULL_TREE;
9881 /* Identify the constant in arg0 and its sign. */
9882 cst0 = TREE_OPERAND (arg0, 1);
9883 sgn0 = tree_int_cst_sgn (cst0);
9885 /* Overflowed constants and zero will cause problems. */
9886 if (integer_zerop (cst0)
9887 || TREE_OVERFLOW (cst0))
9888 return NULL_TREE;
9890 /* See if we can reduce the magnitude of the constant in
9891 arg0 by changing the comparison code. */
9892 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9893 if (code == LT_EXPR
9894 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9895 code = LE_EXPR;
9896 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9897 else if (code == GT_EXPR
9898 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9899 code = GE_EXPR;
9900 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9901 else if (code == LE_EXPR
9902 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9903 code = LT_EXPR;
9904 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9905 else if (code == GE_EXPR
9906 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9907 code = GT_EXPR;
9908 else
9909 return NULL_TREE;
9910 *strict_overflow_p = true;
9912 /* Now build the constant reduced in magnitude. But not if that
9913 would produce one outside of its type's range. */
9914 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9915 && ((sgn0 == 1
9916 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9917 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9918 || (sgn0 == -1
9919 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9920 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9921 return NULL_TREE;
9923 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9924 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9925 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9926 t = fold_convert (TREE_TYPE (arg1), t);
9928 return fold_build2_loc (loc, code, type, t, arg1);
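/* Editor's worked instance of the canonicalization above, valid only
   because signed overflow is undefined: for signed A and CST = 5,
     A - 5 < B   becomes   A - 4 <= B
   i.e. LT_EXPR weakens to LE_EXPR while the constant's magnitude
   shrinks by one.  */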
9931 /* Further canonicalize the comparison ARG0 CODE ARG1 with type TYPE,
9932 assuming undefined overflow. Try to decrease the magnitude of constants involved
9933 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9934 and put sole constants at the second argument position.
9935 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9937 static tree
9938 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9939 tree arg0, tree arg1)
9941 tree t;
9942 bool strict_overflow_p;
9943 const char * const warnmsg = G_("assuming signed overflow does not occur "
9944 "when reducing constant in comparison");
9946 /* Try canonicalization by simplifying arg0. */
9947 strict_overflow_p = false;
9948 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9949 &strict_overflow_p);
9950 if (t)
9952 if (strict_overflow_p)
9953 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9954 return t;
9957 /* Try canonicalization by simplifying arg1 using the swapped
9958 comparison. */
9959 code = swap_tree_comparison (code);
9960 strict_overflow_p = false;
9961 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9962 &strict_overflow_p);
9963 if (t && strict_overflow_p)
9964 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9965 return t;
9968 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9969 space. This is used to avoid issuing overflow warnings for
9970 expressions like &p->x which cannot wrap. */
9972 static bool
9973 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9975 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9976 return true;
9978 if (maybe_lt (bitpos, 0))
9979 return true;
9981 poly_wide_int wi_offset;
9982 int precision = TYPE_PRECISION (TREE_TYPE (base));
9983 if (offset == NULL_TREE)
9984 wi_offset = wi::zero (precision);
9985 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9986 return true;
9987 else
9988 wi_offset = wi::to_poly_wide (offset);
9990 wi::overflow_type overflow;
9991 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9992 precision);
9993 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9994 if (overflow)
9995 return true;
9997 poly_uint64 total_hwi, size;
9998 if (!total.to_uhwi (&total_hwi)
9999 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10000 &size)
10001 || known_eq (size, 0U))
10002 return true;
10004 if (known_le (total_hwi, size))
10005 return false;
10007 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10008 array. */
10009 if (TREE_CODE (base) == ADDR_EXPR
10010 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10011 &size)
10012 && maybe_ne (size, 0U)
10013 && known_le (total_hwi, size))
10014 return false;
10016 return true;
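/* Editor's numeric instance of the check above: for BASE pointing to a
   16-byte object, OFFSET 12 and BITPOS 40 (5 bytes) give TOTAL = 17 > 16,
   so the address arithmetic may leave the object and the function returns
   true; with OFFSET 8, TOTAL = 13 <= 16 and it returns false.  */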
10019 /* Return a positive integer when the symbol DECL is known to have
10020 a nonzero address, zero when it's known not to (e.g., it's a weak
10021 symbol), and a negative integer when the symbol is not yet in the
10022 symbol table and so whether or not its address is zero is unknown.
10023 For function-local objects, always return a positive integer. */
10024 static int
10025 maybe_nonzero_address (tree decl)
10027 /* Normally, don't do anything for variables and functions before symtab is
10028 built; it is quite possible that DECL will be declared weak later.
10029 But if folding_initializer, we need a constant answer now, so create
10030 the symtab entry and prevent later weak declaration. */
10031 if (DECL_P (decl) && decl_in_symtab_p (decl))
10032 if (struct symtab_node *symbol
10033 = (folding_initializer
10034 ? symtab_node::get_create (decl)
10035 : symtab_node::get (decl)))
10036 return symbol->nonzero_address ();
10038 /* Function local objects are never NULL. */
10039 if (DECL_P (decl)
10040 && (DECL_CONTEXT (decl)
10041 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10042 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10043 return 1;
10045 return -1;
10048 /* Subroutine of fold_binary. This routine performs all of the
10049 transformations that are common to the equality/inequality
10050 operators (EQ_EXPR and NE_EXPR) and the ordering operators
10051 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10052 fold_binary should go through fold_binary instead. Fold a comparison with
10053 tree code CODE and type TYPE with operands OP0 and OP1. Return
10054 the folded comparison or NULL_TREE. */
10056 static tree
10057 fold_comparison (location_t loc, enum tree_code code, tree type,
10058 tree op0, tree op1)
10060 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10061 tree arg0, arg1, tem;
10063 arg0 = op0;
10064 arg1 = op1;
10066 STRIP_SIGN_NOPS (arg0);
10067 STRIP_SIGN_NOPS (arg1);
10069 /* For comparisons of pointers we can decompose it to a compile time
10070 comparison of the base objects and the offsets into the object.
10071 This requires at least one operand being an ADDR_EXPR or a
10072 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10073 if (POINTER_TYPE_P (TREE_TYPE (arg0))
10074 && (TREE_CODE (arg0) == ADDR_EXPR
10075 || TREE_CODE (arg1) == ADDR_EXPR
10076 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10077 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10079 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10080 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10081 machine_mode mode;
10082 int volatilep, reversep, unsignedp;
10083 bool indirect_base0 = false, indirect_base1 = false;
10085 /* Get base and offset for the access. Strip ADDR_EXPR for
10086 get_inner_reference, but put it back by stripping INDIRECT_REF
10087 off the base object if possible. indirect_baseN will be true
10088 if baseN is not an address but refers to the object itself. */
10089 base0 = arg0;
10090 if (TREE_CODE (arg0) == ADDR_EXPR)
10092 base0
10093 = get_inner_reference (TREE_OPERAND (arg0, 0),
10094 &bitsize, &bitpos0, &offset0, &mode,
10095 &unsignedp, &reversep, &volatilep);
10096 if (TREE_CODE (base0) == INDIRECT_REF)
10097 base0 = TREE_OPERAND (base0, 0);
10098 else
10099 indirect_base0 = true;
10101 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10103 base0 = TREE_OPERAND (arg0, 0);
10104 STRIP_SIGN_NOPS (base0);
10105 if (TREE_CODE (base0) == ADDR_EXPR)
10107 base0
10108 = get_inner_reference (TREE_OPERAND (base0, 0),
10109 &bitsize, &bitpos0, &offset0, &mode,
10110 &unsignedp, &reversep, &volatilep);
10111 if (TREE_CODE (base0) == INDIRECT_REF)
10112 base0 = TREE_OPERAND (base0, 0);
10113 else
10114 indirect_base0 = true;
10116 if (offset0 == NULL_TREE || integer_zerop (offset0))
10117 offset0 = TREE_OPERAND (arg0, 1);
10118 else
10119 offset0 = size_binop (PLUS_EXPR, offset0,
10120 TREE_OPERAND (arg0, 1));
10121 if (poly_int_tree_p (offset0))
10123 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10124 TYPE_PRECISION (sizetype));
10125 tem <<= LOG2_BITS_PER_UNIT;
10126 tem += bitpos0;
10127 if (tem.to_shwi (&bitpos0))
10128 offset0 = NULL_TREE;
10132 base1 = arg1;
10133 if (TREE_CODE (arg1) == ADDR_EXPR)
10135 base1
10136 = get_inner_reference (TREE_OPERAND (arg1, 0),
10137 &bitsize, &bitpos1, &offset1, &mode,
10138 &unsignedp, &reversep, &volatilep);
10139 if (TREE_CODE (base1) == INDIRECT_REF)
10140 base1 = TREE_OPERAND (base1, 0);
10141 else
10142 indirect_base1 = true;
10144 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10146 base1 = TREE_OPERAND (arg1, 0);
10147 STRIP_SIGN_NOPS (base1);
10148 if (TREE_CODE (base1) == ADDR_EXPR)
10150 base1
10151 = get_inner_reference (TREE_OPERAND (base1, 0),
10152 &bitsize, &bitpos1, &offset1, &mode,
10153 &unsignedp, &reversep, &volatilep);
10154 if (TREE_CODE (base1) == INDIRECT_REF)
10155 base1 = TREE_OPERAND (base1, 0);
10156 else
10157 indirect_base1 = true;
10159 if (offset1 == NULL_TREE || integer_zerop (offset1))
10160 offset1 = TREE_OPERAND (arg1, 1);
10161 else
10162 offset1 = size_binop (PLUS_EXPR, offset1,
10163 TREE_OPERAND (arg1, 1));
10164 if (poly_int_tree_p (offset1))
10166 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10167 TYPE_PRECISION (sizetype));
10168 tem <<= LOG2_BITS_PER_UNIT;
10169 tem += bitpos1;
10170 if (tem.to_shwi (&bitpos1))
10171 offset1 = NULL_TREE;
10175 /* If we have equivalent bases we might be able to simplify. */
10176 if (indirect_base0 == indirect_base1
10177 && operand_equal_p (base0, base1,
10178 indirect_base0 ? OEP_ADDRESS_OF : 0))
10180 /* We can fold this expression to a constant if the non-constant
10181 offset parts are equal. */
10182 if ((offset0 == offset1
10183 || (offset0 && offset1
10184 && operand_equal_p (offset0, offset1, 0)))
10185 && (equality_code
10186 || (indirect_base0
10187 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10188 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10190 if (!equality_code
10191 && maybe_ne (bitpos0, bitpos1)
10192 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10193 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10194 fold_overflow_warning (("assuming pointer wraparound does not "
10195 "occur when comparing P +- C1 with "
10196 "P +- C2"),
10197 WARN_STRICT_OVERFLOW_CONDITIONAL);
10199 switch (code)
10201 case EQ_EXPR:
10202 if (known_eq (bitpos0, bitpos1))
10203 return constant_boolean_node (true, type);
10204 if (known_ne (bitpos0, bitpos1))
10205 return constant_boolean_node (false, type);
10206 break;
10207 case NE_EXPR:
10208 if (known_ne (bitpos0, bitpos1))
10209 return constant_boolean_node (true, type);
10210 if (known_eq (bitpos0, bitpos1))
10211 return constant_boolean_node (false, type);
10212 break;
10213 case LT_EXPR:
10214 if (known_lt (bitpos0, bitpos1))
10215 return constant_boolean_node (true, type);
10216 if (known_ge (bitpos0, bitpos1))
10217 return constant_boolean_node (false, type);
10218 break;
10219 case LE_EXPR:
10220 if (known_le (bitpos0, bitpos1))
10221 return constant_boolean_node (true, type);
10222 if (known_gt (bitpos0, bitpos1))
10223 return constant_boolean_node (false, type);
10224 break;
10225 case GE_EXPR:
10226 if (known_ge (bitpos0, bitpos1))
10227 return constant_boolean_node (true, type);
10228 if (known_lt (bitpos0, bitpos1))
10229 return constant_boolean_node (false, type);
10230 break;
10231 case GT_EXPR:
10232 if (known_gt (bitpos0, bitpos1))
10233 return constant_boolean_node (true, type);
10234 if (known_le (bitpos0, bitpos1))
10235 return constant_boolean_node (false, type);
10236 break;
10237 default:;
10240 /* We can simplify the comparison to a comparison of the variable
10241 offset parts if the constant offset parts are equal.
10242 Be careful to use signed sizetype here because otherwise we
10243 mess with array offsets in the wrong way. This is possible
10244 because pointer arithmetic is restricted to retain within an
10245 object and overflow on pointer differences is undefined as of
10246 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10247 else if (known_eq (bitpos0, bitpos1)
10248 && (equality_code
10249 || (indirect_base0
10250 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10251 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10253 /* By converting to signed sizetype we cover middle-end pointer
10254 arithmetic which operates on unsigned pointer types of size
10255 type size and ARRAY_REF offsets which are properly sign or
10256 zero extended from their type in case it is narrower than
10257 sizetype. */
10258 if (offset0 == NULL_TREE)
10259 offset0 = build_int_cst (ssizetype, 0);
10260 else
10261 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10262 if (offset1 == NULL_TREE)
10263 offset1 = build_int_cst (ssizetype, 0);
10264 else
10265 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10267 if (!equality_code
10268 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10269 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10270 fold_overflow_warning (("assuming pointer wraparound does not "
10271 "occur when comparing P +- C1 with "
10272 "P +- C2"),
10273 WARN_STRICT_OVERFLOW_COMPARISON);
10275 return fold_build2_loc (loc, code, type, offset0, offset1);
10278 /* For equal offsets we can simplify to a comparison of the
10279 base addresses. */
10280 else if (known_eq (bitpos0, bitpos1)
10281 && (indirect_base0
10282 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10283 && (indirect_base1
10284 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10285 && ((offset0 == offset1)
10286 || (offset0 && offset1
10287 && operand_equal_p (offset0, offset1, 0))))
10289 if (indirect_base0)
10290 base0 = build_fold_addr_expr_loc (loc, base0);
10291 if (indirect_base1)
10292 base1 = build_fold_addr_expr_loc (loc, base1);
10293 return fold_build2_loc (loc, code, type, base0, base1);
10295 /* Comparison between an ordinary (non-weak) symbol and a null
10296 pointer can be eliminated since such symbols must have a
10297 non-null address. In C, relational expressions between pointers
10298 to objects and null pointers are undefined. The results
10299 below follow the C++ rules with the additional property that
10300 every object pointer compares greater than a null pointer.
10302 else if (((DECL_P (base0)
10303 && maybe_nonzero_address (base0) > 0
10304 /* Avoid folding references to struct members at offset 0 to
10305 prevent tests like '&ptr->firstmember == 0' from getting
10306 eliminated. When ptr is null, although the -> expression
10307 is strictly speaking invalid, GCC retains it as a matter
10308 of QoI. See PR c/44555. */
10309 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10310 || CONSTANT_CLASS_P (base0))
10311 && indirect_base0
10312 /* The caller guarantees that when one of the arguments is
10313 constant (i.e., null in this case) it is second. */
10314 && integer_zerop (arg1))
10316 switch (code)
10318 case EQ_EXPR:
10319 case LE_EXPR:
10320 case LT_EXPR:
10321 return constant_boolean_node (false, type);
10322 case GE_EXPR:
10323 case GT_EXPR:
10324 case NE_EXPR:
10325 return constant_boolean_node (true, type);
10326 default:
10327 gcc_unreachable ();
10332 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10333 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10334 the resulting offset is smaller in absolute value than the
10335 original one and has the same sign. */
10336 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10337 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10338 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10339 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10340 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10341 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10342 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10343 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10345 tree const1 = TREE_OPERAND (arg0, 1);
10346 tree const2 = TREE_OPERAND (arg1, 1);
10347 tree variable1 = TREE_OPERAND (arg0, 0);
10348 tree variable2 = TREE_OPERAND (arg1, 0);
10349 tree cst;
10350 const char * const warnmsg = G_("assuming signed overflow does not "
10351 "occur when combining constants around "
10352 "a comparison");
10354 /* Put the constant on the side where it doesn't overflow and is
10355 of lower absolute value and of same sign than before. */
10356 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10357 ? MINUS_EXPR : PLUS_EXPR,
10358 const2, const1);
10359 if (!TREE_OVERFLOW (cst)
10360 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10361 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10363 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10364 return fold_build2_loc (loc, code, type,
10365 variable1,
10366 fold_build2_loc (loc, TREE_CODE (arg1),
10367 TREE_TYPE (arg1),
10368 variable2, cst));
10371 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10372 ? MINUS_EXPR : PLUS_EXPR,
10373 const1, const2);
10374 if (!TREE_OVERFLOW (cst)
10375 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10376 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10379 return fold_build2_loc (loc, code, type,
10380 fold_build2_loc (loc, TREE_CODE (arg0),
10381 TREE_TYPE (arg0),
10382 variable1, cst),
10383 variable2);
10387 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10388 if (tem)
10389 return tem;
10391 /* If we are comparing an expression that just has comparisons
10392 of two integer values, arithmetic expressions of those comparisons,
10393 and constants, we can simplify it. There are only three cases
10394 to check: the two values can either be equal, the first can be
10395 greater, or the second can be greater. Fold the expression for
10396 those three values. Since each value must be 0 or 1, we have
10397 eight possibilities, each of which corresponds to the constant 0
10398 or 1 or one of the six possible comparisons.
10400 This handles common cases like (a > b) == 0 but also handles
10401 expressions like ((x > y) - (y > x)) > 0, which supposedly
10402 occur in macroized code. */
10404 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10406 tree cval1 = 0, cval2 = 0;
10408 if (twoval_comparison_p (arg0, &cval1, &cval2)
10409 /* Don't handle degenerate cases here; they should already
10410 have been handled anyway. */
10411 && cval1 != 0 && cval2 != 0
10412 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10413 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10414 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10415 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10416 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10417 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10418 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10420 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10421 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10423 /* We can't just pass T to eval_subst in case cval1 or cval2
10424 was the same as ARG1. */
10426 tree high_result
10427 = fold_build2_loc (loc, code, type,
10428 eval_subst (loc, arg0, cval1, maxval,
10429 cval2, minval),
10430 arg1);
10431 tree equal_result
10432 = fold_build2_loc (loc, code, type,
10433 eval_subst (loc, arg0, cval1, maxval,
10434 cval2, maxval),
10435 arg1);
10436 tree low_result
10437 = fold_build2_loc (loc, code, type,
10438 eval_subst (loc, arg0, cval1, minval,
10439 cval2, maxval),
10440 arg1);
10442 /* All three of these results should be 0 or 1. Confirm they are.
10443 Then use those values to select the proper code to use. */
10445 if (TREE_CODE (high_result) == INTEGER_CST
10446 && TREE_CODE (equal_result) == INTEGER_CST
10447 && TREE_CODE (low_result) == INTEGER_CST)
10449 /* Make a 3-bit mask with the high-order bit being the
10450 value for `>', the next for '=', and the low for '<'. */
10451 switch ((integer_onep (high_result) * 4)
10452 + (integer_onep (equal_result) * 2)
10453 + integer_onep (low_result))
10455 case 0:
10456 /* Always false. */
10457 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10458 case 1:
10459 code = LT_EXPR;
10460 break;
10461 case 2:
10462 code = EQ_EXPR;
10463 break;
10464 case 3:
10465 code = LE_EXPR;
10466 break;
10467 case 4:
10468 code = GT_EXPR;
10469 break;
10470 case 5:
10471 code = NE_EXPR;
10472 break;
10473 case 6:
10474 code = GE_EXPR;
10475 break;
10476 case 7:
10477 /* Always true. */
10478 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10481 return fold_build2_loc (loc, code, type, cval1, cval2);
10486 return NULL_TREE;
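/* Editor's worked instance of the 3-bit dispatch above for the sample
   expression ((x > y) - (y > x)) > 0: substituting (max, min),
   (max, max) and (min, max) for (x, y) evaluates the expression to
   1, 0 and 0 respectively, giving mask 4 (0b100), so the whole test
   folds to the single comparison x > y.  */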
10490 /* Subroutine of fold_binary. Optimize complex multiplications of the
10491 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10492 argument EXPR represents the expression "z" of type TYPE. */
10494 static tree
10495 fold_mult_zconjz (location_t loc, tree type, tree expr)
10497 tree itype = TREE_TYPE (type);
10498 tree rpart, ipart, tem;
10500 if (TREE_CODE (expr) == COMPLEX_EXPR)
10502 rpart = TREE_OPERAND (expr, 0);
10503 ipart = TREE_OPERAND (expr, 1);
10505 else if (TREE_CODE (expr) == COMPLEX_CST)
10507 rpart = TREE_REALPART (expr);
10508 ipart = TREE_IMAGPART (expr);
10510 else
10512 expr = save_expr (expr);
10513 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10514 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10517 rpart = save_expr (rpart);
10518 ipart = save_expr (ipart);
10519 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10520 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10521 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10522 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10523 build_zero_cst (itype));
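/* Editor's sketch in standard C++ (not GCC internals): z * conj(z)
   collapses to rpart*rpart + ipart*ipart with a zero imaginary part,
   e.g. (3 + 4i) * (3 - 4i) = 25 + 0i.  The function name is
   hypothetical.  */
#include <cassert>
#include <complex>

static void
sketch_zconjz ()
{
  std::complex<double> z (3.0, 4.0);
  assert (z * std::conj (z) == std::complex<double> (25.0, 0.0));
}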
10527 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10528 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10529 true if successful. */
10531 static bool
10532 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10534 unsigned HOST_WIDE_INT i, nunits;
10536 if (TREE_CODE (arg) == VECTOR_CST
10537 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10539 for (i = 0; i < nunits; ++i)
10540 elts[i] = VECTOR_CST_ELT (arg, i);
10542 else if (TREE_CODE (arg) == CONSTRUCTOR)
10544 constructor_elt *elt;
10546 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10547 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10548 return false;
10549 else
10550 elts[i] = elt->value;
10552 else
10553 return false;
10554 for (; i < nelts; i++)
10555 elts[i]
10556 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10557 return true;
10560 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10561 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10562 NULL_TREE otherwise. */
10564 tree
10565 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10567 unsigned int i;
10568 unsigned HOST_WIDE_INT nelts;
10569 bool need_ctor = false;
10571 if (!sel.length ().is_constant (&nelts))
10572 return NULL_TREE;
10573 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10574 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10575 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10576 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10577 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10578 return NULL_TREE;
10580 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10581 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10582 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10583 return NULL_TREE;
10585 tree_vector_builder out_elts (type, nelts, 1);
10586 for (i = 0; i < nelts; i++)
10588 HOST_WIDE_INT index;
10589 if (!sel[i].is_constant (&index))
10590 return NULL_TREE;
10591 if (!CONSTANT_CLASS_P (in_elts[index]))
10592 need_ctor = true;
10593 out_elts.quick_push (unshare_expr (in_elts[index]));
10596 if (need_ctor)
10598 vec<constructor_elt, va_gc> *v;
10599 vec_alloc (v, nelts);
10600 for (i = 0; i < nelts; i++)
10601 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10602 return build_constructor (type, v);
10604 else
10605 return out_elts.build ();
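/* Editor's sketch of the selection rule above, not part of GCC: SEL
   indexes the virtual concatenation of the two inputs, so for
   N-element vectors an index I < N selects from ARG0 and I - N from
   ARG1.  The function name is hypothetical.  */
static void
sketch_vec_perm (const int *arg0, const int *arg1,
                 const unsigned int *sel, int *out, unsigned int n)
{
  for (unsigned int i = 0; i < n; i++)
    out[i] = sel[i] < n ? arg0[sel[i]] : arg1[sel[i] - n];
}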
10608 /* Try to fold a pointer difference of type TYPE between two address
10609 expressions of array references AREF0 and AREF1 using location LOC. Return a
10610 simplified expression for the difference or NULL_TREE. */
10612 static tree
10613 fold_addr_of_array_ref_difference (location_t loc, tree type,
10614 tree aref0, tree aref1,
10615 bool use_pointer_diff)
10617 tree base0 = TREE_OPERAND (aref0, 0);
10618 tree base1 = TREE_OPERAND (aref1, 0);
10619 tree base_offset = build_int_cst (type, 0);
10621 /* If the bases are array references as well, recurse. If the bases
10622 are pointer indirections compute the difference of the pointers.
10623 If the bases are equal, we are set. */
10624 if ((TREE_CODE (base0) == ARRAY_REF
10625 && TREE_CODE (base1) == ARRAY_REF
10626 && (base_offset
10627 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10628 use_pointer_diff)))
10629 || (INDIRECT_REF_P (base0)
10630 && INDIRECT_REF_P (base1)
10631 && (base_offset
10632 = use_pointer_diff
10633 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10634 TREE_OPERAND (base0, 0),
10635 TREE_OPERAND (base1, 0))
10636 : fold_binary_loc (loc, MINUS_EXPR, type,
10637 fold_convert (type,
10638 TREE_OPERAND (base0, 0)),
10639 fold_convert (type,
10640 TREE_OPERAND (base1, 0)))))
10641 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10643 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10644 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10645 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10646 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10647 return fold_build2_loc (loc, PLUS_EXPR, type,
10648 base_offset,
10649 fold_build2_loc (loc, MULT_EXPR, type,
10650 diff, esz));
10652 return NULL_TREE;
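/* Editor's worked instance of the fold above: for int A[10], the
   difference &A[7] - &A[2] recurses to a zero base offset, computes
   diff = 7 - 2 = 5 and element size 4, and so folds to
   0 + 5 * 4 = 20, the distance in bytes.  */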
10655 /* If the real or vector real constant CST of type TYPE has an exact
10656 inverse, return it, else return NULL. */
10658 tree
10659 exact_inverse (tree type, tree cst)
10661 REAL_VALUE_TYPE r;
10662 tree unit_type;
10663 machine_mode mode;
10665 switch (TREE_CODE (cst))
10667 case REAL_CST:
10668 r = TREE_REAL_CST (cst);
10670 if (exact_real_inverse (TYPE_MODE (type), &r))
10671 return build_real (type, r);
10673 return NULL_TREE;
10675 case VECTOR_CST:
10677 unit_type = TREE_TYPE (type);
10678 mode = TYPE_MODE (unit_type);
10680 tree_vector_builder elts;
10681 if (!elts.new_unary_operation (type, cst, false))
10682 return NULL_TREE;
10683 unsigned int count = elts.encoded_nelts ();
10684 for (unsigned int i = 0; i < count; ++i)
10686 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10687 if (!exact_real_inverse (mode, &r))
10688 return NULL_TREE;
10689 elts.quick_push (build_real (unit_type, r));
10692 return elts.build ();
10695 default:
10696 return NULL_TREE;
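/* Editor's sketch in plain C++ of when a double has an exact
   reciprocal: the significand must be a power of two and 1/D must stay
   finite and exact.  GCC's exact_real_inverse performs an analogous
   check in the target's floating-point representation; the function
   name here is hypothetical.  */
#include <cmath>

static bool
sketch_has_exact_inverse (double d)
{
  if (d == 0.0 || !std::isfinite (d))
    return false;
  int exp;
  /* d == m * 2^exp with 0.5 <= |m| < 1; |m| == 0.5 iff d is a power
     of two, the only doubles with binary-terminating reciprocals.  */
  if (std::fabs (std::frexp (d, &exp)) != 0.5)
    return false;
  double inv = 1.0 / d;
  return std::isfinite (inv) && inv * d == 1.0;
}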
10700 /* Mask out the tz least significant bits of X of type TYPE where
10701 tz is the number of trailing zeroes in Y. */
10702 static wide_int
10703 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10705 int tz = wi::ctz (y);
10706 if (tz > 0)
10707 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10708 return x;
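/* Editor's worked instance: for Y = 0b11000, tz = 3, so the result is
   X & ~0b111, clearing X's three low bits.  A plain C++ equivalent,
   assuming a nonzero Y so the trailing-zero count is well defined:  */
static unsigned int
sketch_mask_with_tz (unsigned int x, unsigned int y)
{
  int tz = __builtin_ctz (y);  /* number of trailing zeros in Y */
  return tz > 0 ? (x & ~((1u << tz) - 1)) : x;
}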
10711 /* Return true when T is an address and is known to be nonzero.
10712 For floating point we further ensure that T is not denormal.
10713 Similar logic is present in nonzero_address in rtlanal.h.
10715 If the return value is based on the assumption that signed overflow
10716 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10717 change *STRICT_OVERFLOW_P. */
10719 static bool
10720 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10722 tree type = TREE_TYPE (t);
10723 enum tree_code code;
10725 /* Doing something useful for floating point would need more work. */
10726 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10727 return false;
10729 code = TREE_CODE (t);
10730 switch (TREE_CODE_CLASS (code))
10732 case tcc_unary:
10733 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10734 strict_overflow_p);
10735 case tcc_binary:
10736 case tcc_comparison:
10737 return tree_binary_nonzero_warnv_p (code, type,
10738 TREE_OPERAND (t, 0),
10739 TREE_OPERAND (t, 1),
10740 strict_overflow_p);
10741 case tcc_constant:
10742 case tcc_declaration:
10743 case tcc_reference:
10744 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10746 default:
10747 break;
10750 switch (code)
10752 case TRUTH_NOT_EXPR:
10753 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10754 strict_overflow_p);
10756 case TRUTH_AND_EXPR:
10757 case TRUTH_OR_EXPR:
10758 case TRUTH_XOR_EXPR:
10759 return tree_binary_nonzero_warnv_p (code, type,
10760 TREE_OPERAND (t, 0),
10761 TREE_OPERAND (t, 1),
10762 strict_overflow_p);
10764 case COND_EXPR:
10765 case CONSTRUCTOR:
10766 case OBJ_TYPE_REF:
10767 case ADDR_EXPR:
10768 case WITH_SIZE_EXPR:
10769 case SSA_NAME:
10770 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10772 case COMPOUND_EXPR:
10773 case MODIFY_EXPR:
10774 case BIND_EXPR:
10775 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10776 strict_overflow_p);
10778 case SAVE_EXPR:
10779 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10780 strict_overflow_p);
10782 case CALL_EXPR:
10784 tree fndecl = get_callee_fndecl (t);
10785 if (!fndecl) return false;
10786 if (flag_delete_null_pointer_checks && !flag_check_new
10787 && DECL_IS_OPERATOR_NEW_P (fndecl)
10788 && !TREE_NOTHROW (fndecl))
10789 return true;
10790 if (flag_delete_null_pointer_checks
10791 && lookup_attribute ("returns_nonnull",
10792 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10793 return true;
10794 return alloca_call_p (t);
10797 default:
10798 break;
10800 return false;
10803 /* Return true when T is an address and is known to be nonzero.
10804 Handle warnings about undefined signed overflow. */
10806 bool
10807 tree_expr_nonzero_p (tree t)
10809 bool ret, strict_overflow_p;
10811 strict_overflow_p = false;
10812 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10813 if (strict_overflow_p)
10814 fold_overflow_warning (("assuming signed overflow does not occur when "
10815 "determining that expression is always "
10816 "non-zero"),
10817 WARN_STRICT_OVERFLOW_MISC);
10818 return ret;
10821 /* Return true if T is known not to be equal to an integer W. */
10823 bool
10824 expr_not_equal_to (tree t, const wide_int &w)
10826 int_range_max vr;
10827 switch (TREE_CODE (t))
10829 case INTEGER_CST:
10830 return wi::to_wide (t) != w;
10832 case SSA_NAME:
10833 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10834 return false;
10836 if (cfun)
10837 get_range_query (cfun)->range_of_expr (vr, t);
10838 else
10839 get_global_range_query ()->range_of_expr (vr, t);
10841 if (!vr.undefined_p ()
10842 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10843 return true;
10844 /* If T has some known zero bits and W has any of those bits set,
10845 then T is known not to be equal to W. */
10846 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10847 TYPE_PRECISION (TREE_TYPE (t))), 0))
10848 return true;
10849 return false;
10851 default:
10852 return false;
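/* Editor's sketch of the zero-bits argument above: any bit of W set
   outside T's possible nonzero bits forces T != W; e.g. nonzero bits
   0xff against W = 0x100 leaves 0x100 != 0.  The function name is
   hypothetical.  */
static bool
sketch_known_ne_by_zero_bits (unsigned int t_nonzero_bits, unsigned int w)
{
  return (w & ~t_nonzero_bits) != 0;
}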
10856 /* Fold a binary expression of code CODE and type TYPE with operands
10857 OP0 and OP1. LOC is the location of the resulting expression.
10858 Return the folded expression if folding is successful. Otherwise,
10859 return NULL_TREE. */
10861 tree
10862 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10863 tree op0, tree op1)
10865 enum tree_code_class kind = TREE_CODE_CLASS (code);
10866 tree arg0, arg1, tem;
10867 tree t1 = NULL_TREE;
10868 bool strict_overflow_p;
10869 unsigned int prec;
10871 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10872 && TREE_CODE_LENGTH (code) == 2
10873 && op0 != NULL_TREE
10874 && op1 != NULL_TREE);
10876 arg0 = op0;
10877 arg1 = op1;
10879 /* Strip any conversions that don't change the mode. This is
10880 safe for every expression, except for a comparison expression
10881 because its signedness is derived from its operands. So, in
10882 the latter case, only strip conversions that don't change the
10883 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10884 preserved.
10886 Note that this is done as an internal manipulation within the
10887 constant folder, in order to find the simplest representation
10888 of the arguments so that their form can be studied. In any
10889 cases, the appropriate type conversions should be put back in
10890 the tree that will get out of the constant folder. */
10892 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10894 STRIP_SIGN_NOPS (arg0);
10895 STRIP_SIGN_NOPS (arg1);
10897 else
10899 STRIP_NOPS (arg0);
10900 STRIP_NOPS (arg1);
10903 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10904 constant but we can't do arithmetic on them. */
10905 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10907 tem = const_binop (code, type, arg0, arg1);
10908 if (tem != NULL_TREE)
10910 if (TREE_TYPE (tem) != type)
10911 tem = fold_convert_loc (loc, type, tem);
10912 return tem;
10916 /* If this is a commutative operation, and ARG0 is a constant, move it
10917 to ARG1 to reduce the number of tests below. */
10918 if (commutative_tree_code (code)
10919 && tree_swap_operands_p (arg0, arg1))
10920 return fold_build2_loc (loc, code, type, op1, op0);
10922 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10923 to ARG1 to reduce the number of tests below. */
10924 if (kind == tcc_comparison
10925 && tree_swap_operands_p (arg0, arg1))
10926 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10928 tem = generic_simplify (loc, code, type, op0, op1);
10929 if (tem)
10930 return tem;
10932 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10934 First check for cases where an arithmetic operation is applied to a
10935 compound, conditional, or comparison operation. Push the arithmetic
10936 operation inside the compound or conditional to see if any folding
10937 can then be done. Convert comparison to conditional for this purpose.
10938 This also optimizes non-constant cases that used to be done in
10939 expand_expr.
10941 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10942 where one of the operands is a comparison and the other is a comparison, a
10943 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10944 code below would make the expression more complex. Change it to a
10945 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10946 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10948 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10949 || code == EQ_EXPR || code == NE_EXPR)
10950 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10951 && ((truth_value_p (TREE_CODE (arg0))
10952 && (truth_value_p (TREE_CODE (arg1))
10953 || (TREE_CODE (arg1) == BIT_AND_EXPR
10954 && integer_onep (TREE_OPERAND (arg1, 1)))))
10955 || (truth_value_p (TREE_CODE (arg1))
10956 && (truth_value_p (TREE_CODE (arg0))
10957 || (TREE_CODE (arg0) == BIT_AND_EXPR
10958 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10960 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10961 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10962 : TRUTH_XOR_EXPR,
10963 boolean_type_node,
10964 fold_convert_loc (loc, boolean_type_node, arg0),
10965 fold_convert_loc (loc, boolean_type_node, arg1));
10967 if (code == EQ_EXPR)
10968 tem = invert_truthvalue_loc (loc, tem);
10970 return fold_convert_loc (loc, type, tem);
10973 if (TREE_CODE_CLASS (code) == tcc_binary
10974 || TREE_CODE_CLASS (code) == tcc_comparison)
10976 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10978 tem = fold_build2_loc (loc, code, type,
10979 fold_convert_loc (loc, TREE_TYPE (op0),
10980 TREE_OPERAND (arg0, 1)), op1);
10981 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10982 tem);
10984 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10986 tem = fold_build2_loc (loc, code, type, op0,
10987 fold_convert_loc (loc, TREE_TYPE (op1),
10988 TREE_OPERAND (arg1, 1)));
10989 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10990 tem);
10993 if (TREE_CODE (arg0) == COND_EXPR
10994 || TREE_CODE (arg0) == VEC_COND_EXPR
10995 || COMPARISON_CLASS_P (arg0))
10997 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10998 arg0, arg1,
10999 /*cond_first_p=*/1);
11000 if (tem != NULL_TREE)
11001 return tem;
11004 if (TREE_CODE (arg1) == COND_EXPR
11005 || TREE_CODE (arg1) == VEC_COND_EXPR
11006 || COMPARISON_CLASS_P (arg1))
11008 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11009 arg1, arg0,
11010 /*cond_first_p=*/0);
11011 if (tem != NULL_TREE)
11012 return tem;
11016 switch (code)
11018 case MEM_REF:
11019 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11020 if (TREE_CODE (arg0) == ADDR_EXPR
11021 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11023 tree iref = TREE_OPERAND (arg0, 0);
11024 return fold_build2 (MEM_REF, type,
11025 TREE_OPERAND (iref, 0),
11026 int_const_binop (PLUS_EXPR, arg1,
11027 TREE_OPERAND (iref, 1)));
11030 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11031 if (TREE_CODE (arg0) == ADDR_EXPR
11032 && handled_component_p (TREE_OPERAND (arg0, 0)))
11034 tree base;
11035 poly_int64 coffset;
11036 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11037 &coffset);
11038 if (!base)
11039 return NULL_TREE;
11040 return fold_build2 (MEM_REF, type,
11041 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11042 int_const_binop (PLUS_EXPR, arg1,
11043 size_int (coffset)));
11046 return NULL_TREE;
11048 case POINTER_PLUS_EXPR:
11049 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11050 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11051 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11052 return fold_convert_loc (loc, type,
11053 fold_build2_loc (loc, PLUS_EXPR, sizetype,
11054 fold_convert_loc (loc, sizetype,
11055 arg1),
11056 fold_convert_loc (loc, sizetype,
11057 arg0)));
11059 return NULL_TREE;
11061 case PLUS_EXPR:
11062 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11064 /* X + (X / CST) * -CST is X % CST. */
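          /* This is just the defining identity of truncating division:
             X % CST == X - (X / CST) * CST.  E.g. with X == 7 and
             CST == 4, 7 - (7 / 4) * 4 == 7 - 4 == 3 == 7 % 4.  */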
11065 if (TREE_CODE (arg1) == MULT_EXPR
11066 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11067 && operand_equal_p (arg0,
11068 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11070 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11071 tree cst1 = TREE_OPERAND (arg1, 1);
11072 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11073 cst1, cst0);
11074 if (sum && integer_zerop (sum))
11075 return fold_convert_loc (loc, type,
11076 fold_build2_loc (loc, TRUNC_MOD_EXPR,
11077 TREE_TYPE (arg0), arg0,
11078 cst0));
11082 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11083 one. Make sure the type is not saturating and has the signedness of
11084 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11085 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11086 if ((TREE_CODE (arg0) == MULT_EXPR
11087 || TREE_CODE (arg1) == MULT_EXPR)
11088 && !TYPE_SATURATING (type)
11089 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11090 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11091 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11093 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11094 if (tem)
11095 return tem;
11098 if (! FLOAT_TYPE_P (type))
11100 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11101 (plus (plus (mult) (mult)) (foo)) so that we can
11102 take advantage of the factoring cases below. */
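              /* E.g. in a wrapping type, (x*2 + y) + x*3 is rewritten
                 as (x*2 + x*3) + y, which fold_plusminus_mult_expr can
                 then factor into x*5 + y.  */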
11103 if (ANY_INTEGRAL_TYPE_P (type)
11104 && TYPE_OVERFLOW_WRAPS (type)
11105 && (((TREE_CODE (arg0) == PLUS_EXPR
11106 || TREE_CODE (arg0) == MINUS_EXPR)
11107 && TREE_CODE (arg1) == MULT_EXPR)
11108 || ((TREE_CODE (arg1) == PLUS_EXPR
11109 || TREE_CODE (arg1) == MINUS_EXPR)
11110 && TREE_CODE (arg0) == MULT_EXPR)))
11112 tree parg0, parg1, parg, marg;
11113 enum tree_code pcode;
11115 if (TREE_CODE (arg1) == MULT_EXPR)
11116 parg = arg0, marg = arg1;
11117 else
11118 parg = arg1, marg = arg0;
11119 pcode = TREE_CODE (parg);
11120 parg0 = TREE_OPERAND (parg, 0);
11121 parg1 = TREE_OPERAND (parg, 1);
11122 STRIP_NOPS (parg0);
11123 STRIP_NOPS (parg1);
11125 if (TREE_CODE (parg0) == MULT_EXPR
11126 && TREE_CODE (parg1) != MULT_EXPR)
11127 return fold_build2_loc (loc, pcode, type,
11128 fold_build2_loc (loc, PLUS_EXPR, type,
11129 fold_convert_loc (loc, type,
11130 parg0),
11131 fold_convert_loc (loc, type,
11132 marg)),
11133 fold_convert_loc (loc, type, parg1));
11134 if (TREE_CODE (parg0) != MULT_EXPR
11135 && TREE_CODE (parg1) == MULT_EXPR)
11136 return
11137 fold_build2_loc (loc, PLUS_EXPR, type,
11138 fold_convert_loc (loc, type, parg0),
11139 fold_build2_loc (loc, pcode, type,
11140 fold_convert_loc (loc, type, marg),
11141 fold_convert_loc (loc, type,
11142 parg1)));
11145 else
11147 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11148 to __complex__ ( x, y ). This is not the same for SNaNs or
11149 if signed zeros are involved. */
11150 if (!HONOR_SNANS (arg0)
11151 && !HONOR_SIGNED_ZEROS (arg0)
11152 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11154 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11155 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11156 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11157 bool arg0rz = false, arg0iz = false;
11158 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11159 || (arg0i && (arg0iz = real_zerop (arg0i))))
11161 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11162 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11163 if (arg0rz && arg1i && real_zerop (arg1i))
11165 tree rp = arg1r ? arg1r
11166 : build1 (REALPART_EXPR, rtype, arg1);
11167 tree ip = arg0i ? arg0i
11168 : build1 (IMAGPART_EXPR, rtype, arg0);
11169 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11171 else if (arg0iz && arg1r && real_zerop (arg1r))
11173 tree rp = arg0r ? arg0r
11174 : build1 (REALPART_EXPR, rtype, arg0);
11175 tree ip = arg1i ? arg1i
11176 : build1 (IMAGPART_EXPR, rtype, arg1);
11177 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11182 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11183 We associate floats only if the user has specified
11184 -fassociative-math. */
11185 if (flag_associative_math
11186 && TREE_CODE (arg1) == PLUS_EXPR
11187 && TREE_CODE (arg0) != MULT_EXPR)
11189 tree tree10 = TREE_OPERAND (arg1, 0);
11190 tree tree11 = TREE_OPERAND (arg1, 1);
11191 if (TREE_CODE (tree11) == MULT_EXPR
11192 && TREE_CODE (tree10) == MULT_EXPR)
11194 tree tree0;
11195 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11196 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11199 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11200 We associate floats only if the user has specified
11201 -fassociative-math. */
11202 if (flag_associative_math
11203 && TREE_CODE (arg0) == PLUS_EXPR
11204 && TREE_CODE (arg1) != MULT_EXPR)
11206 tree tree00 = TREE_OPERAND (arg0, 0);
11207 tree tree01 = TREE_OPERAND (arg0, 1);
11208 if (TREE_CODE (tree01) == MULT_EXPR
11209 && TREE_CODE (tree00) == MULT_EXPR)
11211 tree tree0;
11212 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11213 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11218 bit_rotate:
11219 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11220 is a rotate of A by C1 bits. */
11221 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11222 is a rotate of A by B bits.
11223 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11224 though in this case CODE must be | and not + or ^, otherwise
11225 it doesn't return A when B is 0. */
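        /* E.g. for a 32-bit unsigned A, (A << 3) + (A >> 29) becomes A
           rotated left by 3, and (A << B) | (A >> (-B & 31)) becomes a
           rotate of A by B.  */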
11227 enum tree_code code0, code1;
11228 tree rtype;
11229 code0 = TREE_CODE (arg0);
11230 code1 = TREE_CODE (arg1);
11231 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11232 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11233 && operand_equal_p (TREE_OPERAND (arg0, 0),
11234 TREE_OPERAND (arg1, 0), 0)
11235 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11236 TYPE_UNSIGNED (rtype))
11237 /* Only create rotates in complete modes. Other cases are not
11238 expanded properly. */
11239 && (element_precision (rtype)
11240 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11242 tree tree01, tree11;
11243 tree orig_tree01, orig_tree11;
11244 enum tree_code code01, code11;
11246 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11247 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11248 STRIP_NOPS (tree01);
11249 STRIP_NOPS (tree11);
11250 code01 = TREE_CODE (tree01);
11251 code11 = TREE_CODE (tree11);
11252 if (code11 != MINUS_EXPR
11253 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11255 std::swap (code0, code1);
11256 std::swap (code01, code11);
11257 std::swap (tree01, tree11);
11258 std::swap (orig_tree01, orig_tree11);
11260 if (code01 == INTEGER_CST
11261 && code11 == INTEGER_CST
11262 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11263 == element_precision (rtype)))
11265 tem = build2_loc (loc, LROTATE_EXPR,
11266 rtype, TREE_OPERAND (arg0, 0),
11267 code0 == LSHIFT_EXPR
11268 ? orig_tree01 : orig_tree11);
11269 return fold_convert_loc (loc, type, tem);
11271 else if (code11 == MINUS_EXPR)
11273 tree tree110, tree111;
11274 tree110 = TREE_OPERAND (tree11, 0);
11275 tree111 = TREE_OPERAND (tree11, 1);
11276 STRIP_NOPS (tree110);
11277 STRIP_NOPS (tree111);
11278 if (TREE_CODE (tree110) == INTEGER_CST
11279 && compare_tree_int (tree110,
11280 element_precision (rtype)) == 0
11281 && operand_equal_p (tree01, tree111, 0))
11283 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11284 ? LROTATE_EXPR : RROTATE_EXPR),
11285 rtype, TREE_OPERAND (arg0, 0),
11286 orig_tree01);
11287 return fold_convert_loc (loc, type, tem);
11290 else if (code == BIT_IOR_EXPR
11291 && code11 == BIT_AND_EXPR
11292 && pow2p_hwi (element_precision (rtype)))
11294 tree tree110, tree111;
11295 tree110 = TREE_OPERAND (tree11, 0);
11296 tree111 = TREE_OPERAND (tree11, 1);
11297 STRIP_NOPS (tree110);
11298 STRIP_NOPS (tree111);
11299 if (TREE_CODE (tree110) == NEGATE_EXPR
11300 && TREE_CODE (tree111) == INTEGER_CST
11301 && compare_tree_int (tree111,
11302 element_precision (rtype) - 1) == 0
11303 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11305 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11306 ? LROTATE_EXPR : RROTATE_EXPR),
11307 rtype, TREE_OPERAND (arg0, 0),
11308 orig_tree01);
11309 return fold_convert_loc (loc, type, tem);
11315 associate:
11316 /* In most languages, can't associate operations on floats through
11317 parentheses. Rather than remember where the parentheses were, we
11318 don't associate floats at all, unless the user has specified
11319 -fassociative-math.
11320 And, we need to make sure type is not saturating. */
11322 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11323 && !TYPE_SATURATING (type)
11324 && !TYPE_OVERFLOW_SANITIZED (type))
11326 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11327 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11328 tree atype = type;
11329 bool ok = true;
11331 /* Split both trees into variables, constants, and literals. Then
11332 associate each group together, the constants with literals,
11333 then the result with variables. This increases the chances of
11334 literals being recombined later and of generating relocatable
11335 expressions for the sum of a constant and literal. */
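          /* E.g. in a wrapping (e.g. unsigned) type, (x + 1) + (y + 2)
             splits into the variables x, y and the literals 1, 2,
             which recombine to (x + y) + 3.  */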
11336 var0 = split_tree (arg0, type, code,
11337 &minus_var0, &con0, &minus_con0,
11338 &lit0, &minus_lit0, 0);
11339 var1 = split_tree (arg1, type, code,
11340 &minus_var1, &con1, &minus_con1,
11341 &lit1, &minus_lit1, code == MINUS_EXPR);
11343 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11344 if (code == MINUS_EXPR)
11345 code = PLUS_EXPR;
11347 /* With undefined overflow prefer doing association in a type
11348 which wraps on overflow, if that is one of the operand types. */
11349 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11350 && !TYPE_OVERFLOW_WRAPS (type))
11352 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11353 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11354 atype = TREE_TYPE (arg0);
11355 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11356 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11357 atype = TREE_TYPE (arg1);
11358 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11361 /* With undefined overflow we can only associate constants with one
11362 variable, and constants whose association doesn't overflow. */
11363 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11364 && !TYPE_OVERFLOW_WRAPS (atype))
11366 if ((var0 && var1) || (minus_var0 && minus_var1))
11368 /* ??? If split_tree would handle NEGATE_EXPR we could
11369 simply reject these cases and the allowed cases would
11370 be the var0/minus_var1 ones. */
11371 tree tmp0 = var0 ? var0 : minus_var0;
11372 tree tmp1 = var1 ? var1 : minus_var1;
11373 bool one_neg = false;
11375 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11377 tmp0 = TREE_OPERAND (tmp0, 0);
11378 one_neg = !one_neg;
11380 if (CONVERT_EXPR_P (tmp0)
11381 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11382 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11383 <= TYPE_PRECISION (atype)))
11384 tmp0 = TREE_OPERAND (tmp0, 0);
11385 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11387 tmp1 = TREE_OPERAND (tmp1, 0);
11388 one_neg = !one_neg;
11390 if (CONVERT_EXPR_P (tmp1)
11391 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11392 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11393 <= TYPE_PRECISION (atype)))
11394 tmp1 = TREE_OPERAND (tmp1, 0);
11395 /* The only case we can still associate with two variables
11396 is if they cancel out. */
11397 if (!one_neg
11398 || !operand_equal_p (tmp0, tmp1, 0))
11399 ok = false;
11401 else if ((var0 && minus_var1
11402 && ! operand_equal_p (var0, minus_var1, 0))
11403 || (minus_var0 && var1
11404 && ! operand_equal_p (minus_var0, var1, 0)))
11405 ok = false;
11408 /* Only do something if we found more than two objects. Otherwise,
11409 nothing has changed and we risk infinite recursion. */
11410 if (ok
11411 && ((var0 != 0) + (var1 != 0)
11412 + (minus_var0 != 0) + (minus_var1 != 0)
11413 + (con0 != 0) + (con1 != 0)
11414 + (minus_con0 != 0) + (minus_con1 != 0)
11415 + (lit0 != 0) + (lit1 != 0)
11416 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11418 var0 = associate_trees (loc, var0, var1, code, atype);
11419 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11420 code, atype);
11421 con0 = associate_trees (loc, con0, con1, code, atype);
11422 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11423 code, atype);
11424 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11425 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11426 code, atype);
11428 if (minus_var0 && var0)
11430 var0 = associate_trees (loc, var0, minus_var0,
11431 MINUS_EXPR, atype);
11432 minus_var0 = 0;
11434 if (minus_con0 && con0)
11436 con0 = associate_trees (loc, con0, minus_con0,
11437 MINUS_EXPR, atype);
11438 minus_con0 = 0;
11441 /* Preserve the MINUS_EXPR if the negative part of the literal is
11442 greater than the positive part. Otherwise, the multiplicative
11443 folding code (i.e. extract_muldiv) may be fooled in case
11444 unsigned constants are subtracted, like in the following
11445 example: ((X*2 + 4) - 8U)/2. */
11446 if (minus_lit0 && lit0)
11448 if (TREE_CODE (lit0) == INTEGER_CST
11449 && TREE_CODE (minus_lit0) == INTEGER_CST
11450 && tree_int_cst_lt (lit0, minus_lit0)
11451 /* But avoid ending up with only negated parts. */
11452 && (var0 || con0))
11454 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11455 MINUS_EXPR, atype);
11456 lit0 = 0;
11458 else
11460 lit0 = associate_trees (loc, lit0, minus_lit0,
11461 MINUS_EXPR, atype);
11462 minus_lit0 = 0;
11466 /* Don't introduce overflows through reassociation. */
11467 if ((lit0 && TREE_OVERFLOW_P (lit0))
11468 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11469 return NULL_TREE;
11471 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11472 con0 = associate_trees (loc, con0, lit0, code, atype);
11473 lit0 = 0;
11474 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11475 code, atype);
11476 minus_lit0 = 0;
11478 /* Eliminate minus_con0. */
11479 if (minus_con0)
11481 if (con0)
11482 con0 = associate_trees (loc, con0, minus_con0,
11483 MINUS_EXPR, atype);
11484 else if (var0)
11485 var0 = associate_trees (loc, var0, minus_con0,
11486 MINUS_EXPR, atype);
11487 else
11488 gcc_unreachable ();
11489 minus_con0 = 0;
11492 /* Eliminate minus_var0. */
11493 if (minus_var0)
11495 if (con0)
11496 con0 = associate_trees (loc, con0, minus_var0,
11497 MINUS_EXPR, atype);
11498 else
11499 gcc_unreachable ();
11500 minus_var0 = 0;
11503 return
11504 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11505 code, atype));
11509 return NULL_TREE;
11511 case POINTER_DIFF_EXPR:
11512 case MINUS_EXPR:
11513 /* Fold &a[i] - &a[j] to i-j. */
11514 if (TREE_CODE (arg0) == ADDR_EXPR
11515 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11516 && TREE_CODE (arg1) == ADDR_EXPR
11517 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11519 tree tem = fold_addr_of_array_ref_difference (loc, type,
11520 TREE_OPERAND (arg0, 0),
11521 TREE_OPERAND (arg1, 0),
11522 code
11523 == POINTER_DIFF_EXPR);
11524 if (tem)
11525 return tem;
11528 /* Further transformations are not for pointers. */
11529 if (code == POINTER_DIFF_EXPR)
11530 return NULL_TREE;
11532 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11533 if (TREE_CODE (arg0) == NEGATE_EXPR
11534 && negate_expr_p (op1)
11535 /* If arg0 is e.g. unsigned int and type is int, then this could
11536 introduce UB, because if A is INT_MIN at runtime, the original
11537 expression can be well defined while the latter is not.
11538 See PR83269. */
11539 && !(ANY_INTEGRAL_TYPE_P (type)
11540 && TYPE_OVERFLOW_UNDEFINED (type)
11541 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11542 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11543 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11544 fold_convert_loc (loc, type,
11545 TREE_OPERAND (arg0, 0)));
11547 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11548 __complex__ ( x, -y ). This is not the same for SNaNs or if
11549 signed zeros are involved. */
11550 if (!HONOR_SNANS (arg0)
11551 && !HONOR_SIGNED_ZEROS (arg0)
11552 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11554 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11555 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11556 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11557 bool arg0rz = false, arg0iz = false;
11558 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11559 || (arg0i && (arg0iz = real_zerop (arg0i))))
11561 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11562 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11563 if (arg0rz && arg1i && real_zerop (arg1i))
11565 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11566 arg1r ? arg1r
11567 : build1 (REALPART_EXPR, rtype, arg1));
11568 tree ip = arg0i ? arg0i
11569 : build1 (IMAGPART_EXPR, rtype, arg0);
11570 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11572 else if (arg0iz && arg1r && real_zerop (arg1r))
11574 tree rp = arg0r ? arg0r
11575 : build1 (REALPART_EXPR, rtype, arg0);
11576 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11577 arg1i ? arg1i
11578 : build1 (IMAGPART_EXPR, rtype, arg1));
11579 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11584 /* A - B -> A + (-B) if B is easily negatable. */
11585 if (negate_expr_p (op1)
11586 && ! TYPE_OVERFLOW_SANITIZED (type)
11587 && ((FLOAT_TYPE_P (type)
11588 /* Avoid this transformation if B is a positive REAL_CST. */
11589 && (TREE_CODE (op1) != REAL_CST
11590 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11591 || INTEGRAL_TYPE_P (type)))
11592 return fold_build2_loc (loc, PLUS_EXPR, type,
11593 fold_convert_loc (loc, type, arg0),
11594 negate_expr (op1));
11596 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11597 one. Make sure the type is not saturating and has the signedness of
11598 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11599 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11600 if ((TREE_CODE (arg0) == MULT_EXPR
11601 || TREE_CODE (arg1) == MULT_EXPR)
11602 && !TYPE_SATURATING (type)
11603 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11604 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11605 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11607 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11608 if (tem)
11609 return tem;
11612 goto associate;
11614 case MULT_EXPR:
11615 if (! FLOAT_TYPE_P (type))
11617 /* Transform x * -C into -x * C if x is easily negatable. */
11618 if (TREE_CODE (op1) == INTEGER_CST
11619 && tree_int_cst_sgn (op1) == -1
11620 && negate_expr_p (op0)
11621 && negate_expr_p (op1)
11622 && (tem = negate_expr (op1)) != op1
11623 && ! TREE_OVERFLOW (tem))
11624 return fold_build2_loc (loc, MULT_EXPR, type,
11625 fold_convert_loc (loc, type,
11626 negate_expr (op0)), tem);
11628 strict_overflow_p = false;
11629 if (TREE_CODE (arg1) == INTEGER_CST
11630 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11631 &strict_overflow_p)) != 0)
11633 if (strict_overflow_p)
11634 fold_overflow_warning (("assuming signed overflow does not "
11635 "occur when simplifying "
11636 "multiplication"),
11637 WARN_STRICT_OVERFLOW_MISC);
11638 return fold_convert_loc (loc, type, tem);
11641 /* Optimize z * conj(z) for integer complex numbers. */
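          /* For z == a + bi, z * conj(z) == (a + bi)(a - bi)
             == a*a + b*b, a purely real result; fold_mult_zconjz
             builds the corresponding COMPLEX_EXPR with zero imaginary
             part.  */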
11642 if (TREE_CODE (arg0) == CONJ_EXPR
11643 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11644 return fold_mult_zconjz (loc, type, arg1);
11645 if (TREE_CODE (arg1) == CONJ_EXPR
11646 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11647 return fold_mult_zconjz (loc, type, arg0);
11649 else
11651 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11652 This is not the same for NaNs or if signed zeros are
11653 involved. */
11654 if (!HONOR_NANS (arg0)
11655 && !HONOR_SIGNED_ZEROS (arg0)
11656 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11657 && TREE_CODE (arg1) == COMPLEX_CST
11658 && real_zerop (TREE_REALPART (arg1)))
11660 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11661 if (real_onep (TREE_IMAGPART (arg1)))
11662 return
11663 fold_build2_loc (loc, COMPLEX_EXPR, type,
11664 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11665 rtype, arg0)),
11666 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11667 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11668 return
11669 fold_build2_loc (loc, COMPLEX_EXPR, type,
11670 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11671 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11672 rtype, arg0)));
11675 /* Optimize z * conj(z) for floating point complex numbers.
11676 Guarded by flag_unsafe_math_optimizations as non-finite
11677 imaginary components don't produce scalar results. */
11678 if (flag_unsafe_math_optimizations
11679 && TREE_CODE (arg0) == CONJ_EXPR
11680 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11681 return fold_mult_zconjz (loc, type, arg1);
11682 if (flag_unsafe_math_optimizations
11683 && TREE_CODE (arg1) == CONJ_EXPR
11684 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11685 return fold_mult_zconjz (loc, type, arg0);
11687 goto associate;
11689 case BIT_IOR_EXPR:
11690 /* Canonicalize (X & C1) | C2. */
11691 if (TREE_CODE (arg0) == BIT_AND_EXPR
11692 && TREE_CODE (arg1) == INTEGER_CST
11693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11695 int width = TYPE_PRECISION (type), w;
11696 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11697 wide_int c2 = wi::to_wide (arg1);
11699 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11700 if ((c1 & c2) == c1)
11701 return omit_one_operand_loc (loc, type, arg1,
11702 TREE_OPERAND (arg0, 0));
11704 wide_int msk = wi::mask (width, false,
11705 TYPE_PRECISION (TREE_TYPE (arg1)));
11707 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11708 if (wi::bit_and_not (msk, c1 | c2) == 0)
11710 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11711 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11714 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11715 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11716 mode which allows further optimizations. */
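          /* E.g. (X & 0x0f0f) | 0x00ff becomes (X & 0x0f00) | 0x00ff,
             dropping the C1 bits that C2 already covers, whereas
             (X & 0xff) | 0x01 is left alone because 0xff is a mask of
             a complete byte and may enable other folds.  */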
11717 c1 &= msk;
11718 c2 &= msk;
11719 wide_int c3 = wi::bit_and_not (c1, c2);
11720 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11722 wide_int mask = wi::mask (w, false,
11723 TYPE_PRECISION (type));
11724 if (((c1 | c2) & mask) == mask
11725 && wi::bit_and_not (c1, mask) == 0)
11727 c3 = mask;
11728 break;
11732 if (c3 != c1)
11734 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11735 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11736 wide_int_to_tree (type, c3));
11737 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11741 /* See if this can be simplified into a rotate first. If that
11742 is unsuccessful, continue in the association code. */
11743 goto bit_rotate;
11745 case BIT_XOR_EXPR:
11746 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11747 if (TREE_CODE (arg0) == BIT_AND_EXPR
11748 && INTEGRAL_TYPE_P (type)
11749 && integer_onep (TREE_OPERAND (arg0, 1))
11750 && integer_onep (arg1))
11751 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11752 build_zero_cst (TREE_TYPE (arg0)));
11754 /* See if this can be simplified into a rotate first. If that
11755 is unsuccessful, continue in the association code. */
11756 goto bit_rotate;
11758 case BIT_AND_EXPR:
11759 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11760 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11761 && INTEGRAL_TYPE_P (type)
11762 && integer_onep (TREE_OPERAND (arg0, 1))
11763 && integer_onep (arg1))
11765 tree tem2;
11766 tem = TREE_OPERAND (arg0, 0);
11767 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11768 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11769 tem, tem2);
11770 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11771 build_zero_cst (TREE_TYPE (tem)));
11773 /* Fold ~X & 1 as (X & 1) == 0. */
11774 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11775 && INTEGRAL_TYPE_P (type)
11776 && integer_onep (arg1))
11778 tree tem2;
11779 tem = TREE_OPERAND (arg0, 0);
11780 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11781 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11782 tem, tem2);
11783 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11784 build_zero_cst (TREE_TYPE (tem)));
11786 /* Fold !X & 1 as X == 0. */
11787 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11788 && integer_onep (arg1))
11790 tem = TREE_OPERAND (arg0, 0);
11791 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11792 build_zero_cst (TREE_TYPE (tem)));
11795 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11796 multiple of 1 << CST. */
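      /* E.g. (X * 8) & -8 is just X * 8: the product is always a
         multiple of 8, so the mask clears nothing.  */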
11797 if (TREE_CODE (arg1) == INTEGER_CST)
11799 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11800 wide_int ncst1 = -cst1;
11801 if ((cst1 & ncst1) == ncst1
11802 && multiple_of_p (type, arg0,
11803 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11804 return fold_convert_loc (loc, type, arg0);
11807 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11808 bits from CST2. */
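      /* E.g. (X * 4) & 1 is 0, and (X * 4) & 7 becomes (X * 4) & 4,
         because the two low bits of X * 4 are always zero.  */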
11809 if (TREE_CODE (arg1) == INTEGER_CST
11810 && TREE_CODE (arg0) == MULT_EXPR
11811 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11813 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11814 wide_int masked
11815 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11817 if (masked == 0)
11818 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11819 arg0, arg1);
11820 else if (masked != warg1)
11822 /* Avoid the transform if arg1 is a mask of some
11823 mode which allows further optimizations. */
11824 int pop = wi::popcount (warg1);
11825 if (!(pop >= BITS_PER_UNIT
11826 && pow2p_hwi (pop)
11827 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11828 return fold_build2_loc (loc, code, type, op0,
11829 wide_int_to_tree (type, masked));
11833 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11834 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11835 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11837 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11839 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11840 if (mask == -1)
11841 return
11842 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11845 goto associate;
11847 case RDIV_EXPR:
11848 /* Don't touch a floating-point divide by zero unless the mode
11849 of the constant can represent infinity. */
11850 if (TREE_CODE (arg1) == REAL_CST
11851 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11852 && real_zerop (arg1))
11853 return NULL_TREE;
11855 /* (-A) / (-B) -> A / B */
11856 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11857 return fold_build2_loc (loc, RDIV_EXPR, type,
11858 TREE_OPERAND (arg0, 0),
11859 negate_expr (arg1));
11860 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11861 return fold_build2_loc (loc, RDIV_EXPR, type,
11862 negate_expr (arg0),
11863 TREE_OPERAND (arg1, 0));
11864 return NULL_TREE;
11866 case TRUNC_DIV_EXPR:
11867 /* Fall through */
11869 case FLOOR_DIV_EXPR:
11870 /* Simplify A / (B << N) where A and B are positive and B is
11871 a power of 2, to A >> (N + log2(B)). */
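      /* E.g. for unsigned A, A / (8 << N) becomes A >> (N + 3), since
         8 == 1 << 3.  */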
11872 strict_overflow_p = false;
11873 if (TREE_CODE (arg1) == LSHIFT_EXPR
11874 && (TYPE_UNSIGNED (type)
11875 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11877 tree sval = TREE_OPERAND (arg1, 0);
11878 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11880 tree sh_cnt = TREE_OPERAND (arg1, 1);
11881 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11882 wi::exact_log2 (wi::to_wide (sval)));
11884 if (strict_overflow_p)
11885 fold_overflow_warning (("assuming signed overflow does not "
11886 "occur when simplifying A / (B << N)"),
11887 WARN_STRICT_OVERFLOW_MISC);
11889 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11890 sh_cnt, pow2);
11891 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11892 fold_convert_loc (loc, type, arg0), sh_cnt);
11896 /* Fall through */
11898 case ROUND_DIV_EXPR:
11899 case CEIL_DIV_EXPR:
11900 case EXACT_DIV_EXPR:
11901 if (integer_zerop (arg1))
11902 return NULL_TREE;
11904 /* Convert -A / -B to A / B when the type is signed and overflow is
11905 undefined. */
11906 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11907 && TREE_CODE (op0) == NEGATE_EXPR
11908 && negate_expr_p (op1))
11910 if (ANY_INTEGRAL_TYPE_P (type))
11911 fold_overflow_warning (("assuming signed overflow does not occur "
11912 "when distributing negation across "
11913 "division"),
11914 WARN_STRICT_OVERFLOW_MISC);
11915 return fold_build2_loc (loc, code, type,
11916 fold_convert_loc (loc, type,
11917 TREE_OPERAND (arg0, 0)),
11918 negate_expr (op1));
11920 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11921 && TREE_CODE (arg1) == NEGATE_EXPR
11922 && negate_expr_p (op0))
11924 if (ANY_INTEGRAL_TYPE_P (type))
11925 fold_overflow_warning (("assuming signed overflow does not occur "
11926 "when distributing negation across "
11927 "division"),
11928 WARN_STRICT_OVERFLOW_MISC);
11929 return fold_build2_loc (loc, code, type,
11930 negate_expr (op0),
11931 fold_convert_loc (loc, type,
11932 TREE_OPERAND (arg1, 0)));
11935 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11936 operation, EXACT_DIV_EXPR.
11938 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11939 At one time others generated faster code; it's not clear if they do
11940 after the last round of changes to the DIV code in expmed.cc. */
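      /* E.g. a FLOOR_DIV_EXPR of X * 4 by 4 is known to be exact and
         is rewritten as EXACT_DIV_EXPR, which needs no rounding
         adjustment when expanded.  */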
11941 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11942 && multiple_of_p (type, arg0, arg1))
11943 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11944 fold_convert (type, arg0),
11945 fold_convert (type, arg1));
11947 strict_overflow_p = false;
11948 if (TREE_CODE (arg1) == INTEGER_CST
11949 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11950 &strict_overflow_p)) != 0)
11952 if (strict_overflow_p)
11953 fold_overflow_warning (("assuming signed overflow does not occur "
11954 "when simplifying division"),
11955 WARN_STRICT_OVERFLOW_MISC);
11956 return fold_convert_loc (loc, type, tem);
11959 return NULL_TREE;
11961 case CEIL_MOD_EXPR:
11962 case FLOOR_MOD_EXPR:
11963 case ROUND_MOD_EXPR:
11964 case TRUNC_MOD_EXPR:
11965 strict_overflow_p = false;
11966 if (TREE_CODE (arg1) == INTEGER_CST
11967 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11968 &strict_overflow_p)) != 0)
11970 if (strict_overflow_p)
11971 fold_overflow_warning (("assuming signed overflow does not occur "
11972 "when simplifying modulus"),
11973 WARN_STRICT_OVERFLOW_MISC);
11974 return fold_convert_loc (loc, type, tem);
11977 return NULL_TREE;
11979 case LROTATE_EXPR:
11980 case RROTATE_EXPR:
11981 case RSHIFT_EXPR:
11982 case LSHIFT_EXPR:
11983 /* Since a negative shift count is not well-defined,
11984 don't try to compute it in the compiler. */
11985 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11986 return NULL_TREE;
11988 prec = element_precision (type);
11990 /* If we have a rotate of a bit operation with the rotate count and
11991 the second operand of the bit operation both constant,
11992 permute the two operations. */
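      /* E.g. (X & C) r>> N becomes (X r>> N) & (C r>> N), and the
         rotated constant then folds at compile time.  */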
11993 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11994 && (TREE_CODE (arg0) == BIT_AND_EXPR
11995 || TREE_CODE (arg0) == BIT_IOR_EXPR
11996 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11997 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11999 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12000 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12001 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12002 fold_build2_loc (loc, code, type,
12003 arg00, arg1),
12004 fold_build2_loc (loc, code, type,
12005 arg01, arg1));
12008 /* Two consecutive rotates adding up to some integer
12009 multiple of the precision of the type can be ignored. */
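      /* E.g. on a 32-bit type, (X r>> 8) r>> 24 rotates by 32 bits in
         total, which is the identity, so it folds to X.  */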
12010 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12011 && TREE_CODE (arg0) == RROTATE_EXPR
12012 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12013 && wi::umod_trunc (wi::to_wide (arg1)
12014 + wi::to_wide (TREE_OPERAND (arg0, 1)),
12015 prec) == 0)
12016 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12018 return NULL_TREE;
12020 case MIN_EXPR:
12021 case MAX_EXPR:
12022 goto associate;
12024 case TRUTH_ANDIF_EXPR:
12025 /* Note that the operands of this must be ints
12026 and their values must be 0 or 1.
12027 ("true" is a fixed value perhaps depending on the language.) */
12028 /* If first arg is constant zero, return it. */
12029 if (integer_zerop (arg0))
12030 return fold_convert_loc (loc, type, arg0);
12031 /* FALLTHRU */
12032 case TRUTH_AND_EXPR:
12033 /* If either arg is constant true, drop it. */
12034 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12035 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12036 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12037 /* Preserve sequence points. */
12038 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12039 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12040 /* If second arg is constant zero, result is zero, but first arg
12041 must be evaluated. */
12042 if (integer_zerop (arg1))
12043 return omit_one_operand_loc (loc, type, arg1, arg0);
12044 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12045 case will be handled here. */
12046 if (integer_zerop (arg0))
12047 return omit_one_operand_loc (loc, type, arg0, arg1);
12049 /* !X && X is always false. */
12050 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12051 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12052 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12053 /* X && !X is always false. */
12054 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12055 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12056 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12058 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12059 means A >= Y && A != MAX, but in this case we know that
12060 A < X <= MAX. */
12062 if (!TREE_SIDE_EFFECTS (arg0)
12063 && !TREE_SIDE_EFFECTS (arg1))
12065 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12066 if (tem && !operand_equal_p (tem, arg0, 0))
12067 return fold_convert (type,
12068 fold_build2_loc (loc, code, TREE_TYPE (arg1),
12069 tem, arg1));
12071 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12072 if (tem && !operand_equal_p (tem, arg1, 0))
12073 return fold_convert (type,
12074 fold_build2_loc (loc, code, TREE_TYPE (arg0),
12075 arg0, tem));
12078 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12079 != NULL_TREE)
12080 return tem;
12082 return NULL_TREE;
12084 case TRUTH_ORIF_EXPR:
12085 /* Note that the operands of this must be ints
12086 and their values must be 0 or true.
12087 ("true" is a fixed value perhaps depending on the language.) */
12088 /* If first arg is constant true, return it. */
12089 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12090 return fold_convert_loc (loc, type, arg0);
12091 /* FALLTHRU */
12092 case TRUTH_OR_EXPR:
12093 /* If either arg is constant zero, drop it. */
12094 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12095 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12096 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12097 /* Preserve sequence points. */
12098 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12099 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12100 /* If second arg is constant true, result is true, but we must
12101 evaluate first arg. */
12102 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12103 return omit_one_operand_loc (loc, type, arg1, arg0);
12104 /* Likewise for first arg, but note this only occurs here for
12105 TRUTH_OR_EXPR. */
12106 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12107 return omit_one_operand_loc (loc, type, arg0, arg1);
12109 /* !X || X is always true. */
12110 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12111 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12112 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12113 /* X || !X is always true. */
12114 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12115 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12116 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12118 /* (X && !Y) || (!X && Y) is X ^ Y */
12119 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12120 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12122 tree a0, a1, l0, l1, n0, n1;
12124 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12125 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12127 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12128 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12130 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12131 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12133 if ((operand_equal_p (n0, a0, 0)
12134 && operand_equal_p (n1, a1, 0))
12135 || (operand_equal_p (n0, a1, 0)
12136 && operand_equal_p (n1, a0, 0)))
12137 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12140 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12141 != NULL_TREE)
12142 return tem;
12144 return NULL_TREE;
12146 case TRUTH_XOR_EXPR:
12147 /* If the second arg is constant zero, drop it. */
12148 if (integer_zerop (arg1))
12149 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12150 /* If the second arg is constant true, this is a logical inversion. */
12151 if (integer_onep (arg1))
12153 tem = invert_truthvalue_loc (loc, arg0);
12154 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12156 /* Identical arguments cancel to zero. */
12157 if (operand_equal_p (arg0, arg1, 0))
12158 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12160 /* !X ^ X is always true. */
12161 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12162 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12163 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12165 /* X ^ !X is always true. */
12166 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12167 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12168 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12170 return NULL_TREE;
12172 case EQ_EXPR:
12173 case NE_EXPR:
12174 STRIP_NOPS (arg0);
12175 STRIP_NOPS (arg1);
12177 tem = fold_comparison (loc, code, type, op0, op1);
12178 if (tem != NULL_TREE)
12179 return tem;
12181 /* bool_var != 1 becomes !bool_var. */
12182 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12183 && code == NE_EXPR)
12184 return fold_convert_loc (loc, type,
12185 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12186 TREE_TYPE (arg0), arg0));
12188 /* bool_var == 0 becomes !bool_var. */
12189 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12190 && code == EQ_EXPR)
12191 return fold_convert_loc (loc, type,
12192 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12193 TREE_TYPE (arg0), arg0));
12195 /* !exp != 0 becomes !exp */
12196 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12197 && code == NE_EXPR)
12198 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12200 /* If this is an EQ or NE comparison with zero and ARG0 is
12201 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12202 two operations, but the latter can be done in one less insn
12203 on machines that have only two-operand insns or on which a
12204 constant cannot be the first operand. */
12205 if (TREE_CODE (arg0) == BIT_AND_EXPR
12206 && integer_zerop (arg1))
12208 tree arg00 = TREE_OPERAND (arg0, 0);
12209 tree arg01 = TREE_OPERAND (arg0, 1);
12210 if (TREE_CODE (arg00) == LSHIFT_EXPR
12211 && integer_onep (TREE_OPERAND (arg00, 0)))
12213 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12214 arg01, TREE_OPERAND (arg00, 1));
12215 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12216 build_one_cst (TREE_TYPE (arg0)));
12217 return fold_build2_loc (loc, code, type,
12218 fold_convert_loc (loc, TREE_TYPE (arg1),
12219 tem), arg1);
12221 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12222 && integer_onep (TREE_OPERAND (arg01, 0)))
12224 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12225 arg00, TREE_OPERAND (arg01, 1));
12226 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12227 build_one_cst (TREE_TYPE (arg0)));
12228 return fold_build2_loc (loc, code, type,
12229 fold_convert_loc (loc, TREE_TYPE (arg1),
12230 tem), arg1);
12234 /* If this is a comparison of a field, we may be able to simplify it. */
12235 if ((TREE_CODE (arg0) == COMPONENT_REF
12236 || TREE_CODE (arg0) == BIT_FIELD_REF)
12237 /* Handle the constant case even without -O
12238 to make sure the warnings are given. */
12239 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12241 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12242 if (t1)
12243 return t1;
12246 /* Optimize comparisons of strlen vs zero to a compare of the
12247 first character of the string vs zero. To wit,
12248 strlen(ptr) == 0 => *ptr == 0
12249 strlen(ptr) != 0 => *ptr != 0
12250 Other cases should reduce to one of these two (or a constant)
12251 due to the return value of strlen being unsigned. */
12252 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12254 tree fndecl = get_callee_fndecl (arg0);
12256 if (fndecl
12257 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12258 && call_expr_nargs (arg0) == 1
12259 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12260 == POINTER_TYPE))
12262 tree ptrtype
12263 = build_pointer_type (build_qualified_type (char_type_node,
12264 TYPE_QUAL_CONST));
12265 tree ptr = fold_convert_loc (loc, ptrtype,
12266 CALL_EXPR_ARG (arg0, 0));
12267 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12268 return fold_build2_loc (loc, code, type, iref,
12269 build_int_cst (TREE_TYPE (iref), 0));
12273 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12274 of X. Similarly fold (X >> C) == 0 into X >= 0. */
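      /* E.g. for 32-bit X, (X >> 31) != 0 inspects only the sign bit
         and so is exactly X < 0, converting X to a signed type first
         if it was unsigned.  */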
12275 if (TREE_CODE (arg0) == RSHIFT_EXPR
12276 && integer_zerop (arg1)
12277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12279 tree arg00 = TREE_OPERAND (arg0, 0);
12280 tree arg01 = TREE_OPERAND (arg0, 1);
12281 tree itype = TREE_TYPE (arg00);
12282 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12284 if (TYPE_UNSIGNED (itype))
12286 itype = signed_type_for (itype);
12287 arg00 = fold_convert_loc (loc, itype, arg00);
12289 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12290 type, arg00, build_zero_cst (itype));
12294 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12295 (X & C) == 0 when C is a single bit. */
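      /* E.g. (~X & 8) == 0 says that bit 3 of ~X is clear, i.e. that
         bit 3 of X is set, which is (X & 8) != 0.  */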
12296 if (TREE_CODE (arg0) == BIT_AND_EXPR
12297 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12298 && integer_zerop (arg1)
12299 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12301 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12302 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12303 TREE_OPERAND (arg0, 1));
12304 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12305 type, tem,
12306 fold_convert_loc (loc, TREE_TYPE (arg0),
12307 arg1));
12310 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12311 constant C is a power of two, i.e. a single bit. */
12312 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12313 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12314 && integer_zerop (arg1)
12315 && integer_pow2p (TREE_OPERAND (arg0, 1))
12316 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12317 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12319 tree arg00 = TREE_OPERAND (arg0, 0);
12320 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12321 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12324 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12325 when C is a power of two, i.e. a single bit. */
12326 if (TREE_CODE (arg0) == BIT_AND_EXPR
12327 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12328 && integer_zerop (arg1)
12329 && integer_pow2p (TREE_OPERAND (arg0, 1))
12330 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12331 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12333 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12334 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12335 arg000, TREE_OPERAND (arg0, 1));
12336 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12337 tem, build_int_cst (TREE_TYPE (tem), 0));
12340 if (integer_zerop (arg1)
12341 && tree_expr_nonzero_p (arg0))
12343 tree res = constant_boolean_node (code == NE_EXPR, type);
12344 return omit_one_operand_loc (loc, type, res, arg0);
12347 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12348 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12350 tree arg00 = TREE_OPERAND (arg0, 0);
12351 tree arg01 = TREE_OPERAND (arg0, 1);
12352 tree arg10 = TREE_OPERAND (arg1, 0);
12353 tree arg11 = TREE_OPERAND (arg1, 1);
12354 tree itype = TREE_TYPE (arg0);
12356 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12357 operand_equal_p guarantees no side-effects so we don't need
12358 to use omit_one_operand on Z. */
12359 if (operand_equal_p (arg01, arg11, 0))
12360 return fold_build2_loc (loc, code, type, arg00,
12361 fold_convert_loc (loc, TREE_TYPE (arg00),
12362 arg10));
12363 if (operand_equal_p (arg01, arg10, 0))
12364 return fold_build2_loc (loc, code, type, arg00,
12365 fold_convert_loc (loc, TREE_TYPE (arg00),
12366 arg11));
12367 if (operand_equal_p (arg00, arg11, 0))
12368 return fold_build2_loc (loc, code, type, arg01,
12369 fold_convert_loc (loc, TREE_TYPE (arg01),
12370 arg10));
12371 if (operand_equal_p (arg00, arg10, 0))
12372 return fold_build2_loc (loc, code, type, arg01,
12373 fold_convert_loc (loc, TREE_TYPE (arg01),
12374 arg11));
12376 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12377 if (TREE_CODE (arg01) == INTEGER_CST
12378 && TREE_CODE (arg11) == INTEGER_CST)
12380 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12381 fold_convert_loc (loc, itype, arg11));
12382 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12383 return fold_build2_loc (loc, code, type, tem,
12384 fold_convert_loc (loc, itype, arg10));
12388 /* Attempt to simplify equality/inequality comparisons of complex
12389 values. Only lower the comparison if the result is known or
12390 can be simplified to a single scalar comparison. */
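      /* E.g. COMPLEX_EXPR <x, 0> == COMPLEX_EXPR <y, 0> reduces to
         the scalar comparison x == y, because the imaginary parts are
         known to compare equal.  */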
12391 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12392 || TREE_CODE (arg0) == COMPLEX_CST)
12393 && (TREE_CODE (arg1) == COMPLEX_EXPR
12394 || TREE_CODE (arg1) == COMPLEX_CST))
12396 tree real0, imag0, real1, imag1;
12397 tree rcond, icond;
12399 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12401 real0 = TREE_OPERAND (arg0, 0);
12402 imag0 = TREE_OPERAND (arg0, 1);
12404 else
12406 real0 = TREE_REALPART (arg0);
12407 imag0 = TREE_IMAGPART (arg0);
12410 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12412 real1 = TREE_OPERAND (arg1, 0);
12413 imag1 = TREE_OPERAND (arg1, 1);
12415 else
12417 real1 = TREE_REALPART (arg1);
12418 imag1 = TREE_IMAGPART (arg1);
12421 rcond = fold_binary_loc (loc, code, type, real0, real1);
12422 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12424 if (integer_zerop (rcond))
12426 if (code == EQ_EXPR)
12427 return omit_two_operands_loc (loc, type, boolean_false_node,
12428 imag0, imag1);
12429 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12431 else
12433 if (code == NE_EXPR)
12434 return omit_two_operands_loc (loc, type, boolean_true_node,
12435 imag0, imag1);
12436 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12440 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12441 if (icond && TREE_CODE (icond) == INTEGER_CST)
12443 if (integer_zerop (icond))
12445 if (code == EQ_EXPR)
12446 return omit_two_operands_loc (loc, type, boolean_false_node,
12447 real0, real1);
12448 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12450 else
12452 if (code == NE_EXPR)
12453 return omit_two_operands_loc (loc, type, boolean_true_node,
12454 real0, real1);
12455 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12460 return NULL_TREE;
12462 case LT_EXPR:
12463 case GT_EXPR:
12464 case LE_EXPR:
12465 case GE_EXPR:
12466 tem = fold_comparison (loc, code, type, op0, op1);
12467 if (tem != NULL_TREE)
12468 return tem;
12470 /* Transform comparisons of the form X +- C CMP X. */
12471 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12472 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12474 && !HONOR_SNANS (arg0))
12476 tree arg01 = TREE_OPERAND (arg0, 1);
12477 enum tree_code code0 = TREE_CODE (arg0);
12478 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12480 /* (X - c) > X becomes false. */
12481 if (code == GT_EXPR
12482 && ((code0 == MINUS_EXPR && is_positive >= 0)
12483 || (code0 == PLUS_EXPR && is_positive <= 0)))
12484 return constant_boolean_node (0, type);
12486 /* Likewise (X + c) < X becomes false. */
12487 if (code == LT_EXPR
12488 && ((code0 == PLUS_EXPR && is_positive >= 0)
12489 || (code0 == MINUS_EXPR && is_positive <= 0)))
12490 return constant_boolean_node (0, type);
12492 /* Convert (X - c) <= X to true. */
12493 if (!HONOR_NANS (arg1)
12494 && code == LE_EXPR
12495 && ((code0 == MINUS_EXPR && is_positive >= 0)
12496 || (code0 == PLUS_EXPR && is_positive <= 0)))
12497 return constant_boolean_node (1, type);
12499 /* Convert (X + c) >= X to true. */
12500 if (!HONOR_NANS (arg1)
12501 && code == GE_EXPR
12502 && ((code0 == PLUS_EXPR && is_positive >= 0)
12503 || (code0 == MINUS_EXPR && is_positive <= 0)))
12504 return constant_boolean_node (1, type);
12507 /* If we are comparing an ABS_EXPR with a constant, we can
12508 convert all the cases into explicit comparisons, but they may
12509 well not be faster than doing the ABS and one comparison.
12510 But ABS (X) <= C is a range comparison, which becomes a subtraction
12511 and a comparison, and is probably faster. */
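      /* E.g. ABS (X) <= 7 becomes X >= -7 && X <= 7.  */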
12512 if (code == LE_EXPR
12513 && TREE_CODE (arg1) == INTEGER_CST
12514 && TREE_CODE (arg0) == ABS_EXPR
12515 && ! TREE_SIDE_EFFECTS (arg0)
12516 && (tem = negate_expr (arg1)) != 0
12517 && TREE_CODE (tem) == INTEGER_CST
12518 && !TREE_OVERFLOW (tem))
12519 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12520 build2 (GE_EXPR, type,
12521 TREE_OPERAND (arg0, 0), tem),
12522 build2 (LE_EXPR, type,
12523 TREE_OPERAND (arg0, 0), arg1));
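/* An illustrative example (not part of the original source): with
   C == 5, ABS (X) <= 5 folds to X >= -5 && X <= 5, i.e. one
   negation plus two comparisons.  */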
12525 /* Convert ABS_EXPR<x> >= 0 to true. */
12526 strict_overflow_p = false;
12527 if (code == GE_EXPR
12528 && (integer_zerop (arg1)
12529 || (! HONOR_NANS (arg0)
12530 && real_zerop (arg1)))
12531 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12533 if (strict_overflow_p)
12534 fold_overflow_warning (("assuming signed overflow does not occur "
12535 "when simplifying comparison of "
12536 "absolute value and zero"),
12537 WARN_STRICT_OVERFLOW_CONDITIONAL);
12538 return omit_one_operand_loc (loc, type,
12539 constant_boolean_node (true, type),
12540 arg0);
12543 /* Convert ABS_EXPR<x> < 0 to false. */
12544 strict_overflow_p = false;
12545 if (code == LT_EXPR
12546 && (integer_zerop (arg1) || real_zerop (arg1))
12547 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12549 if (strict_overflow_p)
12550 fold_overflow_warning (("assuming signed overflow does not occur "
12551 "when simplifying comparison of "
12552 "absolute value and zero"),
12553 WARN_STRICT_OVERFLOW_CONDITIONAL);
12554 return omit_one_operand_loc (loc, type,
12555 constant_boolean_node (false, type),
12556 arg0);
12559 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12560 and similarly for >= into !=. */
12561 if ((code == LT_EXPR || code == GE_EXPR)
12562 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12563 && TREE_CODE (arg1) == LSHIFT_EXPR
12564 && integer_onep (TREE_OPERAND (arg1, 0)))
12565 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12566 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12567 TREE_OPERAND (arg1, 1)),
12568 build_zero_cst (TREE_TYPE (arg0)));
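/* (Sketch of why this is exact: for unsigned X, X < (1 << Y) holds
   precisely when no bit at position Y or above is set in X, which
   is the same as X >> Y == 0.)  */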
12570 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12571 otherwise Y might be >= # of bits in X's type and thus e.g.
12572 (unsigned char) (1 << Y) for Y == 15 might be 0.
12573 If the cast is widening, then 1 << Y should have unsigned type,
12574 otherwise if Y is number of bits in the signed shift type minus 1,
12575 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
12576 Y == 31 might be 0xffffffff80000000. */
12577 if ((code == LT_EXPR || code == GE_EXPR)
12578 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12579 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12580 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12581 && CONVERT_EXPR_P (arg1)
12582 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12583 && (element_precision (TREE_TYPE (arg1))
12584 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12585 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12586 || (element_precision (TREE_TYPE (arg1))
12587 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12588 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12590 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12591 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12592 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12593 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12594 build_zero_cst (TREE_TYPE (arg0)));
12597 return NULL_TREE;
12599 case UNORDERED_EXPR:
12600 case ORDERED_EXPR:
12601 case UNLT_EXPR:
12602 case UNLE_EXPR:
12603 case UNGT_EXPR:
12604 case UNGE_EXPR:
12605 case UNEQ_EXPR:
12606 case LTGT_EXPR:
12607 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12609 tree targ0 = strip_float_extensions (arg0);
12610 tree targ1 = strip_float_extensions (arg1);
12611 tree newtype = TREE_TYPE (targ0);
12613 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12614 newtype = TREE_TYPE (targ1);
12616 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12617 return fold_build2_loc (loc, code, type,
12618 fold_convert_loc (loc, newtype, targ0),
12619 fold_convert_loc (loc, newtype, targ1));
12622 return NULL_TREE;
12624 case COMPOUND_EXPR:
12625 /* When pedantic, a compound expression can be neither an lvalue
12626 nor an integer constant expression. */
12627 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12628 return NULL_TREE;
12629 /* Don't let (0, 0) be a null pointer constant. */
12630 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12631 : fold_convert_loc (loc, type, arg1);
12632 return tem;
12634 default:
12635 return NULL_TREE;
12636 } /* switch (code) */
12639 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12640 ((A & N) + B) & M -> (A + B) & M
12641 Similarly if (N & M) == 0,
12642 ((A | N) + B) & M -> (A + B) & M
12643 and for - instead of + (or unary - instead of +)
12644 and/or ^ instead of |.
12645 If B is constant and (B & M) == 0, fold into A & M.
12647 This function is a helper for match.pd patterns. Return the type
12648 in which the simplified operation should be performed if any
12649 optimization is possible, otherwise return NULL_TREE.
12651 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12652 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12653 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12654 +/-. */
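/* An illustrative example (not part of the original source): with
   M == 7 and code == PLUS_EXPR, ((A & 0xff07) + B) & 7 simplifies
   to (A + B) & 7 because 0xff07 & 7 == 7, and ((A ^ 8) + B) & 7
   simplifies the same way because 8 & 7 == 0.  */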
12655 tree
12656 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12657 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12658 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12659 tree *pmop)
12661 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12662 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12663 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12664 if (~cst1 == 0
12665 || (cst1 & (cst1 + 1)) != 0
12666 || !INTEGRAL_TYPE_P (type)
12667 || (!TYPE_OVERFLOW_WRAPS (type)
12668 && TREE_CODE (type) != INTEGER_TYPE)
12669 || (wi::max_value (type) & cst1) != cst1)
12670 return NULL_TREE;
12672 enum tree_code codes[2] = { code00, code01 };
12673 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12674 int which = 0;
12675 wide_int cst0;
12677 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12678 arg1 (M) is == (1LL << cst) - 1.
12679 Store C into PMOP[0] and D into PMOP[1]. */
12680 pmop[0] = arg00;
12681 pmop[1] = arg01;
12682 which = code != NEGATE_EXPR;
12684 for (; which >= 0; which--)
12685 switch (codes[which])
12687 case BIT_AND_EXPR:
12688 case BIT_IOR_EXPR:
12689 case BIT_XOR_EXPR:
12690 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12691 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12692 if (codes[which] == BIT_AND_EXPR)
12694 if (cst0 != cst1)
12695 break;
12697 else if (cst0 != 0)
12698 break;
12699 /* If C or D is of the form (A & N) where
12700 (N & M) == M, or of the form (A | N) or
12701 (A ^ N) where (N & M) == 0, replace it with A. */
12702 pmop[which] = arg0xx[2 * which];
12703 break;
12704 case ERROR_MARK:
12705 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12706 break;
12707 /* If C or D is a constant N where (N & M) == 0, it can be
12708 omitted (replaced with 0). */
12709 if ((code == PLUS_EXPR
12710 || (code == MINUS_EXPR && which == 0))
12711 && (cst1 & wi::to_wide (pmop[which])) == 0)
12712 pmop[which] = build_int_cst (type, 0);
12713 /* Similarly, with C - N where (-N & M) == 0. */
12714 if (code == MINUS_EXPR
12715 && which == 1
12716 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12717 pmop[which] = build_int_cst (type, 0);
12718 break;
12719 default:
12720 gcc_unreachable ();
12723 /* Only build anything new if we optimized one or both arguments above. */
12724 if (pmop[0] == arg00 && pmop[1] == arg01)
12725 return NULL_TREE;
12727 if (TYPE_OVERFLOW_WRAPS (type))
12728 return type;
12729 else
12730 return unsigned_type_for (type);
12733 /* Used by contains_label_p and contains_label_1. */
12735 struct contains_label_data
12737 hash_set<tree> *pset;
12738 bool inside_switch_p;
12741 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12742 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12743 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12745 static tree
12746 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12748 contains_label_data *d = (contains_label_data *) data;
12749 switch (TREE_CODE (*tp))
12751 case LABEL_EXPR:
12752 return *tp;
12754 case CASE_LABEL_EXPR:
12755 if (!d->inside_switch_p)
12756 return *tp;
12757 return NULL_TREE;
12759 case SWITCH_EXPR:
12760 if (!d->inside_switch_p)
12762 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12763 return *tp;
12764 d->inside_switch_p = true;
12765 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12766 return *tp;
12767 d->inside_switch_p = false;
12768 *walk_subtrees = 0;
12770 return NULL_TREE;
12772 case GOTO_EXPR:
12773 *walk_subtrees = 0;
12774 return NULL_TREE;
12776 default:
12777 return NULL_TREE;
12781 /* Return whether the sub-tree ST contains a label which is accessible from
12782 outside the sub-tree. */
12784 static bool
12785 contains_label_p (tree st)
12787 hash_set<tree> pset;
12788 contains_label_data data = { &pset, false };
12789 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12792 /* Fold a ternary expression of code CODE and type TYPE with operands
12793 OP0, OP1, and OP2. Return the folded expression if folding is
12794 successful. Otherwise, return NULL_TREE. */
12796 tree
12797 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12798 tree op0, tree op1, tree op2)
12800 tree tem;
12801 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12802 enum tree_code_class kind = TREE_CODE_CLASS (code);
12804 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12805 && TREE_CODE_LENGTH (code) == 3);
12807 /* If this is a commutative operation, and OP0 is a constant, move it
12808 to OP1 to reduce the number of tests below. */
12809 if (commutative_ternary_tree_code (code)
12810 && tree_swap_operands_p (op0, op1))
12811 return fold_build3_loc (loc, code, type, op1, op0, op2);
12813 tem = generic_simplify (loc, code, type, op0, op1, op2);
12814 if (tem)
12815 return tem;
12817 /* Strip any conversions that don't change the mode. This is safe
12818 for every expression, except for a comparison expression because
12819 its signedness is derived from its operands. So, in the latter
12820 case, only strip conversions that don't change the signedness.
12822 Note that this is done as an internal manipulation within the
12823 constant folder, in order to find the simplest representation of
12824 the arguments so that their form can be studied. In any case,
12825 the appropriate type conversions should be put back in the tree
12826 that will get out of the constant folder. */
12827 if (op0)
12829 arg0 = op0;
12830 STRIP_NOPS (arg0);
12833 if (op1)
12835 arg1 = op1;
12836 STRIP_NOPS (arg1);
12839 if (op2)
12841 arg2 = op2;
12842 STRIP_NOPS (arg2);
12845 switch (code)
12847 case COMPONENT_REF:
12848 if (TREE_CODE (arg0) == CONSTRUCTOR
12849 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12851 unsigned HOST_WIDE_INT idx;
12852 tree field, value;
12853 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12854 if (field == arg1)
12855 return value;
12857 return NULL_TREE;
12859 case COND_EXPR:
12860 case VEC_COND_EXPR:
12861 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12862 so all simple results must be passed through pedantic_non_lvalue. */
12863 if (TREE_CODE (arg0) == INTEGER_CST)
12865 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12866 tem = integer_zerop (arg0) ? op2 : op1;
12867 /* Only optimize constant conditions when the selected branch
12868 has the same type as the COND_EXPR. This avoids optimizing
12869 away "c ? x : throw", where the throw has a void type.
12870 Avoid throwing away an operand that contains a label. */
12871 if ((!TREE_SIDE_EFFECTS (unused_op)
12872 || !contains_label_p (unused_op))
12873 && (! VOID_TYPE_P (TREE_TYPE (tem))
12874 || VOID_TYPE_P (type)))
12875 return protected_set_expr_location_unshare (tem, loc);
12876 return NULL_TREE;
12878 else if (TREE_CODE (arg0) == VECTOR_CST)
12880 unsigned HOST_WIDE_INT nelts;
12881 if ((TREE_CODE (arg1) == VECTOR_CST
12882 || TREE_CODE (arg1) == CONSTRUCTOR)
12883 && (TREE_CODE (arg2) == VECTOR_CST
12884 || TREE_CODE (arg2) == CONSTRUCTOR)
12885 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12887 vec_perm_builder sel (nelts, nelts, 1);
12888 for (unsigned int i = 0; i < nelts; i++)
12890 tree val = VECTOR_CST_ELT (arg0, i);
12891 if (integer_all_onesp (val))
12892 sel.quick_push (i);
12893 else if (integer_zerop (val))
12894 sel.quick_push (nelts + i);
12895 else /* Currently unreachable. */
12896 return NULL_TREE;
12898 vec_perm_indices indices (sel, 2, nelts);
12899 tree t = fold_vec_perm (type, arg1, arg2, indices);
12900 if (t != NULL_TREE)
12901 return t;
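/* An illustrative example (not part of the original source): a V4SI
   mask of {-1, 0, 0, -1} builds the permutation {0, 5, 6, 3},
   selecting elements 0 and 3 from arg1 and elements 1 and 2 from
   arg2.  */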
12905 /* If we have A op B ? A : C, we may be able to convert this to a
12906 simpler expression, depending on the operation and the values
12907 of B and C. Signed zeros prevent all of these transformations,
12908 for reasons given above each one.
12910 Also try swapping the arguments and inverting the conditional. */
12911 if (COMPARISON_CLASS_P (arg0)
12912 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12913 && !HONOR_SIGNED_ZEROS (op1))
12915 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12916 TREE_OPERAND (arg0, 0),
12917 TREE_OPERAND (arg0, 1),
12918 op1, op2);
12919 if (tem)
12920 return tem;
12923 if (COMPARISON_CLASS_P (arg0)
12924 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12925 && !HONOR_SIGNED_ZEROS (op2))
12927 enum tree_code comp_code = TREE_CODE (arg0);
12928 tree arg00 = TREE_OPERAND (arg0, 0);
12929 tree arg01 = TREE_OPERAND (arg0, 1);
12930 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12931 if (comp_code != ERROR_MARK)
12932 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12933 arg00,
12934 arg01,
12935 op2, op1);
12936 if (tem)
12937 return tem;
12940 /* If the second operand is simpler than the third, swap them
12941 since that produces better jump optimization results. */
12942 if (truth_value_p (TREE_CODE (arg0))
12943 && tree_swap_operands_p (op1, op2))
12945 location_t loc0 = expr_location_or (arg0, loc);
12946 /* See if this can be inverted. If it can't, possibly because
12947 it was a floating-point inequality comparison, don't do
12948 anything. */
12949 tem = fold_invert_truthvalue (loc0, arg0);
12950 if (tem)
12951 return fold_build3_loc (loc, code, type, tem, op2, op1);
12954 /* Convert A ? 1 : 0 to simply A. */
12955 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12956 : (integer_onep (op1)
12957 && !VECTOR_TYPE_P (type)))
12958 && integer_zerop (op2)
12959 /* If we try to convert OP0 to our type, the
12960 call to fold will try to move the conversion inside
12961 a COND, which will recurse. In that case, the COND_EXPR
12962 is probably the best choice, so leave it alone. */
12963 && type == TREE_TYPE (arg0))
12964 return protected_set_expr_location_unshare (arg0, loc);
12966 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12967 over COND_EXPR in cases such as floating point comparisons. */
12968 if (integer_zerop (op1)
12969 && code == COND_EXPR
12970 && integer_onep (op2)
12971 && !VECTOR_TYPE_P (type)
12972 && truth_value_p (TREE_CODE (arg0)))
12973 return fold_convert_loc (loc, type,
12974 invert_truthvalue_loc (loc, arg0));
12976 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12977 if (TREE_CODE (arg0) == LT_EXPR
12978 && integer_zerop (TREE_OPERAND (arg0, 1))
12979 && integer_zerop (op2)
12980 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12982 /* sign_bit_p looks through both zero and sign extensions,
12983 but for this optimization only sign extensions are
12984 usable. */
12985 tree tem2 = TREE_OPERAND (arg0, 0);
12986 while (tem != tem2)
12988 if (TREE_CODE (tem2) != NOP_EXPR
12989 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12991 tem = NULL_TREE;
12992 break;
12994 tem2 = TREE_OPERAND (tem2, 0);
12996 /* sign_bit_p only checks ARG1 bits within A's precision.
12997 If <sign bit of A> has wider type than A, bits outside
12998 of A's precision in <sign bit of A> need to be checked.
12999 If they are all 0, this optimization needs to be done
13000 in unsigned A's type, if they are all 1 in signed A's type,
13001 otherwise this can't be done. */
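/* An illustrative example (not part of the original source): for
   signed char A, arg1 == 128 has bits 8 and above all clear, so the
   AND is done in unsigned char; arg1 == -128 has them all set, so
   signed char is usable; any other pattern defeats the optimization.  */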
13002 if (tem
13003 && TYPE_PRECISION (TREE_TYPE (tem))
13004 < TYPE_PRECISION (TREE_TYPE (arg1))
13005 && TYPE_PRECISION (TREE_TYPE (tem))
13006 < TYPE_PRECISION (type))
13008 int inner_width, outer_width;
13009 tree tem_type;
13011 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13012 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13013 if (outer_width > TYPE_PRECISION (type))
13014 outer_width = TYPE_PRECISION (type);
13016 wide_int mask = wi::shifted_mask
13017 (inner_width, outer_width - inner_width, false,
13018 TYPE_PRECISION (TREE_TYPE (arg1)));
13020 wide_int common = mask & wi::to_wide (arg1);
13021 if (common == mask)
13023 tem_type = signed_type_for (TREE_TYPE (tem));
13024 tem = fold_convert_loc (loc, tem_type, tem);
13026 else if (common == 0)
13028 tem_type = unsigned_type_for (TREE_TYPE (tem));
13029 tem = fold_convert_loc (loc, tem_type, tem);
13031 else
13032 tem = NULL;
13035 if (tem)
13036 return
13037 fold_convert_loc (loc, type,
13038 fold_build2_loc (loc, BIT_AND_EXPR,
13039 TREE_TYPE (tem), tem,
13040 fold_convert_loc (loc,
13041 TREE_TYPE (tem),
13042 arg1)));
13045 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13046 already handled above. */
13047 if (TREE_CODE (arg0) == BIT_AND_EXPR
13048 && integer_onep (TREE_OPERAND (arg0, 1))
13049 && integer_zerop (op2)
13050 && integer_pow2p (arg1))
13052 tree tem = TREE_OPERAND (arg0, 0);
13053 STRIP_NOPS (tem);
13054 if (TREE_CODE (tem) == RSHIFT_EXPR
13055 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13056 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13057 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13058 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13059 fold_convert_loc (loc, type,
13060 TREE_OPERAND (tem, 0)),
13061 op1);
13064 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13065 is probably obsolete because the first operand should be a
13066 truth value (that's why we have the two cases above), but let's
13067 leave it in until we can confirm this for all front-ends. */
13068 if (integer_zerop (op2)
13069 && TREE_CODE (arg0) == NE_EXPR
13070 && integer_zerop (TREE_OPERAND (arg0, 1))
13071 && integer_pow2p (arg1)
13072 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13073 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13074 arg1, OEP_ONLY_CONST)
13075 /* operand_equal_p compares just the value, not the precision, so
13076 e.g. arg1 could be an 8-bit -128 and a power of two, while the
13077 BIT_AND_EXPR second operand is a 32-bit -128, which is not a
13078 power of two (or vice versa). */
13079 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13080 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13082 /* Disable the transformations below for vectors, since
13083 fold_binary_op_with_conditional_arg may undo them immediately,
13084 yielding an infinite loop. */
13085 if (code == VEC_COND_EXPR)
13086 return NULL_TREE;
13088 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13089 if (integer_zerop (op2)
13090 && truth_value_p (TREE_CODE (arg0))
13091 && truth_value_p (TREE_CODE (arg1))
13092 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13093 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13094 : TRUTH_ANDIF_EXPR,
13095 type, fold_convert_loc (loc, type, arg0), op1);
13097 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13098 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13099 && truth_value_p (TREE_CODE (arg0))
13100 && truth_value_p (TREE_CODE (arg1))
13101 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13103 location_t loc0 = expr_location_or (arg0, loc);
13104 /* Only perform transformation if ARG0 is easily inverted. */
13105 tem = fold_invert_truthvalue (loc0, arg0);
13106 if (tem)
13107 return fold_build2_loc (loc, code == VEC_COND_EXPR
13108 ? BIT_IOR_EXPR
13109 : TRUTH_ORIF_EXPR,
13110 type, fold_convert_loc (loc, type, tem),
13111 op1);
13114 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13115 if (integer_zerop (arg1)
13116 && truth_value_p (TREE_CODE (arg0))
13117 && truth_value_p (TREE_CODE (op2))
13118 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13120 location_t loc0 = expr_location_or (arg0, loc);
13121 /* Only perform transformation if ARG0 is easily inverted. */
13122 tem = fold_invert_truthvalue (loc0, arg0);
13123 if (tem)
13124 return fold_build2_loc (loc, code == VEC_COND_EXPR
13125 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13126 type, fold_convert_loc (loc, type, tem),
13127 op2);
13130 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13131 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13132 && truth_value_p (TREE_CODE (arg0))
13133 && truth_value_p (TREE_CODE (op2))
13134 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13135 return fold_build2_loc (loc, code == VEC_COND_EXPR
13136 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13137 type, fold_convert_loc (loc, type, arg0), op2);
13139 return NULL_TREE;
13141 case CALL_EXPR:
13142 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13143 of fold_ternary on them. */
13144 gcc_unreachable ();
13146 case BIT_FIELD_REF:
13147 if (TREE_CODE (arg0) == VECTOR_CST
13148 && (type == TREE_TYPE (TREE_TYPE (arg0))
13149 || (VECTOR_TYPE_P (type)
13150 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13151 && tree_fits_uhwi_p (op1)
13152 && tree_fits_uhwi_p (op2))
13154 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13155 unsigned HOST_WIDE_INT width
13156 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13157 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13158 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13159 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13161 if (n != 0
13162 && (idx % width) == 0
13163 && (n % width) == 0
13164 && known_le ((idx + n) / width,
13165 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13167 idx = idx / width;
13168 n = n / width;
13170 if (TREE_CODE (arg0) == VECTOR_CST)
13172 if (n == 1)
13174 tem = VECTOR_CST_ELT (arg0, idx);
13175 if (VECTOR_TYPE_P (type))
13176 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13177 return tem;
13180 tree_vector_builder vals (type, n, 1);
13181 for (unsigned i = 0; i < n; ++i)
13182 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13183 return vals.build ();
13188 /* On constants we can use native encode/interpret to constant
13189 fold (nearly) all BIT_FIELD_REFs. */
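/* An illustrative example (not part of the original source): on a
   little-endian target, BIT_FIELD_REF <0x12345678, 8, 8> encodes
   the constant as the bytes {0x78, 0x56, 0x34, 0x12} and
   reinterprets byte 1, yielding 0x56.  */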
13190 if (CONSTANT_CLASS_P (arg0)
13191 && can_native_interpret_type_p (type)
13192 && BITS_PER_UNIT == 8
13193 && tree_fits_uhwi_p (op1)
13194 && tree_fits_uhwi_p (op2))
13196 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13197 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13198 /* Limit us to a reasonable amount of work. To relax the
13199 other limitations we need bit-shifting of the buffer
13200 and rounding up the size. */
13201 if (bitpos % BITS_PER_UNIT == 0
13202 && bitsize % BITS_PER_UNIT == 0
13203 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13205 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13206 unsigned HOST_WIDE_INT len
13207 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13208 bitpos / BITS_PER_UNIT);
13209 if (len > 0
13210 && len * BITS_PER_UNIT >= bitsize)
13212 tree v = native_interpret_expr (type, b,
13213 bitsize / BITS_PER_UNIT);
13214 if (v)
13215 return v;
13220 return NULL_TREE;
13222 case VEC_PERM_EXPR:
13223 /* Perform constant folding of VEC_PERM_EXPR. */
13224 if (TREE_CODE (arg2) == VECTOR_CST
13225 && TREE_CODE (op0) == VECTOR_CST
13226 && TREE_CODE (op1) == VECTOR_CST)
13228 /* Build a vector of integers from the tree mask. */
13229 vec_perm_builder builder;
13230 if (!tree_to_vec_perm_builder (&builder, arg2))
13231 return NULL_TREE;
13233 /* Create a vec_perm_indices for the integer vector. */
13234 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13235 bool single_arg = (op0 == op1);
13236 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13237 return fold_vec_perm (type, op0, op1, sel);
13239 return NULL_TREE;
13241 case BIT_INSERT_EXPR:
13242 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
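/* An illustrative example (not part of the original source):
   inserting the 8-bit value 0xab at bit position 8 of the 32-bit
   constant 0x12345678 masks the destination to 0x12340078 and ORs
   in 0xab << 8, producing 0x1234ab78.  */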
13243 if (TREE_CODE (arg0) == INTEGER_CST
13244 && TREE_CODE (arg1) == INTEGER_CST)
13246 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13247 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13248 wide_int tem = (wi::to_wide (arg0)
13249 & wi::shifted_mask (bitpos, bitsize, true,
13250 TYPE_PRECISION (type)));
13251 wide_int tem2
13252 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13253 bitsize), bitpos);
13254 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13256 else if (TREE_CODE (arg0) == VECTOR_CST
13257 && CONSTANT_CLASS_P (arg1)
13258 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13259 TREE_TYPE (arg1)))
13261 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13262 unsigned HOST_WIDE_INT elsize
13263 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13264 if (bitpos % elsize == 0)
13266 unsigned k = bitpos / elsize;
13267 unsigned HOST_WIDE_INT nelts;
13268 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13269 return arg0;
13270 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13272 tree_vector_builder elts (type, nelts, 1);
13273 elts.quick_grow (nelts);
13274 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13275 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13276 return elts.build ();
13280 return NULL_TREE;
13282 default:
13283 return NULL_TREE;
13284 } /* switch (code) */
13287 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13288 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13289 constructor element index of the value returned. If the element is
13290 not found NULL_TREE is returned and *CTOR_IDX is updated to
13291 the index of the element after the ACCESS_INDEX position (which
13292 may be outside of the CTOR array). */
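/* An illustrative example (not part of the original source): for the
   CONSTRUCTOR of int a[4] = { [2] = 7 }, ACCESS_INDEX 2 returns the
   INTEGER_CST 7, while ACCESS_INDEX 1 has no explicit element and
   yields NULL_TREE.  */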
13294 tree
13295 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13296 unsigned *ctor_idx)
13298 tree index_type = NULL_TREE;
13299 signop index_sgn = UNSIGNED;
13300 offset_int low_bound = 0;
13302 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13304 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13305 if (domain_type && TYPE_MIN_VALUE (domain_type))
13307 /* Static constructors for variably sized objects make no sense. */
13308 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13309 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13310 /* ??? When it is obvious that the range is signed, treat it so. */
13311 if (TYPE_UNSIGNED (index_type)
13312 && TYPE_MAX_VALUE (domain_type)
13313 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13314 TYPE_MIN_VALUE (domain_type)))
13316 index_sgn = SIGNED;
13317 low_bound
13318 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13319 SIGNED);
13321 else
13323 index_sgn = TYPE_SIGN (index_type);
13324 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13329 if (index_type)
13330 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13331 index_sgn);
13333 offset_int index = low_bound;
13334 if (index_type)
13335 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13337 offset_int max_index = index;
13338 unsigned cnt;
13339 tree cfield, cval;
13340 bool first_p = true;
13342 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13344 /* An array constructor might explicitly set the index, or specify a
13345 range, or leave the index NULL, meaning that it is the next index
13346 after the previous one. */
13347 if (cfield)
13349 if (TREE_CODE (cfield) == INTEGER_CST)
13350 max_index = index
13351 = offset_int::from (wi::to_wide (cfield), index_sgn);
13352 else
13354 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13355 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13356 index_sgn);
13357 max_index
13358 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13359 index_sgn);
13360 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13363 else if (!first_p)
13365 index = max_index + 1;
13366 if (index_type)
13367 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13368 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13369 max_index = index;
13371 else
13372 first_p = false;
13374 /* Do we have a match? */
13375 if (wi::cmp (access_index, index, index_sgn) >= 0)
13377 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13379 if (ctor_idx)
13380 *ctor_idx = cnt;
13381 return cval;
13384 else if (in_gimple_form)
13385 /* We're past the element we search for. Note that during parsing
13386 the elements might not be sorted.
13387 ??? We should use a binary search and a flag on the
13388 CONSTRUCTOR as to whether elements are sorted in declaration
13389 order. */
13390 break;
13392 if (ctor_idx)
13393 *ctor_idx = cnt;
13394 return NULL_TREE;
13397 /* Perform constant folding and related simplification of EXPR.
13398 The related simplifications include x*1 => x, x*0 => 0, etc.,
13399 and application of the associative law.
13400 NOP_EXPR conversions may be removed freely (as long as we
13401 are careful not to change the type of the overall expression).
13402 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13403 but we can constant-fold them if they have constant operands. */
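/* An illustrative example (not part of the original source): X * 1
   folds to X, and a FLOAT_EXPR applied to the INTEGER_CST 3 folds
   to the REAL_CST 3.0, but a FLOAT_EXPR of a non-constant operand
   is left untouched.  */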
13405 #ifdef ENABLE_FOLD_CHECKING
13406 # define fold(x) fold_1 (x)
13407 static tree fold_1 (tree);
13408 static
13409 #endif
13410 tree
13411 fold (tree expr)
13413 const tree t = expr;
13414 enum tree_code code = TREE_CODE (t);
13415 enum tree_code_class kind = TREE_CODE_CLASS (code);
13416 tree tem;
13417 location_t loc = EXPR_LOCATION (expr);
13419 /* Return right away if a constant. */
13420 if (kind == tcc_constant)
13421 return t;
13423 /* CALL_EXPR-like objects with variable numbers of operands are
13424 treated specially. */
13425 if (kind == tcc_vl_exp)
13427 if (code == CALL_EXPR)
13429 tem = fold_call_expr (loc, expr, false);
13430 return tem ? tem : expr;
13432 return expr;
13435 if (IS_EXPR_CODE_CLASS (kind))
13437 tree type = TREE_TYPE (t);
13438 tree op0, op1, op2;
13440 switch (TREE_CODE_LENGTH (code))
13442 case 1:
13443 op0 = TREE_OPERAND (t, 0);
13444 tem = fold_unary_loc (loc, code, type, op0);
13445 return tem ? tem : expr;
13446 case 2:
13447 op0 = TREE_OPERAND (t, 0);
13448 op1 = TREE_OPERAND (t, 1);
13449 tem = fold_binary_loc (loc, code, type, op0, op1);
13450 return tem ? tem : expr;
13451 case 3:
13452 op0 = TREE_OPERAND (t, 0);
13453 op1 = TREE_OPERAND (t, 1);
13454 op2 = TREE_OPERAND (t, 2);
13455 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13456 return tem ? tem : expr;
13457 default:
13458 break;
13462 switch (code)
13464 case ARRAY_REF:
13466 tree op0 = TREE_OPERAND (t, 0);
13467 tree op1 = TREE_OPERAND (t, 1);
13469 if (TREE_CODE (op1) == INTEGER_CST
13470 && TREE_CODE (op0) == CONSTRUCTOR
13471 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13473 tree val = get_array_ctor_element_at_index (op0,
13474 wi::to_offset (op1));
13475 if (val)
13476 return val;
13479 return t;
13482 /* Return a VECTOR_CST if possible. */
13483 case CONSTRUCTOR:
13485 tree type = TREE_TYPE (t);
13486 if (TREE_CODE (type) != VECTOR_TYPE)
13487 return t;
13489 unsigned i;
13490 tree val;
13491 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13492 if (! CONSTANT_CLASS_P (val))
13493 return t;
13495 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13498 case CONST_DECL:
13499 return fold (DECL_INITIAL (t));
13501 default:
13502 return t;
13503 } /* switch (code) */
13506 #ifdef ENABLE_FOLD_CHECKING
13507 #undef fold
13509 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13510 hash_table<nofree_ptr_hash<const tree_node> > *);
13511 static void fold_check_failed (const_tree, const_tree);
13512 void print_fold_checksum (const_tree);
13514 /* When --enable-checking=fold, compute a digest of expr before
13515 and after the actual fold call, to verify that fold did not
13516 accidentally change the original expr. */
13518 tree
13519 fold (tree expr)
13521 tree ret;
13522 struct md5_ctx ctx;
13523 unsigned char checksum_before[16], checksum_after[16];
13524 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13526 md5_init_ctx (&ctx);
13527 fold_checksum_tree (expr, &ctx, &ht);
13528 md5_finish_ctx (&ctx, checksum_before);
13529 ht.empty ();
13531 ret = fold_1 (expr);
13533 md5_init_ctx (&ctx);
13534 fold_checksum_tree (expr, &ctx, &ht);
13535 md5_finish_ctx (&ctx, checksum_after);
13537 if (memcmp (checksum_before, checksum_after, 16))
13538 fold_check_failed (expr, ret);
13540 return ret;
13543 void
13544 print_fold_checksum (const_tree expr)
13546 struct md5_ctx ctx;
13547 unsigned char checksum[16], cnt;
13548 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13550 md5_init_ctx (&ctx);
13551 fold_checksum_tree (expr, &ctx, &ht);
13552 md5_finish_ctx (&ctx, checksum);
13553 for (cnt = 0; cnt < 16; ++cnt)
13554 fprintf (stderr, "%02x", checksum[cnt]);
13555 putc ('\n', stderr);
13558 static void
13559 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13561 internal_error ("fold check: original tree changed by fold");
13564 static void
13565 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13566 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13568 const tree_node **slot;
13569 enum tree_code code;
13570 union tree_node *buf;
13571 int i, len;
13573 recursive_label:
13574 if (expr == NULL)
13575 return;
13576 slot = ht->find_slot (expr, INSERT);
13577 if (*slot != NULL)
13578 return;
13579 *slot = expr;
13580 code = TREE_CODE (expr);
13581 if (TREE_CODE_CLASS (code) == tcc_declaration
13582 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13584 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13585 size_t sz = tree_size (expr);
13586 buf = XALLOCAVAR (union tree_node, sz);
13587 memcpy ((char *) buf, expr, sz);
13588 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13589 buf->decl_with_vis.symtab_node = NULL;
13590 buf->base.nowarning_flag = 0;
13591 expr = (tree) buf;
13593 else if (TREE_CODE_CLASS (code) == tcc_type
13594 && (TYPE_POINTER_TO (expr)
13595 || TYPE_REFERENCE_TO (expr)
13596 || TYPE_CACHED_VALUES_P (expr)
13597 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13598 || TYPE_NEXT_VARIANT (expr)
13599 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13601 /* Allow these fields to be modified. */
13602 tree tmp;
13603 size_t sz = tree_size (expr);
13604 buf = XALLOCAVAR (union tree_node, sz);
13605 memcpy ((char *) buf, expr, sz);
13606 expr = tmp = (tree) buf;
13607 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13608 TYPE_POINTER_TO (tmp) = NULL;
13609 TYPE_REFERENCE_TO (tmp) = NULL;
13610 TYPE_NEXT_VARIANT (tmp) = NULL;
13611 TYPE_ALIAS_SET (tmp) = -1;
13612 if (TYPE_CACHED_VALUES_P (tmp))
13614 TYPE_CACHED_VALUES_P (tmp) = 0;
13615 TYPE_CACHED_VALUES (tmp) = NULL;
13618 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13620 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13621 that and change builtins.cc etc. instead - see PR89543. */
13622 size_t sz = tree_size (expr);
13623 buf = XALLOCAVAR (union tree_node, sz);
13624 memcpy ((char *) buf, expr, sz);
13625 buf->base.nowarning_flag = 0;
13626 expr = (tree) buf;
13628 md5_process_bytes (expr, tree_size (expr), ctx);
13629 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13630 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13631 if (TREE_CODE_CLASS (code) != tcc_type
13632 && TREE_CODE_CLASS (code) != tcc_declaration
13633 && code != TREE_LIST
13634 && code != SSA_NAME
13635 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13636 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13637 switch (TREE_CODE_CLASS (code))
13639 case tcc_constant:
13640 switch (code)
13642 case STRING_CST:
13643 md5_process_bytes (TREE_STRING_POINTER (expr),
13644 TREE_STRING_LENGTH (expr), ctx);
13645 break;
13646 case COMPLEX_CST:
13647 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13648 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13649 break;
13650 case VECTOR_CST:
13651 len = vector_cst_encoded_nelts (expr);
13652 for (i = 0; i < len; ++i)
13653 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13654 break;
13655 default:
13656 break;
13658 break;
13659 case tcc_exceptional:
13660 switch (code)
13662 case TREE_LIST:
13663 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13664 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13665 expr = TREE_CHAIN (expr);
13666 goto recursive_label;
13667 break;
13668 case TREE_VEC:
13669 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13670 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13671 break;
13672 default:
13673 break;
13675 break;
13676 case tcc_expression:
13677 case tcc_reference:
13678 case tcc_comparison:
13679 case tcc_unary:
13680 case tcc_binary:
13681 case tcc_statement:
13682 case tcc_vl_exp:
13683 len = TREE_OPERAND_LENGTH (expr);
13684 for (i = 0; i < len; ++i)
13685 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13686 break;
13687 case tcc_declaration:
13688 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13689 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13690 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13692 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13693 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13694 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13695 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13696 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13699 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13701 if (TREE_CODE (expr) == FUNCTION_DECL)
13703 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13704 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13706 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13708 break;
13709 case tcc_type:
13710 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13711 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13712 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13713 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13714 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13715 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13716 if (INTEGRAL_TYPE_P (expr)
13717 || SCALAR_FLOAT_TYPE_P (expr))
13719 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13720 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13722 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13723 if (TREE_CODE (expr) == RECORD_TYPE
13724 || TREE_CODE (expr) == UNION_TYPE
13725 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13726 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13727 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13728 break;
13729 default:
13730 break;
13734 /* Helper function for outputting the checksum of a tree T. When
13735 debugging with gdb, you can "define mynext" to be "next" followed
13736 by "call debug_fold_checksum (op0)", then just trace down till the
13737 outputs differ. */
13739 DEBUG_FUNCTION void
13740 debug_fold_checksum (const_tree t)
13742 int i;
13743 unsigned char checksum[16];
13744 struct md5_ctx ctx;
13745 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13747 md5_init_ctx (&ctx);
13748 fold_checksum_tree (t, &ctx, &ht);
13749 md5_finish_ctx (&ctx, checksum);
13750 ht.empty ();
13752 for (i = 0; i < 16; i++)
13753 fprintf (stderr, "%d ", checksum[i]);
13755 fprintf (stderr, "\n");
13758 #endif
13760 /* Fold a unary tree expression with code CODE of type TYPE with an
13761 operand OP0. LOC is the location of the resulting expression.
13762 Return a folded expression if successful. Otherwise, return a tree
13763 expression with code CODE of type TYPE with an operand OP0. */
13765 tree
13766 fold_build1_loc (location_t loc,
13767 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13769 tree tem;
13770 #ifdef ENABLE_FOLD_CHECKING
13771 unsigned char checksum_before[16], checksum_after[16];
13772 struct md5_ctx ctx;
13773 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13775 md5_init_ctx (&ctx);
13776 fold_checksum_tree (op0, &ctx, &ht);
13777 md5_finish_ctx (&ctx, checksum_before);
13778 ht.empty ();
13779 #endif
13781 tem = fold_unary_loc (loc, code, type, op0);
13782 if (!tem)
13783 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13785 #ifdef ENABLE_FOLD_CHECKING
13786 md5_init_ctx (&ctx);
13787 fold_checksum_tree (op0, &ctx, &ht);
13788 md5_finish_ctx (&ctx, checksum_after);
13790 if (memcmp (checksum_before, checksum_after, 16))
13791 fold_check_failed (op0, tem);
13792 #endif
13793 return tem;
13796 /* Fold a binary tree expression with code CODE of type TYPE with
13797 operands OP0 and OP1. LOC is the location of the resulting
13798 expression. Return a folded expression if successful. Otherwise,
13799 return a tree expression with code CODE of type TYPE with operands
13800 OP0 and OP1. */
13802 tree
13803 fold_build2_loc (location_t loc,
13804 enum tree_code code, tree type, tree op0, tree op1
13805 MEM_STAT_DECL)
13807 tree tem;
13808 #ifdef ENABLE_FOLD_CHECKING
13809 unsigned char checksum_before_op0[16],
13810 checksum_before_op1[16],
13811 checksum_after_op0[16],
13812 checksum_after_op1[16];
13813 struct md5_ctx ctx;
13814 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13816 md5_init_ctx (&ctx);
13817 fold_checksum_tree (op0, &ctx, &ht);
13818 md5_finish_ctx (&ctx, checksum_before_op0);
13819 ht.empty ();
13821 md5_init_ctx (&ctx);
13822 fold_checksum_tree (op1, &ctx, &ht);
13823 md5_finish_ctx (&ctx, checksum_before_op1);
13824 ht.empty ();
13825 #endif
13827 tem = fold_binary_loc (loc, code, type, op0, op1);
13828 if (!tem)
13829 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13831 #ifdef ENABLE_FOLD_CHECKING
13832 md5_init_ctx (&ctx);
13833 fold_checksum_tree (op0, &ctx, &ht);
13834 md5_finish_ctx (&ctx, checksum_after_op0);
13835 ht.empty ();
13837 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13838 fold_check_failed (op0, tem);
13840 md5_init_ctx (&ctx);
13841 fold_checksum_tree (op1, &ctx, &ht);
13842 md5_finish_ctx (&ctx, checksum_after_op1);
13844 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13845 fold_check_failed (op1, tem);
13846 #endif
13847 return tem;
13850 /* Fold a ternary tree expression with code CODE of type TYPE with
13851 operands OP0, OP1, and OP2. Return a folded expression if
13852 successful. Otherwise, return a tree expression with code CODE of
13853 type TYPE with operands OP0, OP1, and OP2. */
13855 tree
13856 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13857 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13859 tree tem;
13860 #ifdef ENABLE_FOLD_CHECKING
13861 unsigned char checksum_before_op0[16],
13862 checksum_before_op1[16],
13863 checksum_before_op2[16],
13864 checksum_after_op0[16],
13865 checksum_after_op1[16],
13866 checksum_after_op2[16];
13867 struct md5_ctx ctx;
13868 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13870 md5_init_ctx (&ctx);
13871 fold_checksum_tree (op0, &ctx, &ht);
13872 md5_finish_ctx (&ctx, checksum_before_op0);
13873 ht.empty ();
13875 md5_init_ctx (&ctx);
13876 fold_checksum_tree (op1, &ctx, &ht);
13877 md5_finish_ctx (&ctx, checksum_before_op1);
13878 ht.empty ();
13880 md5_init_ctx (&ctx);
13881 fold_checksum_tree (op2, &ctx, &ht);
13882 md5_finish_ctx (&ctx, checksum_before_op2);
13883 ht.empty ();
13884 #endif
13886 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13887 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13888 if (!tem)
13889 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13891 #ifdef ENABLE_FOLD_CHECKING
13892 md5_init_ctx (&ctx);
13893 fold_checksum_tree (op0, &ctx, &ht);
13894 md5_finish_ctx (&ctx, checksum_after_op0);
13895 ht.empty ();
13897 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13898 fold_check_failed (op0, tem);
13900 md5_init_ctx (&ctx);
13901 fold_checksum_tree (op1, &ctx, &ht);
13902 md5_finish_ctx (&ctx, checksum_after_op1);
13903 ht.empty ();
13905 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13906 fold_check_failed (op1, tem);
13908 md5_init_ctx (&ctx);
13909 fold_checksum_tree (op2, &ctx, &ht);
13910 md5_finish_ctx (&ctx, checksum_after_op2);
13912 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13913 fold_check_failed (op2, tem);
13914 #endif
13915 return tem;
13918 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
13919 arguments in ARGARRAY, and a null static chain.
13920 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13921 of type TYPE from the given operands as constructed by build_call_array. */
13923 tree
13924 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13925 int nargs, tree *argarray)
13927 tree tem;
13928 #ifdef ENABLE_FOLD_CHECKING
13929 unsigned char checksum_before_fn[16],
13930 checksum_before_arglist[16],
13931 checksum_after_fn[16],
13932 checksum_after_arglist[16];
13933 struct md5_ctx ctx;
13934 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13935 int i;
13937 md5_init_ctx (&ctx);
13938 fold_checksum_tree (fn, &ctx, &ht);
13939 md5_finish_ctx (&ctx, checksum_before_fn);
13940 ht.empty ();
13942 md5_init_ctx (&ctx);
13943 for (i = 0; i < nargs; i++)
13944 fold_checksum_tree (argarray[i], &ctx, &ht);
13945 md5_finish_ctx (&ctx, checksum_before_arglist);
13946 ht.empty ();
13947 #endif
13949 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13950 if (!tem)
13951 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13953 #ifdef ENABLE_FOLD_CHECKING
13954 md5_init_ctx (&ctx);
13955 fold_checksum_tree (fn, &ctx, &ht);
13956 md5_finish_ctx (&ctx, checksum_after_fn);
13957 ht.empty ();
13959 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13960 fold_check_failed (fn, tem);
13962 md5_init_ctx (&ctx);
13963 for (i = 0; i < nargs; i++)
13964 fold_checksum_tree (argarray[i], &ctx, &ht);
13965 md5_finish_ctx (&ctx, checksum_after_arglist);
13967 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13968 fold_check_failed (NULL_TREE, tem);
13969 #endif
13970 return tem;
13973 /* Perform constant folding and related simplification of initializer
13974 expression EXPR. These behave identically to "fold_buildN" but ignore
13975 potential run-time traps and exceptions that fold must preserve. */
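/* An illustrative example (not part of the original source): under
   -ftrapping-math, fold must keep 1.0 / 0.0 as a run-time division,
   but fold_init may reduce it to +Inf, because a static initializer
   is evaluated exactly once at translation time.  */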
13977 #define START_FOLD_INIT \
13978 int saved_signaling_nans = flag_signaling_nans;\
13979 int saved_trapping_math = flag_trapping_math;\
13980 int saved_rounding_math = flag_rounding_math;\
13981 int saved_trapv = flag_trapv;\
13982 int saved_folding_initializer = folding_initializer;\
13983 flag_signaling_nans = 0;\
13984 flag_trapping_math = 0;\
13985 flag_rounding_math = 0;\
13986 flag_trapv = 0;\
13987 folding_initializer = 1;
13989 #define END_FOLD_INIT \
13990 flag_signaling_nans = saved_signaling_nans;\
13991 flag_trapping_math = saved_trapping_math;\
13992 flag_rounding_math = saved_rounding_math;\
13993 flag_trapv = saved_trapv;\
13994 folding_initializer = saved_folding_initializer;
13996 tree
13997 fold_init (tree expr)
13999 tree result;
14000 START_FOLD_INIT;
14002 result = fold (expr);
14004 END_FOLD_INIT;
14005 return result;
14008 tree
14009 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14010 tree type, tree op)
14012 tree result;
14013 START_FOLD_INIT;
14015 result = fold_build1_loc (loc, code, type, op);
14017 END_FOLD_INIT;
14018 return result;
14021 tree
14022 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14023 tree type, tree op0, tree op1)
14025 tree result;
14026 START_FOLD_INIT;
14028 result = fold_build2_loc (loc, code, type, op0, op1);
14030 END_FOLD_INIT;
14031 return result;
14034 tree
14035 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14036 int nargs, tree *argarray)
14038 tree result;
14039 START_FOLD_INIT;
14041 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14043 END_FOLD_INIT;
14044 return result;
14047 tree
14048 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14049 tree lhs, tree rhs)
14051 tree result;
14052 START_FOLD_INIT;
14054 result = fold_binary_loc (loc, code, type, lhs, rhs);
14056 END_FOLD_INIT;
14057 return result;
14060 #undef START_FOLD_INIT
14061 #undef END_FOLD_INIT
14063 /* Determine if the first argument is a multiple of the second argument.
14064 Return 0 if it is not, or if we cannot easily determine it to be.
14066 An example of the sort of thing we care about (at this point; this routine
14067 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14068 fold cases do now) is discovering that
14070 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14072 is a multiple of
14074 SAVE_EXPR (J * 8)
14076 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14078 This code also handles discovering that
14080 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14082 is a multiple of 8 so we don't have to worry about dealing with a
14083 possible remainder.
14085 Note that we *look* inside a SAVE_EXPR only to determine how it was
14086 calculated; it is not safe for fold to do much of anything else with the
14087 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14088 at run time. For example, the latter example above *cannot* be implemented
14089 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14090 evaluation time of the original SAVE_EXPR is not necessarily the same at
14091 the time the new expression is evaluated. The only optimization of this
14092 sort that would be valid is changing
14094 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14096 divided by 8 to
14098 SAVE_EXPR (I) * SAVE_EXPR (J)
14100 (where the same SAVE_EXPR (J) is used in the original and the
14101 transformed version).
14103 NOWRAP specifies whether all outer operations in TYPE should
14104 be considered not wrapping. Any type conversion within TOP acts
14105 as a barrier and we will fall back to NOWRAP being false.
14106 NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14107 as not wrapping even though they are generally using unsigned arithmetic. */
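/* An illustrative example (not part of the original source, assuming
   an unsigned wrapping TYPE): J * 24 is a multiple of 24 when NOWRAP
   is true, but with NOWRAP false the multiplication could wrap past
   the precision, so only power-of-two bottoms such as 8 remain
   provable.  */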
14109 int
14110 multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14112 gimple *stmt;
14113 tree op1, op2;
14115 if (operand_equal_p (top, bottom, 0))
14116 return 1;
14118 if (TREE_CODE (type) != INTEGER_TYPE)
14119 return 0;
14121 switch (TREE_CODE (top))
14123 case BIT_AND_EXPR:
14124 /* Bitwise and provides a power of two multiple. If the mask is
14125 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14126 if (!integer_pow2p (bottom))
14127 return 0;
14128 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14129 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14131 case MULT_EXPR:
14132 /* If the multiplication can wrap we cannot recurse further unless
14133 the bottom is a power of two which is where wrapping does not
14134 matter. */
14135 if (!nowrap
14136 && !TYPE_OVERFLOW_UNDEFINED (type)
14137 && !integer_pow2p (bottom))
14138 return 0;
14139 if (TREE_CODE (bottom) == INTEGER_CST)
14141 op1 = TREE_OPERAND (top, 0);
14142 op2 = TREE_OPERAND (top, 1);
14143 if (TREE_CODE (op1) == INTEGER_CST)
14144 std::swap (op1, op2);
14145 if (TREE_CODE (op2) == INTEGER_CST)
14147 if (multiple_of_p (type, op2, bottom, nowrap))
14148 return 1;
14149 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14150 if (multiple_of_p (type, bottom, op2, nowrap))
14152 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14153 wi::to_widest (op2));
14154 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14156 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14157 return multiple_of_p (type, op1, op2, nowrap);
14160 return multiple_of_p (type, op1, bottom, nowrap);
14163 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14164 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14166 case LSHIFT_EXPR:
14167 /* Handle X << CST as X * (1 << CST) and only process the constant. */
14168 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14170 op1 = TREE_OPERAND (top, 1);
14171 if (wi::to_widest (op1) < TYPE_PRECISION (type))
14173 wide_int mul_op
14174 = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14175 return multiple_of_p (type,
14176 wide_int_to_tree (type, mul_op), bottom,
14177 nowrap);
14180 return 0;
14182 case MINUS_EXPR:
14183 case PLUS_EXPR:
14184 /* If the addition or subtraction can wrap we cannot recurse further
14185 unless bottom is a power of two which is where wrapping does not
14186 matter. */
14187 if (!nowrap
14188 && !TYPE_OVERFLOW_UNDEFINED (type)
14189 && !integer_pow2p (bottom))
14190 return 0;
14192 /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14193 unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14194 but 0xfffffffd is not. */
14195 op1 = TREE_OPERAND (top, 1);
14196 if (TREE_CODE (top) == PLUS_EXPR
14197 && nowrap
14198 && TYPE_UNSIGNED (type)
14199 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14200 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14202 /* It is impossible to prove if op0 +- op1 is a multiple of bottom
14203 precisely, so be conservative here checking if both op0 and op1
14204 are multiple of bottom. Note we check the second operand first
14205 since it's usually simpler. */
14206 return (multiple_of_p (type, op1, bottom, nowrap)
14207 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14209 CASE_CONVERT:
14210 /* Can't handle conversions from non-integral or wider integral type. */
14211 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14212 || (TYPE_PRECISION (type)
14213 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14214 return 0;
14215 /* NOWRAP only extends to operations in the outermost type so
14216 make sure to strip it off here. */
14217 return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14218 TREE_OPERAND (top, 0), bottom, false);
14220 case SAVE_EXPR:
14221 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14223 case COND_EXPR:
14224 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14225 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14227 case INTEGER_CST:
14228 if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14229 return 0;
14230 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14231 SIGNED);
14233 case SSA_NAME:
14234 if (TREE_CODE (bottom) == INTEGER_CST
14235 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14236 && gimple_code (stmt) == GIMPLE_ASSIGN)
14238 enum tree_code code = gimple_assign_rhs_code (stmt);
14240 /* Check for special cases to see if top is defined as a multiple
14241 of bottom:
14243 top = (X & ~(bottom - 1)) ; bottom is power of 2
14247 Y = X % bottom
14248 top = X - Y. */
14249 if (code == BIT_AND_EXPR
14250 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14251 && TREE_CODE (op2) == INTEGER_CST
14252 && integer_pow2p (bottom)
14253 && wi::multiple_of_p (wi::to_widest (op2),
14254 wi::to_widest (bottom), UNSIGNED))
14255 return 1;
14257 op1 = gimple_assign_rhs1 (stmt);
14258 if (code == MINUS_EXPR
14259 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14260 && TREE_CODE (op2) == SSA_NAME
14261 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14262 && gimple_code (stmt) == GIMPLE_ASSIGN
14263 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14264 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14265 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14266 return 1;
14269 /* fall through */
14271 default:
14272 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14273 return multiple_p (wi::to_poly_widest (top),
14274 wi::to_poly_widest (bottom));
14276 return 0;
14280 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14281 This function returns true for integer expressions, and returns
14282 false if uncertain. */
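/* An illustrative example (not part of the original source):
   fabs (x) + 1.0 is not known finite, since PLUS_EXPR is not
   handled below and the sum may overflow to +Inf, whereas a
   FLOAT_EXPR of an integer is always finite.  */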
14284 bool
14285 tree_expr_finite_p (const_tree x)
14287 machine_mode mode = element_mode (x);
14288 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14289 return true;
14290 switch (TREE_CODE (x))
14292 case REAL_CST:
14293 return real_isfinite (TREE_REAL_CST_PTR (x));
14294 case COMPLEX_CST:
14295 return tree_expr_finite_p (TREE_REALPART (x))
14296 && tree_expr_finite_p (TREE_IMAGPART (x));
14297 case FLOAT_EXPR:
14298 return true;
14299 case ABS_EXPR:
14300 case CONVERT_EXPR:
14301 case NON_LVALUE_EXPR:
14302 case NEGATE_EXPR:
14303 case SAVE_EXPR:
14304 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14305 case MIN_EXPR:
14306 case MAX_EXPR:
14307 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14308 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14309 case COND_EXPR:
14310 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14311 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14312 case CALL_EXPR:
14313 switch (get_call_combined_fn (x))
14315 CASE_CFN_FABS:
14316 CASE_CFN_FABS_FN:
14317 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14318 CASE_CFN_FMAX:
14319 CASE_CFN_FMAX_FN:
14320 CASE_CFN_FMIN:
14321 CASE_CFN_FMIN_FN:
14322 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14323 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14324 default:
14325 return false;
14328 default:
14329 return false;
14333 /* Return true if expression X evaluates to an infinity.
14334 This function returns false for integer expressions. */
14336 bool
14337 tree_expr_infinite_p (const_tree x)
14339 if (!HONOR_INFINITIES (x))
14340 return false;
14341 switch (TREE_CODE (x))
14343 case REAL_CST:
14344 return real_isinf (TREE_REAL_CST_PTR (x));
14345 case ABS_EXPR:
14346 case NEGATE_EXPR:
14347 case NON_LVALUE_EXPR:
14348 case SAVE_EXPR:
14349 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14350 case COND_EXPR:
14351 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14352 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14353 default:
14354 return false;
14358 /* Return true if expression X could evaluate to an infinity.
14359 This function returns false for integer expressions, and returns
14360 true if uncertain. */
14362 bool
14363 tree_expr_maybe_infinite_p (const_tree x)
14365 if (!HONOR_INFINITIES (x))
14366 return false;
14367 switch (TREE_CODE (x))
14369 case REAL_CST:
14370 return real_isinf (TREE_REAL_CST_PTR (x));
14371 case FLOAT_EXPR:
14372 return false;
14373 case ABS_EXPR:
14374 case NEGATE_EXPR:
14375 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14376 case COND_EXPR:
14377 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14378 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14379 default:
14380 return true;
14384 /* Return true if expression X evaluates to a signaling NaN.
14385 This function returns false for integer expressions. */
14387 bool
14388 tree_expr_signaling_nan_p (const_tree x)
14390 if (!HONOR_SNANS (x))
14391 return false;
14392 switch (TREE_CODE (x))
14394 case REAL_CST:
14395 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14396 case NON_LVALUE_EXPR:
14397 case SAVE_EXPR:
14398 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14399 case COND_EXPR:
14400 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14401 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14402 default:
14403 return false;
14407 /* Return true if expression X could evaluate to a signaling NaN.
14408 This function returns false for integer expressions, and returns
14409 true if uncertain. */
14411 bool
14412 tree_expr_maybe_signaling_nan_p (const_tree x)
14414 if (!HONOR_SNANS (x))
14415 return false;
14416 switch (TREE_CODE (x))
14418 case REAL_CST:
14419 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14420 case FLOAT_EXPR:
14421 return false;
14422 case ABS_EXPR:
14423 case CONVERT_EXPR:
14424 case NEGATE_EXPR:
14425 case NON_LVALUE_EXPR:
14426 case SAVE_EXPR:
14427 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14428 case MIN_EXPR:
14429 case MAX_EXPR:
14430 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14431 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14432 case COND_EXPR:
14433 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14434 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14435 case CALL_EXPR:
14436 switch (get_call_combined_fn (x))
14438 CASE_CFN_FABS:
14439 CASE_CFN_FABS_FN:
14440 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14441 CASE_CFN_FMAX:
14442 CASE_CFN_FMAX_FN:
14443 CASE_CFN_FMIN:
14444 CASE_CFN_FMIN_FN:
14445 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14446 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14447 default:
14448 return true;
14450 default:
14451 return true;
14455 /* Return true if expression X evaluates to a NaN.
14456 This function returns false for integer expressions. */
14458 bool
14459 tree_expr_nan_p (const_tree x)
14461 if (!HONOR_NANS (x))
14462 return false;
14463 switch (TREE_CODE (x))
14465 case REAL_CST:
14466 return real_isnan (TREE_REAL_CST_PTR (x));
14467 case NON_LVALUE_EXPR:
14468 case SAVE_EXPR:
14469 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14470 case COND_EXPR:
14471 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14472 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14473 default:
14474 return false;
14478 /* Return true if expression X could evaluate to a NaN.
14479 This function returns false for integer expressions, and returns
14480 true if uncertain. */
14482 bool
14483 tree_expr_maybe_nan_p (const_tree x)
14485 if (!HONOR_NANS (x))
14486 return false;
14487 switch (TREE_CODE (x))
14489 case REAL_CST:
14490 return real_isnan (TREE_REAL_CST_PTR (x));
14491 case FLOAT_EXPR:
14492 return false;
14493 case PLUS_EXPR:
14494 case MINUS_EXPR:
14495 case MULT_EXPR:
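/* Inf - Inf, 0 * Inf and similar combinations yield NaN, so it is
   not enough that neither operand is a NaN; both operands must be
   known finite for the result to be NaN-free.  */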
14496 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14497 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14498 case ABS_EXPR:
14499 case CONVERT_EXPR:
14500 case NEGATE_EXPR:
14501 case NON_LVALUE_EXPR:
14502 case SAVE_EXPR:
14503 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14504 case MIN_EXPR:
14505 case MAX_EXPR:
14506 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14507 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14508 case COND_EXPR:
14509 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14510 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14511 case CALL_EXPR:
14512 switch (get_call_combined_fn (x))
14514 CASE_CFN_FABS:
14515 CASE_CFN_FABS_FN:
14516 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14517 CASE_CFN_FMAX:
14518 CASE_CFN_FMAX_FN:
14519 CASE_CFN_FMIN:
14520 CASE_CFN_FMIN_FN:
14521 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14522 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14523 default:
14524 return true;
14526 default:
14527 return true;
14531 /* Return true if expression X could evaluate to -0.0.
14532 This function returns true if uncertain. */
14534 bool
14535 tree_expr_maybe_real_minus_zero_p (const_tree x)
14537 if (!HONOR_SIGNED_ZEROS (x))
14538 return false;
14539 switch (TREE_CODE (x))
14541 case REAL_CST:
14542 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14543 case INTEGER_CST:
14544 case FLOAT_EXPR:
14545 case ABS_EXPR:
14546 return false;
14547 case NON_LVALUE_EXPR:
14548 case SAVE_EXPR:
14549 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14550 case COND_EXPR:
14551 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14552 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14553 case CALL_EXPR:
14554 switch (get_call_combined_fn (x))
14556 CASE_CFN_FABS:
14557 CASE_CFN_FABS_FN:
14558 return false;
14559 default:
14560 break;
14562 default:
14563 break;
14565 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14566 but currently those predicates require tree and not const_tree. */
14567 return true;
14570 #define tree_expr_nonnegative_warnv_p(X, Y) \
14571 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14573 #define RECURSE(X) \
14574 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
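/* The #define above turns any direct recursive call into a hard
   compile-time error; RECURSE bypasses it by parenthesizing the
   function name (a parenthesized name is not treated as a
   function-like macro invocation) and threads DEPTH through each
   level so the recursion bound can take effect.  */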
14576 /* Return true if CODE or TYPE is known to be non-negative. */
14578 static bool
14579 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14581 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14582 && truth_value_p (code))
14583 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14584 have a signed:1 type (where the values are -1 and 0). */
14585 return true;
14586 return false;
14589 /* Return true if (CODE OP0) is known to be non-negative. If the return
14590 value is based on the assumption that signed overflow is undefined,
14591 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14592 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14594 bool
14595 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14596 bool *strict_overflow_p, int depth)
14598 if (TYPE_UNSIGNED (type))
14599 return true;
14601 switch (code)
14603 case ABS_EXPR:
14604 /* We can't return 1 if flag_wrapv is set because
14605 ABS_EXPR<INT_MIN> = INT_MIN. */
14606 if (!ANY_INTEGRAL_TYPE_P (type))
14607 return true;
14608 if (TYPE_OVERFLOW_UNDEFINED (type))
14610 *strict_overflow_p = true;
14611 return true;
14613 break;
14615 case NON_LVALUE_EXPR:
14616 case FLOAT_EXPR:
14617 case FIX_TRUNC_EXPR:
14618 return RECURSE (op0);
14620 CASE_CONVERT:
14622 tree inner_type = TREE_TYPE (op0);
14623 tree outer_type = type;
14625 if (TREE_CODE (outer_type) == REAL_TYPE)
14627 if (TREE_CODE (inner_type) == REAL_TYPE)
14628 return RECURSE (op0);
14629 if (INTEGRAL_TYPE_P (inner_type))
14631 if (TYPE_UNSIGNED (inner_type))
14632 return true;
14633 return RECURSE (op0);
14636 else if (INTEGRAL_TYPE_P (outer_type))
14638 if (TREE_CODE (inner_type) == REAL_TYPE)
14639 return RECURSE (op0);
14640 if (INTEGRAL_TYPE_P (inner_type))
14641 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14642 && TYPE_UNSIGNED (inner_type);
14645 break;
14647 default:
14648 return tree_simple_nonnegative_warnv_p (code, type);
14651 /* We don't know the sign of `t', so be conservative and return false. */
14652 return false;
14655 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14656 value is based on the assumption that signed overflow is undefined,
14657 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14658 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14660 bool
14661 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14662 tree op1, bool *strict_overflow_p,
14663 int depth)
14665 if (TYPE_UNSIGNED (type))
14666 return true;
14668 switch (code)
14670 case POINTER_PLUS_EXPR:
14671 case PLUS_EXPR:
14672 if (FLOAT_TYPE_P (type))
14673 return RECURSE (op0) && RECURSE (op1);
14675 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14676 both unsigned and at least 2 bits shorter than the result. */
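/* For example, (int) (unsigned char) a + (int) (unsigned char) b
   is at most 255 + 255 == 510, which needs 9 bits; since 9 < 32
   the sum cannot reach the sign bit of a 32-bit int.  */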
14677 if (TREE_CODE (type) == INTEGER_TYPE
14678 && TREE_CODE (op0) == NOP_EXPR
14679 && TREE_CODE (op1) == NOP_EXPR)
14681 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14682 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14683 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14684 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14686 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14687 TYPE_PRECISION (inner2)) + 1;
14688 return prec < TYPE_PRECISION (type);
14691 break;
14693 case MULT_EXPR:
14694 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14696 /* x * x is always non-negative for floating point x,
14697 and for integral x when overflow is undefined. */
14698 if (operand_equal_p (op0, op1, 0)
14699 || (RECURSE (op0) && RECURSE (op1)))
14701 if (ANY_INTEGRAL_TYPE_P (type)
14702 && TYPE_OVERFLOW_UNDEFINED (type))
14703 *strict_overflow_p = true;
14704 return true;
14708 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14709 both unsigned and the sum of their precisions is less than that of the result. */
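/* Similarly, (int) (unsigned char) a * (int) (unsigned char) b is
   at most 255 * 255 == 65025, which fits in 8 + 8 == 16 bits;
   since 16 < 32 the product stays non-negative in a 32-bit int.  */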
14710 if (TREE_CODE (type) == INTEGER_TYPE
14711 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14712 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14714 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14715 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14716 : TREE_TYPE (op0);
14717 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14718 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14719 : TREE_TYPE (op1);
14721 bool unsigned0 = TYPE_UNSIGNED (inner0);
14722 bool unsigned1 = TYPE_UNSIGNED (inner1);
14724 if (TREE_CODE (op0) == INTEGER_CST)
14725 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14727 if (TREE_CODE (op1) == INTEGER_CST)
14728 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14730 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14731 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14733 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14734 ? tree_int_cst_min_precision (op0, UNSIGNED)
14735 : TYPE_PRECISION (inner0);
14737 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14738 ? tree_int_cst_min_precision (op1, UNSIGNED)
14739 : TYPE_PRECISION (inner1);
14741 return precision0 + precision1 < TYPE_PRECISION (type);
14744 return false;
14746 case BIT_AND_EXPR:
14747 return RECURSE (op0) || RECURSE (op1);
14749 case MAX_EXPR:
14750 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14751 things. */
14752 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14753 return RECURSE (op0) && RECURSE (op1);
14754 return RECURSE (op0) || RECURSE (op1);
14756 case BIT_IOR_EXPR:
14757 case BIT_XOR_EXPR:
14758 case MIN_EXPR:
14759 case RDIV_EXPR:
14760 case TRUNC_DIV_EXPR:
14761 case CEIL_DIV_EXPR:
14762 case FLOOR_DIV_EXPR:
14763 case ROUND_DIV_EXPR:
14764 return RECURSE (op0) && RECURSE (op1);
14766 case TRUNC_MOD_EXPR:
14767 return RECURSE (op0);
14769 case FLOOR_MOD_EXPR:
14770 return RECURSE (op1);
14772 case CEIL_MOD_EXPR:
14773 case ROUND_MOD_EXPR:
14774 default:
14775 return tree_simple_nonnegative_warnv_p (code, type);
14778 /* We don't know the sign of `t', so be conservative and return false. */
14779 return false;
14782 /* Return true if T is known to be non-negative. If the return
14783 value is based on the assumption that signed overflow is undefined,
14784 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14785 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14787 bool
14788 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14790 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14791 return true;
14793 switch (TREE_CODE (t))
14795 case INTEGER_CST:
14796 return tree_int_cst_sgn (t) >= 0;
14798 case REAL_CST:
14799 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14801 case FIXED_CST:
14802 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14804 case COND_EXPR:
14805 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14807 case SSA_NAME:
14808 /* Limit the depth of recursion to avoid quadratic behavior.
14809 This is expected to catch almost all occurrences in practice.
14810 If this code misses important cases that unbounded recursion
14811 would not, passes that need this information could be revised
14812 to provide it through dataflow propagation. */
14813 return (!name_registered_for_update_p (t)
14814 && depth < param_max_ssa_name_query_depth
14815 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14816 strict_overflow_p, depth));
14818 default:
14819 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14823 /* Return true if T is known to be non-negative. If the return
14824 value is based on the assumption that signed overflow is undefined,
14825 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14826 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14828 bool
14829 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14830 bool *strict_overflow_p, int depth)
14832 switch (fn)
14834 CASE_CFN_ACOS:
14835 CASE_CFN_ACOS_FN:
14836 CASE_CFN_ACOSH:
14837 CASE_CFN_ACOSH_FN:
14838 CASE_CFN_CABS:
14839 CASE_CFN_CABS_FN:
14840 CASE_CFN_COSH:
14841 CASE_CFN_COSH_FN:
14842 CASE_CFN_ERFC:
14843 CASE_CFN_ERFC_FN:
14844 CASE_CFN_EXP:
14845 CASE_CFN_EXP_FN:
14846 CASE_CFN_EXP10:
14847 CASE_CFN_EXP2:
14848 CASE_CFN_EXP2_FN:
14849 CASE_CFN_FABS:
14850 CASE_CFN_FABS_FN:
14851 CASE_CFN_FDIM:
14852 CASE_CFN_FDIM_FN:
14853 CASE_CFN_HYPOT:
14854 CASE_CFN_HYPOT_FN:
14855 CASE_CFN_POW10:
14856 CASE_CFN_FFS:
14857 CASE_CFN_PARITY:
14858 CASE_CFN_POPCOUNT:
14859 CASE_CFN_CLZ:
14860 CASE_CFN_CLRSB:
14861 case CFN_BUILT_IN_BSWAP16:
14862 case CFN_BUILT_IN_BSWAP32:
14863 case CFN_BUILT_IN_BSWAP64:
14864 case CFN_BUILT_IN_BSWAP128:
14865 /* Always true. */
14866 return true;
14868 CASE_CFN_SQRT:
14869 CASE_CFN_SQRT_FN:
14870 /* sqrt(-0.0) is -0.0. */
14871 if (!HONOR_SIGNED_ZEROS (type))
14872 return true;
14873 return RECURSE (arg0);
14875 CASE_CFN_ASINH:
14876 CASE_CFN_ASINH_FN:
14877 CASE_CFN_ATAN:
14878 CASE_CFN_ATAN_FN:
14879 CASE_CFN_ATANH:
14880 CASE_CFN_ATANH_FN:
14881 CASE_CFN_CBRT:
14882 CASE_CFN_CBRT_FN:
14883 CASE_CFN_CEIL:
14884 CASE_CFN_CEIL_FN:
14885 CASE_CFN_ERF:
14886 CASE_CFN_ERF_FN:
14887 CASE_CFN_EXPM1:
14888 CASE_CFN_EXPM1_FN:
14889 CASE_CFN_FLOOR:
14890 CASE_CFN_FLOOR_FN:
14891 CASE_CFN_FMOD:
14892 CASE_CFN_FMOD_FN:
14893 CASE_CFN_FREXP:
14894 CASE_CFN_FREXP_FN:
14895 CASE_CFN_ICEIL:
14896 CASE_CFN_IFLOOR:
14897 CASE_CFN_IRINT:
14898 CASE_CFN_IROUND:
14899 CASE_CFN_LCEIL:
14900 CASE_CFN_LDEXP:
14901 CASE_CFN_LFLOOR:
14902 CASE_CFN_LLCEIL:
14903 CASE_CFN_LLFLOOR:
14904 CASE_CFN_LLRINT:
14905 CASE_CFN_LLRINT_FN:
14906 CASE_CFN_LLROUND:
14907 CASE_CFN_LLROUND_FN:
14908 CASE_CFN_LRINT:
14909 CASE_CFN_LRINT_FN:
14910 CASE_CFN_LROUND:
14911 CASE_CFN_LROUND_FN:
14912 CASE_CFN_MODF:
14913 CASE_CFN_MODF_FN:
14914 CASE_CFN_NEARBYINT:
14915 CASE_CFN_NEARBYINT_FN:
14916 CASE_CFN_RINT:
14917 CASE_CFN_RINT_FN:
14918 CASE_CFN_ROUND:
14919 CASE_CFN_ROUND_FN:
14920 CASE_CFN_ROUNDEVEN:
14921 CASE_CFN_ROUNDEVEN_FN:
14922 CASE_CFN_SCALB:
14923 CASE_CFN_SCALBLN:
14924 CASE_CFN_SCALBLN_FN:
14925 CASE_CFN_SCALBN:
14926 CASE_CFN_SCALBN_FN:
14927 CASE_CFN_SIGNBIT:
14928 CASE_CFN_SIGNIFICAND:
14929 CASE_CFN_SINH:
14930 CASE_CFN_SINH_FN:
14931 CASE_CFN_TANH:
14932 CASE_CFN_TANH_FN:
14933 CASE_CFN_TRUNC:
14934 CASE_CFN_TRUNC_FN:
14935 /* True if the 1st argument is nonnegative. */
14936 return RECURSE (arg0);
14938 CASE_CFN_FMAX:
14939 CASE_CFN_FMAX_FN:
14940 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14941 things. In the presence of sNaNs, we're only guaranteed to be
14942 non-negative if both operands are non-negative. In the presence
14943 of qNaNs, we're non-negative if either operand is non-negative
14944 and can't be a qNaN, or if both operands are non-negative. */
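/* E.g. fmax (x, 0.0) is known non-negative even when x may be a
   quiet NaN, because fmax then returns the other, non-negative
   operand; once a signaling NaN is possible no such guarantee
   exists.  */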
14945 if (tree_expr_maybe_signaling_nan_p (arg0)
14946 || tree_expr_maybe_signaling_nan_p (arg1))
14947 return RECURSE (arg0) && RECURSE (arg1);
14948 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14949 || RECURSE (arg1))
14950 : (RECURSE (arg1)
14951 && !tree_expr_maybe_nan_p (arg1));
14953 CASE_CFN_FMIN:
14954 CASE_CFN_FMIN_FN:
14955 /* True if the 1st AND 2nd arguments are nonnegative. */
14956 return RECURSE (arg0) && RECURSE (arg1);
14958 CASE_CFN_COPYSIGN:
14959 CASE_CFN_COPYSIGN_FN:
14960 /* True if the 2nd argument is nonnegative. */
14961 return RECURSE (arg1);
14963 CASE_CFN_POWI:
14964 /* True if the 1st argument is nonnegative or the second
14965 argument is an even integer. */
14966 if (TREE_CODE (arg1) == INTEGER_CST
14967 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14968 return true;
14969 return RECURSE (arg0);
14971 CASE_CFN_POW:
14972 CASE_CFN_POW_FN:
14973 /* True if the 1st argument is nonnegative or the second
14974 argument is an even integer valued real. */
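/* The round trip through real_to_integer/real_from_integer below
   checks that ARG1 is exactly an even integer: 4.0 qualifies,
   while 4.5 does not (it truncates to n == 4, but 4.0 is not
   identical to 4.5).  */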
14975 if (TREE_CODE (arg1) == REAL_CST)
14977 REAL_VALUE_TYPE c;
14978 HOST_WIDE_INT n;
14980 c = TREE_REAL_CST (arg1);
14981 n = real_to_integer (&c);
14982 if ((n & 1) == 0)
14984 REAL_VALUE_TYPE cint;
14985 real_from_integer (&cint, VOIDmode, n, SIGNED);
14986 if (real_identical (&c, &cint))
14987 return true;
14990 return RECURSE (arg0);
14992 default:
14993 break;
14995 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14998 /* Return true if T is known to be non-negative. If the return
14999 value is based on the assumption that signed overflow is undefined,
15000 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15001 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15003 static bool
15004 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15006 enum tree_code code = TREE_CODE (t);
15007 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15008 return true;
15010 switch (code)
15012 case TARGET_EXPR:
15014 tree temp = TARGET_EXPR_SLOT (t);
15015 t = TARGET_EXPR_INITIAL (t);
15017 /* If the initializer is non-void, then it's a normal expression
15018 that will be assigned to the slot. */
15019 if (!VOID_TYPE_P (TREE_TYPE (t)))
15020 return RECURSE (t);
15022 /* Otherwise, the initializer sets the slot in some way. One common
15023 way is an assignment statement at the end of the initializer. */
15024 while (1)
15026 if (TREE_CODE (t) == BIND_EXPR)
15027 t = expr_last (BIND_EXPR_BODY (t));
15028 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15029 || TREE_CODE (t) == TRY_CATCH_EXPR)
15030 t = expr_last (TREE_OPERAND (t, 0));
15031 else if (TREE_CODE (t) == STATEMENT_LIST)
15032 t = expr_last (t);
15033 else
15034 break;
15036 if (TREE_CODE (t) == MODIFY_EXPR
15037 && TREE_OPERAND (t, 0) == temp)
15038 return RECURSE (TREE_OPERAND (t, 1));
15040 return false;
15043 case CALL_EXPR:
15045 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15046 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15048 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15049 get_call_combined_fn (t),
15050 arg0,
15051 arg1,
15052 strict_overflow_p, depth);
15054 case COMPOUND_EXPR:
15055 case MODIFY_EXPR:
15056 return RECURSE (TREE_OPERAND (t, 1));
15058 case BIND_EXPR:
15059 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15061 case SAVE_EXPR:
15062 return RECURSE (TREE_OPERAND (t, 0));
15064 default:
15065 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15069 #undef RECURSE
15070 #undef tree_expr_nonnegative_warnv_p
15072 /* Return true if T is known to be non-negative. If the return
15073 value is based on the assumption that signed overflow is undefined,
15074 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15075 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15077 bool
15078 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15080 enum tree_code code;
15081 if (t == error_mark_node)
15082 return false;
15084 code = TREE_CODE (t);
15085 switch (TREE_CODE_CLASS (code))
15087 case tcc_binary:
15088 case tcc_comparison:
15089 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15090 TREE_TYPE (t),
15091 TREE_OPERAND (t, 0),
15092 TREE_OPERAND (t, 1),
15093 strict_overflow_p, depth);
15095 case tcc_unary:
15096 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15097 TREE_TYPE (t),
15098 TREE_OPERAND (t, 0),
15099 strict_overflow_p, depth);
15101 case tcc_constant:
15102 case tcc_declaration:
15103 case tcc_reference:
15104 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15106 default:
15107 break;
15110 switch (code)
15112 case TRUTH_AND_EXPR:
15113 case TRUTH_OR_EXPR:
15114 case TRUTH_XOR_EXPR:
15115 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15116 TREE_TYPE (t),
15117 TREE_OPERAND (t, 0),
15118 TREE_OPERAND (t, 1),
15119 strict_overflow_p, depth);
15120 case TRUTH_NOT_EXPR:
15121 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15122 TREE_TYPE (t),
15123 TREE_OPERAND (t, 0),
15124 strict_overflow_p, depth);
15126 case COND_EXPR:
15127 case CONSTRUCTOR:
15128 case OBJ_TYPE_REF:
15129 case ADDR_EXPR:
15130 case WITH_SIZE_EXPR:
15131 case SSA_NAME:
15132 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15134 default:
15135 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15139 /* Return true if `t' is known to be non-negative. Handle warnings
15140 about undefined signed overflow. */
15142 bool
15143 tree_expr_nonnegative_p (tree t)
15145 bool ret, strict_overflow_p;
15147 strict_overflow_p = false;
15148 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15149 if (strict_overflow_p)
15150 fold_overflow_warning (("assuming signed overflow does not occur when "
15151 "determining that expression is always "
15152 "non-negative"),
15153 WARN_STRICT_OVERFLOW_MISC);
15154 return ret;
15158 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15159 For floating point we further ensure that T is not denormal.
15160 Similar logic is present in nonzero_address in rtlanal.h.
15162 If the return value is based on the assumption that signed overflow
15163 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15164 change *STRICT_OVERFLOW_P. */
15166 bool
15167 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15168 bool *strict_overflow_p)
15170 switch (code)
15172 case ABS_EXPR:
15173 return tree_expr_nonzero_warnv_p (op0,
15174 strict_overflow_p);
15176 case NOP_EXPR:
15178 tree inner_type = TREE_TYPE (op0);
15179 tree outer_type = type;
15181 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15182 && tree_expr_nonzero_warnv_p (op0,
15183 strict_overflow_p));
15185 break;
15187 case NON_LVALUE_EXPR:
15188 return tree_expr_nonzero_warnv_p (op0,
15189 strict_overflow_p);
15191 default:
15192 break;
15195 return false;
15198 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15199 For floating point we further ensure that T is not denormal.
15200 Similar logic is present in nonzero_address in rtlanal.h.
15202 If the return value is based on the assumption that signed overflow
15203 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15204 change *STRICT_OVERFLOW_P. */
15206 bool
15207 tree_binary_nonzero_warnv_p (enum tree_code code,
15208 tree type,
15209 tree op0,
15210 tree op1, bool *strict_overflow_p)
15212 bool sub_strict_overflow_p;
15213 switch (code)
15215 case POINTER_PLUS_EXPR:
15216 case PLUS_EXPR:
15217 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15219 /* In the presence of negative values it is hard
15220 to say anything definite. */
15221 sub_strict_overflow_p = false;
15222 if (!tree_expr_nonnegative_warnv_p (op0,
15223 &sub_strict_overflow_p)
15224 || !tree_expr_nonnegative_warnv_p (op1,
15225 &sub_strict_overflow_p))
15226 return false;
15227 /* One of the operands must be positive and the other non-negative. */
15228 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15229 overflows, on a twos-complement machine the sum of two
15230 nonnegative numbers can never be zero. */
15231 return (tree_expr_nonzero_warnv_p (op0,
15232 strict_overflow_p)
15233 || tree_expr_nonzero_warnv_p (op1,
15234 strict_overflow_p));
15236 break;
15238 case MULT_EXPR:
15239 if (TYPE_OVERFLOW_UNDEFINED (type))
15241 if (tree_expr_nonzero_warnv_p (op0,
15242 strict_overflow_p)
15243 && tree_expr_nonzero_warnv_p (op1,
15244 strict_overflow_p))
15246 *strict_overflow_p = true;
15247 return true;
15250 break;
15252 case MIN_EXPR:
15253 sub_strict_overflow_p = false;
15254 if (tree_expr_nonzero_warnv_p (op0,
15255 &sub_strict_overflow_p)
15256 && tree_expr_nonzero_warnv_p (op1,
15257 &sub_strict_overflow_p))
15259 if (sub_strict_overflow_p)
15260 *strict_overflow_p = true;
15262 break;
15264 case MAX_EXPR:
15265 sub_strict_overflow_p = false;
15266 if (tree_expr_nonzero_warnv_p (op0,
15267 &sub_strict_overflow_p))
15269 if (sub_strict_overflow_p)
15270 *strict_overflow_p = true;
15272 /* When both operands are nonzero, MAX must be too. */
15273 if (tree_expr_nonzero_warnv_p (op1,
15274 strict_overflow_p))
15275 return true;
15277 /* MAX where operand 0 is positive is positive. */
15278 return tree_expr_nonnegative_warnv_p (op0,
15279 strict_overflow_p);
15281 /* MAX where operand 1 is positive is positive. */
15282 else if (tree_expr_nonzero_warnv_p (op1,
15283 &sub_strict_overflow_p)
15284 && tree_expr_nonnegative_warnv_p (op1,
15285 &sub_strict_overflow_p))
15287 if (sub_strict_overflow_p)
15288 *strict_overflow_p = true;
15289 return true;
15291 break;
15293 case BIT_IOR_EXPR:
15294 return (tree_expr_nonzero_warnv_p (op1,
15295 strict_overflow_p)
15296 || tree_expr_nonzero_warnv_p (op0,
15297 strict_overflow_p));
15299 default:
15300 break;
15303 return false;
15306 /* Return true when T is an address and is known to be nonzero.
15307 For floating point we further ensure that T is not denormal.
15308 Similar logic is present in nonzero_address in rtlanal.h.
15310 If the return value is based on the assumption that signed overflow
15311 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15312 change *STRICT_OVERFLOW_P. */
15314 bool
15315 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15317 bool sub_strict_overflow_p;
15318 switch (TREE_CODE (t))
15320 case INTEGER_CST:
15321 return !integer_zerop (t);
15323 case ADDR_EXPR:
15325 tree base = TREE_OPERAND (t, 0);
15327 if (!DECL_P (base))
15328 base = get_base_address (base);
15330 if (base && TREE_CODE (base) == TARGET_EXPR)
15331 base = TARGET_EXPR_SLOT (base);
15333 if (!base)
15334 return false;
15336 /* For objects in the symbol table, check if we know they are non-zero.
15337 Don't do anything for variables and functions before symtab is built;
15338 it is quite possible that they will be declared weak later. */
15339 int nonzero_addr = maybe_nonzero_address (base);
15340 if (nonzero_addr >= 0)
15341 return nonzero_addr;
15343 /* Constants are never weak. */
15344 if (CONSTANT_CLASS_P (base))
15345 return true;
15347 return false;
15350 case COND_EXPR:
15351 sub_strict_overflow_p = false;
15352 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15353 &sub_strict_overflow_p)
15354 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15355 &sub_strict_overflow_p))
15357 if (sub_strict_overflow_p)
15358 *strict_overflow_p = true;
15359 return true;
15361 break;
15363 case SSA_NAME:
15364 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15365 break;
15366 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15368 default:
15369 break;
15371 return false;
15374 #define integer_valued_real_p(X) \
15375 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15377 #define RECURSE(X) \
15378 ((integer_valued_real_p) (X, depth + 1))
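/* Same guard as for tree_expr_nonnegative_warnv_p above: direct
   recursion is poisoned at compile time and RECURSE re-enters the
   real function with DEPTH incremented.  */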
15380 /* Return true if the floating point result of (CODE OP0) has an
15381 integer value. We also allow +Inf, -Inf and NaN to be considered
15382 integer values. Return false for signaling NaN.
15384 DEPTH is the current nesting depth of the query. */
15386 bool
15387 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15389 switch (code)
15391 case FLOAT_EXPR:
15392 return true;
15394 case ABS_EXPR:
15395 return RECURSE (op0);
15397 CASE_CONVERT:
15399 tree type = TREE_TYPE (op0);
15400 if (TREE_CODE (type) == INTEGER_TYPE)
15401 return true;
15402 if (TREE_CODE (type) == REAL_TYPE)
15403 return RECURSE (op0);
15404 break;
15407 default:
15408 break;
15410 return false;
15413 /* Return true if the floating point result of (CODE OP0 OP1) has an
15414 integer value. We also allow +Inf, -Inf and NaN to be considered
15415 integer values. Return false for signaling NaN.
15417 DEPTH is the current nesting depth of the query. */
15419 bool
15420 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15422 switch (code)
15424 case PLUS_EXPR:
15425 case MINUS_EXPR:
15426 case MULT_EXPR:
15427 case MIN_EXPR:
15428 case MAX_EXPR:
15429 return RECURSE (op0) && RECURSE (op1);
15431 default:
15432 break;
15434 return false;
15437 /* Return true if the floating point result of calling FN with arguments
15438 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15439 considered integer values. Return false for signaling NaN. If FN
15440 takes fewer than 2 arguments, the remaining ARGn are null.
15442 DEPTH is the current nesting depth of the query. */
15444 bool
15445 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15447 switch (fn)
15449 CASE_CFN_CEIL:
15450 CASE_CFN_CEIL_FN:
15451 CASE_CFN_FLOOR:
15452 CASE_CFN_FLOOR_FN:
15453 CASE_CFN_NEARBYINT:
15454 CASE_CFN_NEARBYINT_FN:
15455 CASE_CFN_RINT:
15456 CASE_CFN_RINT_FN:
15457 CASE_CFN_ROUND:
15458 CASE_CFN_ROUND_FN:
15459 CASE_CFN_ROUNDEVEN:
15460 CASE_CFN_ROUNDEVEN_FN:
15461 CASE_CFN_TRUNC:
15462 CASE_CFN_TRUNC_FN:
15463 return true;
15465 CASE_CFN_FMIN:
15466 CASE_CFN_FMIN_FN:
15467 CASE_CFN_FMAX:
15468 CASE_CFN_FMAX_FN:
15469 return RECURSE (arg0) && RECURSE (arg1);
15471 default:
15472 break;
15474 return false;
15477 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15478 has an integer value. We also allow +Inf, -Inf and NaN to be
15479 considered integer values. Return false for signaling NaN.
15481 DEPTH is the current nesting depth of the query. */
15483 bool
15484 integer_valued_real_single_p (tree t, int depth)
15486 switch (TREE_CODE (t))
15488 case REAL_CST:
15489 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15491 case COND_EXPR:
15492 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15494 case SSA_NAME:
15495 /* Limit the depth of recursion to avoid quadratic behavior.
15496 This is expected to catch almost all occurrences in practice.
15497 If this code misses important cases that unbounded recursion
15498 would not, passes that need this information could be revised
15499 to provide it through dataflow propagation. */
15500 return (!name_registered_for_update_p (t)
15501 && depth < param_max_ssa_name_query_depth
15502 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15503 depth));
15505 default:
15506 break;
15508 return false;
15511 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15512 has an integer value. We also allow +Inf, -Inf and NaN to be
15513 considered integer values. Return false for signaling NaN.
15515 DEPTH is the current nesting depth of the query. */
15517 static bool
15518 integer_valued_real_invalid_p (tree t, int depth)
15520 switch (TREE_CODE (t))
15522 case COMPOUND_EXPR:
15523 case MODIFY_EXPR:
15524 case BIND_EXPR:
15525 return RECURSE (TREE_OPERAND (t, 1));
15527 case SAVE_EXPR:
15528 return RECURSE (TREE_OPERAND (t, 0));
15530 default:
15531 break;
15533 return false;
15536 #undef RECURSE
15537 #undef integer_valued_real_p
15539 /* Return true if the floating point expression T has an integer value.
15540 We also allow +Inf, -Inf and NaN to be considered integer values.
15541 Return false for signaling NaN.
15543 DEPTH is the current nesting depth of the query. */
15545 bool
15546 integer_valued_real_p (tree t, int depth)
15548 if (t == error_mark_node)
15549 return false;
15551 STRIP_ANY_LOCATION_WRAPPER (t);
15553 tree_code code = TREE_CODE (t);
15554 switch (TREE_CODE_CLASS (code))
15556 case tcc_binary:
15557 case tcc_comparison:
15558 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15559 TREE_OPERAND (t, 1), depth);
15561 case tcc_unary:
15562 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15564 case tcc_constant:
15565 case tcc_declaration:
15566 case tcc_reference:
15567 return integer_valued_real_single_p (t, depth);
15569 default:
15570 break;
15573 switch (code)
15575 case COND_EXPR:
15576 case SSA_NAME:
15577 return integer_valued_real_single_p (t, depth);
15579 case CALL_EXPR:
15581 tree arg0 = (call_expr_nargs (t) > 0
15582 ? CALL_EXPR_ARG (t, 0)
15583 : NULL_TREE);
15584 tree arg1 = (call_expr_nargs (t) > 1
15585 ? CALL_EXPR_ARG (t, 1)
15586 : NULL_TREE);
15587 return integer_valued_real_call_p (get_call_combined_fn (t),
15588 arg0, arg1, depth);
15591 default:
15592 return integer_valued_real_invalid_p (t, depth);
15596 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15597 attempt to fold the expression to a constant without modifying TYPE,
15598 OP0 or OP1.
15600 If the expression could be simplified to a constant, then return
15601 the constant. If the expression would not be simplified to a
15602 constant, then return NULL_TREE. */
15604 tree
15605 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15607 tree tem = fold_binary (code, type, op0, op1);
15608 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15611 /* Given the components of a unary expression CODE, TYPE and OP0,
15612 attempt to fold the expression to a constant without modifying
15613 TYPE or OP0.
15615 If the expression could be simplified to a constant, then return
15616 the constant. If the expression would not be simplified to a
15617 constant, then return NULL_TREE. */
15619 tree
15620 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15622 tree tem = fold_unary (code, type, op0);
15623 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15626 /* If EXP represents referencing an element in a constant string
15627 (either via pointer arithmetic or array indexing), return the
15628 tree representing the value accessed, otherwise return NULL. */
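/* For example, both "abc"[1] and *("abc" + 1) fold to the
   character constant 'b'.  */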
15630 tree
15631 fold_read_from_constant_string (tree exp)
15633 if ((TREE_CODE (exp) == INDIRECT_REF
15634 || TREE_CODE (exp) == ARRAY_REF)
15635 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15637 tree exp1 = TREE_OPERAND (exp, 0);
15638 tree index;
15639 tree string;
15640 location_t loc = EXPR_LOCATION (exp);
15642 if (TREE_CODE (exp) == INDIRECT_REF)
15643 string = string_constant (exp1, &index, NULL, NULL);
15644 else
15646 tree low_bound = array_ref_low_bound (exp);
15647 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15649 /* Optimize the special case of a zero lower bound.
15651 We convert the low_bound to sizetype to avoid some problems
15652 with constant folding. (E.g. suppose the lower bound is 1,
15653 and its mode is QI. Without the conversion, (ARRAY
15654 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15655 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15656 if (! integer_zerop (low_bound))
15657 index = size_diffop_loc (loc, index,
15658 fold_convert_loc (loc, sizetype, low_bound));
15660 string = exp1;
15663 scalar_int_mode char_mode;
15664 if (string
15665 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15666 && TREE_CODE (string) == STRING_CST
15667 && tree_fits_uhwi_p (index)
15668 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15669 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15670 &char_mode)
15671 && GET_MODE_SIZE (char_mode) == 1)
15672 return build_int_cst_type (TREE_TYPE (exp),
15673 (TREE_STRING_POINTER (string)
15674 [TREE_INT_CST_LOW (index)]));
15676 return NULL;
15679 /* Folds a read from vector element at IDX of vector ARG. */
15681 tree
15682 fold_read_from_vector (tree arg, poly_uint64 idx)
15684 unsigned HOST_WIDE_INT i;
15685 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15686 && known_ge (idx, 0u)
15687 && idx.is_constant (&i))
15689 if (TREE_CODE (arg) == VECTOR_CST)
15690 return VECTOR_CST_ELT (arg, i);
15691 else if (TREE_CODE (arg) == CONSTRUCTOR)
15693 if (CONSTRUCTOR_NELTS (arg)
15694 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15695 return NULL_TREE;
15696 if (i >= CONSTRUCTOR_NELTS (arg))
15697 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15698 return CONSTRUCTOR_ELT (arg, i)->value;
15701 return NULL_TREE;
15704 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15705 an integer, real, or fixed-point constant.
15707 TYPE is the type of the result. */
15709 static tree
15710 fold_negate_const (tree arg0, tree type)
15712 tree t = NULL_TREE;
15714 switch (TREE_CODE (arg0))
15716 case REAL_CST:
15717 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15718 break;
15720 case FIXED_CST:
15722 FIXED_VALUE_TYPE f;
15723 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15724 &(TREE_FIXED_CST (arg0)), NULL,
15725 TYPE_SATURATING (type));
15726 t = build_fixed (type, f);
15727 /* Propagate overflow flags. */
15728 if (overflow_p | TREE_OVERFLOW (arg0))
15729 TREE_OVERFLOW (t) = 1;
15730 break;
15733 default:
15734 if (poly_int_tree_p (arg0))
15736 wi::overflow_type overflow;
15737 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15738 t = force_fit_type (type, res, 1,
15739 (overflow && ! TYPE_UNSIGNED (type))
15740 || TREE_OVERFLOW (arg0));
15741 break;
15744 gcc_unreachable ();
15747 return t;
15750 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15751 an integer constant or real constant.
15753 TYPE is the type of the result. */
15755 tree
15756 fold_abs_const (tree arg0, tree type)
15758 tree t = NULL_TREE;
15760 switch (TREE_CODE (arg0))
15762 case INTEGER_CST:
15764 /* If the value is unsigned or non-negative, then the absolute value
15765 is the same as the ordinary value. */
15766 wide_int val = wi::to_wide (arg0);
15767 wi::overflow_type overflow = wi::OVF_NONE;
15768 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15771 /* If the value is negative, then the absolute value is
15772 its negation. */
15773 else
15774 val = wi::neg (val, &overflow);
15776 /* Force to the destination type, set TREE_OVERFLOW for signed
15777 TYPE only. */
15778 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15780 break;
15782 case REAL_CST:
15783 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15784 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15785 else
15786 t = arg0;
15787 break;
15789 default:
15790 gcc_unreachable ();
15793 return t;
15796 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15797 constant. TYPE is the type of the result. */
15799 static tree
15800 fold_not_const (const_tree arg0, tree type)
15802 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15804 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15807 /* Given CODE, a relational operator, the target type, TYPE and two
15808 constant operands OP0 and OP1, return the result of the
15809 relational operation. If the result is not a compile time
15810 constant, then return NULL_TREE. */
15812 static tree
15813 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15815 int result, invert;
15817 /* From here on, the only cases we handle are when the result is
15818 known to be a constant. */
15820 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15822 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15823 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15825 /* Handle the cases where either operand is a NaN. */
15826 if (real_isnan (c0) || real_isnan (c1))
15828 switch (code)
15830 case EQ_EXPR:
15831 case ORDERED_EXPR:
15832 result = 0;
15833 break;
15835 case NE_EXPR:
15836 case UNORDERED_EXPR:
15837 case UNLT_EXPR:
15838 case UNLE_EXPR:
15839 case UNGT_EXPR:
15840 case UNGE_EXPR:
15841 case UNEQ_EXPR:
15842 result = 1;
15843 break;
15845 case LT_EXPR:
15846 case LE_EXPR:
15847 case GT_EXPR:
15848 case GE_EXPR:
15849 case LTGT_EXPR:
15850 if (flag_trapping_math)
15851 return NULL_TREE;
15852 result = 0;
15853 break;
15855 default:
15856 gcc_unreachable ();
15859 return constant_boolean_node (result, type);
15862 return constant_boolean_node (real_compare (code, c0, c1), type);
15865 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15867 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15868 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15869 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15872 /* Handle equality/inequality of complex constants. */
15873 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15875 tree rcond = fold_relational_const (code, type,
15876 TREE_REALPART (op0),
15877 TREE_REALPART (op1));
15878 tree icond = fold_relational_const (code, type,
15879 TREE_IMAGPART (op0),
15880 TREE_IMAGPART (op1));
15881 if (code == EQ_EXPR)
15882 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15883 else if (code == NE_EXPR)
15884 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15885 else
15886 return NULL_TREE;
15889 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15891 if (!VECTOR_TYPE_P (type))
15893 /* We have a vector comparison with a scalar boolean result. */
15894 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15895 && known_eq (VECTOR_CST_NELTS (op0),
15896 VECTOR_CST_NELTS (op1)));
15897 unsigned HOST_WIDE_INT nunits;
15898 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15899 return NULL_TREE;
15900 for (unsigned i = 0; i < nunits; i++)
15902 tree elem0 = VECTOR_CST_ELT (op0, i);
15903 tree elem1 = VECTOR_CST_ELT (op1, i);
15904 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15905 if (tmp == NULL_TREE)
15906 return NULL_TREE;
15907 if (integer_zerop (tmp))
15908 return constant_boolean_node (code == NE_EXPR, type);
15910 return constant_boolean_node (code == EQ_EXPR, type);
15912 tree_vector_builder elts;
15913 if (!elts.new_binary_operation (type, op0, op1, false))
15914 return NULL_TREE;
15915 unsigned int count = elts.encoded_nelts ();
15916 for (unsigned i = 0; i < count; i++)
15918 tree elem_type = TREE_TYPE (type);
15919 tree elem0 = VECTOR_CST_ELT (op0, i);
15920 tree elem1 = VECTOR_CST_ELT (op1, i);
15922 tree tem = fold_relational_const (code, elem_type,
15923 elem0, elem1);
15925 if (tem == NULL_TREE)
15926 return NULL_TREE;
15928 elts.quick_push (build_int_cst (elem_type,
15929 integer_zerop (tem) ? 0 : -1));
15932 return elts.build ();
15935 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15937 To compute GT, swap the arguments and do LT.
15938 To compute GE, do LT and invert the result.
15939 To compute LE, swap the arguments, do LT and invert the result.
15940 To compute NE, do EQ and invert the result.
15942 Therefore, the code below must handle only EQ and LT. */
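/* E.g. op0 >= op1 is evaluated as !(op0 < op1), which is safe at
   this point because the unordered NaN cases were already handled
   above.  */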
15944 if (code == LE_EXPR || code == GT_EXPR)
15946 std::swap (op0, op1);
15947 code = swap_tree_comparison (code);
15950 /* Note that it is safe to invert for real values here because we
15951 have already handled the one case where it matters. */
15953 invert = 0;
15954 if (code == NE_EXPR || code == GE_EXPR)
15956 invert = 1;
15957 code = invert_tree_comparison (code, false);
15960 /* Compute a result for LT or EQ if args permit;
15961 Otherwise return NULL_TREE. */
15962 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15964 if (code == EQ_EXPR)
15965 result = tree_int_cst_equal (op0, op1);
15966 else
15967 result = tree_int_cst_lt (op0, op1);
15969 else
15970 return NULL_TREE;
15972 if (invert)
15973 result ^= 1;
15974 return constant_boolean_node (result, type);
15977 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15978 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15979 itself. */
15981 tree
15982 fold_build_cleanup_point_expr (tree type, tree expr)
15984 /* If the expression does not have side effects then we don't have to wrap
15985 it with a cleanup point expression. */
15986 if (!TREE_SIDE_EFFECTS (expr))
15987 return expr;
15989 /* If the expression is a return, check whether the expression inside the
15990 return, or the right-hand side of the modify expression inside the
15991 return, has no side effects. If either has none, we don't need to
15992 wrap the expression in a cleanup point expression. Note we don't check the
15993 left-hand side of the modify because it should always be a return decl. */
15994 if (TREE_CODE (expr) == RETURN_EXPR)
15996 tree op = TREE_OPERAND (expr, 0);
15997 if (!op || !TREE_SIDE_EFFECTS (op))
15998 return expr;
15999 op = TREE_OPERAND (op, 1);
16000 if (!TREE_SIDE_EFFECTS (op))
16001 return expr;
16004 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16007 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16008 of an indirection through OP0, or NULL_TREE if no simplification is
16009 possible. */
16011 tree
16012 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16014 tree sub = op0;
16015 tree subtype;
16016 poly_uint64 const_op01;
16018 STRIP_NOPS (sub);
16019 subtype = TREE_TYPE (sub);
16020 if (!POINTER_TYPE_P (subtype)
16021 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16022 return NULL_TREE;
16024 if (TREE_CODE (sub) == ADDR_EXPR)
16026 tree op = TREE_OPERAND (sub, 0);
16027 tree optype = TREE_TYPE (op);
16029 /* *&CONST_DECL -> to the value of the const decl. */
16030 if (TREE_CODE (op) == CONST_DECL)
16031 return DECL_INITIAL (op);
16032 /* *&p => p; make sure to handle *&"str"[cst] here. */
16033 if (type == optype)
16035 tree fop = fold_read_from_constant_string (op);
16036 if (fop)
16037 return fop;
16038 else
16039 return op;
16041 /* *(foo *)&fooarray => fooarray[0] */
16042 else if (TREE_CODE (optype) == ARRAY_TYPE
16043 && type == TREE_TYPE (optype)
16044 && (!in_gimple_form
16045 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16047 tree type_domain = TYPE_DOMAIN (optype);
16048 tree min_val = size_zero_node;
16049 if (type_domain && TYPE_MIN_VALUE (type_domain))
16050 min_val = TYPE_MIN_VALUE (type_domain);
16051 if (in_gimple_form
16052 && TREE_CODE (min_val) != INTEGER_CST)
16053 return NULL_TREE;
16054 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16055 NULL_TREE, NULL_TREE);
16057 /* *(foo *)&complexfoo => __real__ complexfoo */
16058 else if (TREE_CODE (optype) == COMPLEX_TYPE
16059 && type == TREE_TYPE (optype))
16060 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16061 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16062 else if (VECTOR_TYPE_P (optype)
16063 && type == TREE_TYPE (optype))
16065 tree part_width = TYPE_SIZE (type);
16066 tree index = bitsize_int (0);
16067 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16068 index);
16072 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16073 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16075 tree op00 = TREE_OPERAND (sub, 0);
16076 tree op01 = TREE_OPERAND (sub, 1);
16078 STRIP_NOPS (op00);
16079 if (TREE_CODE (op00) == ADDR_EXPR)
16081 tree op00type;
16082 op00 = TREE_OPERAND (op00, 0);
16083 op00type = TREE_TYPE (op00);
16085 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16086 if (VECTOR_TYPE_P (op00type)
16087 && type == TREE_TYPE (op00type)
16088 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16089 but we want to treat offsets with MSB set as negative.
16090 For the code below negative offsets are invalid and
16091 TYPE_SIZE of the element is something unsigned, so
16092 check whether op01 fits into poly_int64, which implies
16093 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16094 then just use poly_uint64 because we want to treat the
16095 value as unsigned. */
16096 && tree_fits_poly_int64_p (op01))
16098 tree part_width = TYPE_SIZE (type);
16099 poly_uint64 max_offset
16100 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16101 * TYPE_VECTOR_SUBPARTS (op00type));
16102 if (known_lt (const_op01, max_offset))
16104 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16105 return fold_build3_loc (loc,
16106 BIT_FIELD_REF, type, op00,
16107 part_width, index);
16110 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16111 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16112 && type == TREE_TYPE (op00type))
16114 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16115 const_op01))
16116 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16118 /* ((foo *)&fooarray)[1] => fooarray[1] */
16119 else if (TREE_CODE (op00type) == ARRAY_TYPE
16120 && type == TREE_TYPE (op00type))
16122 tree type_domain = TYPE_DOMAIN (op00type);
16123 tree min_val = size_zero_node;
16124 if (type_domain && TYPE_MIN_VALUE (type_domain))
16125 min_val = TYPE_MIN_VALUE (type_domain);
16126 poly_uint64 type_size, index;
16127 if (poly_int_tree_p (min_val)
16128 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16129 && multiple_p (const_op01, type_size, &index))
16131 poly_offset_int off = index + wi::to_poly_offset (min_val);
16132 op01 = wide_int_to_tree (sizetype, off);
16133 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16134 NULL_TREE, NULL_TREE);
16140 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16141 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16142 && type == TREE_TYPE (TREE_TYPE (subtype))
16143 && (!in_gimple_form
16144 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16146 tree type_domain;
16147 tree min_val = size_zero_node;
16148 sub = build_fold_indirect_ref_loc (loc, sub);
16149 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16150 if (type_domain && TYPE_MIN_VALUE (type_domain))
16151 min_val = TYPE_MIN_VALUE (type_domain);
16152 if (in_gimple_form
16153 && TREE_CODE (min_val) != INTEGER_CST)
16154 return NULL_TREE;
16155 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16156 NULL_TREE);
16159 return NULL_TREE;
16162 /* Builds an expression for an indirection through T, simplifying some
16163 cases. */
16165 tree
16166 build_fold_indirect_ref_loc (location_t loc, tree t)
16168 tree type = TREE_TYPE (TREE_TYPE (t));
16169 tree sub = fold_indirect_ref_1 (loc, type, t);
16171 if (sub)
16172 return sub;
16174 return build1_loc (loc, INDIRECT_REF, type, t);
16177 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16179 tree
16180 fold_indirect_ref_loc (location_t loc, tree t)
16182 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16184 if (sub)
16185 return sub;
16186 else
16187 return t;
16190 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16191 whose result is ignored. The type of the returned tree need not be
16192 the same as the original expression. */
16194 tree
16195 fold_ignored_result (tree t)
16197 if (!TREE_SIDE_EFFECTS (t))
16198 return integer_zero_node;
16200 for (;;)
16201 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16203 case tcc_unary:
16204 t = TREE_OPERAND (t, 0);
16205 break;
16207 case tcc_binary:
16208 case tcc_comparison:
16209 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16210 t = TREE_OPERAND (t, 0);
16211 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16212 t = TREE_OPERAND (t, 1);
16213 else
16214 return t;
16215 break;
16217 case tcc_expression:
16218 switch (TREE_CODE (t))
16220 case COMPOUND_EXPR:
16221 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16222 return t;
16223 t = TREE_OPERAND (t, 0);
16224 break;
16226 case COND_EXPR:
16227 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16228 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16229 return t;
16230 t = TREE_OPERAND (t, 0);
16231 break;
16233 default:
16234 return t;
16236 break;
16238 default:
16239 return t;
16243 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16245 tree
16246 round_up_loc (location_t loc, tree value, unsigned int divisor)
16248 tree div = NULL_TREE;
16250 if (divisor == 1)
16251 return value;
16253 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16254 have to do anything. Only do this check when VALUE is not a
16255 constant, because for a constant the check is more expensive
16256 than simply doing the rounding. */
16257 if (TREE_CODE (value) != INTEGER_CST)
16259 div = build_int_cst (TREE_TYPE (value), divisor);
16261 if (multiple_of_p (TREE_TYPE (value), value, div))
16262 return value;
16265 /* If divisor is a power of two, simplify this to bit manipulation. */
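/* A minimal sketch of this fast path on plain integers, assuming
   DIVISOR is a power of two:

     rounded = (value + divisor - 1) & -divisor;

   e.g. (13 + 7) & -8 == 16, while (16 + 7) & -8 == 16 as well.  */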
16266 if (pow2_or_zerop (divisor))
16268 if (TREE_CODE (value) == INTEGER_CST)
16270 wide_int val = wi::to_wide (value);
16271 bool overflow_p;
16273 if ((val & (divisor - 1)) == 0)
16274 return value;
16276 overflow_p = TREE_OVERFLOW (value);
16277 val += divisor - 1;
16278 val &= (int) -divisor;
16279 if (val == 0)
16280 overflow_p = true;
16282 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16284 else
16286 tree t;
16288 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16289 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16290 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16291 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16294 else
16296 if (!div)
16297 div = build_int_cst (TREE_TYPE (value), divisor);
16298 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16299 value = size_binop_loc (loc, MULT_EXPR, value, div);
16302 return value;
16305 /* Likewise, but round down. */
16307 tree
16308 round_down_loc (location_t loc, tree value, int divisor)
16310 tree div = NULL_TREE;
16312 gcc_assert (divisor > 0);
16313 if (divisor == 1)
16314 return value;
16316 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16317 have to do anything. Only do this check when VALUE is not a
16318 constant, because for a constant the check is more expensive
16319 than simply doing the rounding. */
16320 if (TREE_CODE (value) != INTEGER_CST)
16322 div = build_int_cst (TREE_TYPE (value), divisor);
16324 if (multiple_of_p (TREE_TYPE (value), value, div))
16325 return value;
16328 /* If divisor is a power of two, simplify this to bit manipulation. */
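/* Rounding down needs no addition: value & -divisor simply clears
   the low bits, e.g. 13 & -8 == 8 and 16 & -8 == 16.  */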
16329 if (pow2_or_zerop (divisor))
16331 tree t;
16333 t = build_int_cst (TREE_TYPE (value), -divisor);
16334 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16336 else
16338 if (!div)
16339 div = build_int_cst (TREE_TYPE (value), divisor);
16340 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16341 value = size_binop_loc (loc, MULT_EXPR, value, div);
16344 return value;
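
/* Illustrative sketch, not part of the original source: rounding down
   needs no addition first -- 10 & -8 == 8 -- and a non-power-of-two
   divisor goes through FLOOR_DIV_EXPR followed by MULT_EXPR, so
   (10 / 3) * 3 == 9.  */
#if 0
  tree down8 = round_down_loc (UNKNOWN_LOCATION, size_int (10), 8);  /* 8 */
  tree down3 = round_down_loc (UNKNOWN_LOCATION, size_int (10), 3);  /* 9 */
#endif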
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == SSA_NAME)
    if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
      if (gimple_assign_rhs_code (def) == ADDR_EXPR)
	exp = gimple_assign_rhs1 (def);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
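
/* Illustrative sketch, not part of the original source: ADDR below is
   an assumed tree, either &OBJ.FIELD (an ADDR_EXPR) or P p+ N (a
   POINTER_PLUS_EXPR).  In the first case the core is &OBJ with the
   field's bit position in BITPOS; in the second it is P, with N in
   OFFSET or folded into BITPOS when N is constant.  */
#if 0
  poly_int64 bitpos;
  tree offset;
  tree core = split_address_to_core_and_offset (addr, &bitpos, &offset);
#endif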
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
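
/* Illustrative sketch, not part of the original source: E1 and E2 are
   assumed trees for &a[3] and &a[1] with 4-byte elements.  Both split
   to the same core &a with constant byte positions 12 and 4, so the
   difference is a compile-time constant.  */
#if 0
  poly_int64 diff;
  if (ptr_difference_const (e1, e2, &diff))
    /* Here DIFF == 8.  */;
#endif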
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
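
/* Illustrative sketch, not part of the original source: PTR below is
   an assumed pointer-typed tree.  POINTER_PLUS_EXPR requires a
   sizetype offset, which these helpers take care of converting.  */
#if 0
  tree q = fold_build_pointer_plus_hwi_loc (UNKNOWN_LOCATION, ptr, 4);
  tree r = fold_build_pointer_plus_loc (UNKNOWN_LOCATION, ptr,
					integer_one_node);
#endif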
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE to the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., NUL-terminated) string with no embedded NULs, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 NUL.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}
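
/* Illustrative sketch, not part of the original source: REF is an
   assumed tree referencing a + 1 with the declaration from the comment
   above (const char a[7] = "abc\0d").  */
#if 0
  unsigned HOST_WIDE_INT strsize;
  const char *bytes = getbyterep (ref, &strsize);
  /* BYTES points at the representation starting with "bc" and, per the
     contract above, strsize == sizeof a - 1 == 6.  */
#endif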
/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
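
/* Illustrative sketch, not part of the original source: c_getstr is
   the string-only wrapper around getbyterep.  STR below is assumed to
   reference a constant NUL-terminated string, e.g. the address of
   "abc".  */
#if 0
  if (const char *s = c_getstr (str))
    /* S points to the literal's bytes, here "abc".  */;
#endif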
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
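
/* Illustrative sketch, not part of the original source: the masks
   combine as documented above, so (x & 0xf0) | 3 can only have the
   bits of 0xf3 set, whatever the value of x.  */
#if 0
  tree x = create_tmp_var_raw (integer_type_node, "x");
  tree m = build2 (BIT_AND_EXPR, integer_type_node, x,
		   build_int_cst (integer_type_node, 0xf0));
  tree t = build2 (BIT_IOR_EXPR, integer_type_node, m,
		   build_int_cst (integer_type_node, 3));
  wide_int nz = tree_nonzero_bits (t);	/* 0xf3.  */
#endif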
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  if (TREE_CODE (op0) == SSA_NAME)
    op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
  if (TREE_CODE (op1) == SSA_NAME)
    op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
		 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
	equal = 2;
      else
	size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
	equal = 2;
      else
	size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
	equal = 2;
      else
	size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
	equal = 2;
      else
	size1 = tree_to_poly_int64 (sz1);
    }
  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know they aren't zero, or if we know both sizes
	 are nonzero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to the start of its object and
     the other points to the end of its object.  This is unspecified
     behavior e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, the pointers might be
     the same even when equal is 0, because string literals may be
     tail-merged.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
	  || ioff1 < 0
	  || ioff0 > TREE_STRING_LENGTH (base0)
	  || ioff1 > TREE_STRING_LENGTH (base1))
	return 2;

      /* If the bytes in the string literals starting at the pointers
	 differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
		  TREE_STRING_POINTER (base1) + ioff1,
		  MIN (TREE_STRING_LENGTH (base0) - ioff0,
		       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
	{
	  HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
	  if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
		      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
		      ioffmin) == 0)
	    /* If even the bytes in the string literal before the
	       pointers are the same, the string literals could be
	       tail merged.  */
	    return 2;
	}
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}
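
/* Illustrative sketch, not part of the original source: OP0 and OP1
   are assumed ADDR_EXPR trees, e.g. &a and &b for two distinct
   variables.  The tri-state result mirrors the comment above.  */
#if 0
  tree base0, base1;
  poly_int64 off0, off1;
  int res = address_compare (EQ_EXPR, boolean_type_node, op0, op1,
			     base0, base1, off0, off1,
			     /*generic=*/true);
  /* res == 0: known unequal regardless of offsets;
     res == 1: equal bases, compare OFF0 with OFF1;
     res == 2: unknown, no simplification.  */
#endif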
/* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE.  */
tree
ctor_single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;
  tree elt = NULL_TREE;

  if (TREE_CODE (t) != CONSTRUCTOR)
    return NULL_TREE;
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    if (!integer_zerop (ce->value) && !real_zerop (ce->value))
      {
	if (elt)
	  return NULL_TREE;
	elt = ce->value;
      }
  return elt;
}
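
/* Illustrative sketch, not part of the original source: CTOR is an
   assumed CONSTRUCTOR tree, e.g. for the vector {0, 5, 0, 0}.  */
#if 0
  tree elt = ctor_single_nonzero_element (ctor);
  /* ELT is the INTEGER_CST 5; with a second nonzero element the
     function would instead return NULL_TREE.  */
#endif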
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_cc_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */