gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85 #include "asan.h"
86 #include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
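/* The encoding above is a bitmask: with LT = 1, EQ = 2, GT = 4 and
   UNORD = 8, each code is the inclusive OR of its constituent relations.
   For example, COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ == 3, and
   COMPCODE_UNGE == COMPCODE_UNORD | COMPCODE_GT | COMPCODE_EQ == 14.
   ANDing or ORing two comparisons therefore reduces to a bitwise
   AND/OR of their codes.  */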
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
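/* For example, for INTEGER_CSTs 12 and 4 this returns the INTEGER_CST 3,
   while for 12 and 5 it returns NULL_TREE because the remainder is
   nonzero.  The test is done in widest_int via wi::multiple_of_p, so no
   intermediate overflow can occur in the narrower type.  */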
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning, a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
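/* The deferral machinery above is used in a bracketed pattern.  A
   typical caller (a sketch, not taken verbatim from any one caller)
   looks like:

     fold_defer_overflow_warnings ();
     tree res = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so a -Wstrict-overflow diagnostic is only emitted when the folded
   result is actually kept.  */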
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
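/* For a 32-bit signed type the only value this rejects is INT_MIN
   (0x80000000), whose wide_int representation has only the sign bit
   set; -INT_MIN is not representable, so wi::only_sign_bit_p is exactly
   the overflow test needed here.  Unsigned types are rejected wholesale
   by the TYPE_UNSIGNED check above.  */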
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but after negating one operand
	 it does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
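/* As a concrete instance of the PLUS_EXPR case above: for a signed int
   whose type wraps (e.g. under -fwrapv), negate_expr_p (x + 1) is true
   because the constant can absorb the negation, allowing -(x + 1) to be
   rewritten as (-1) - x.  Without wrapping overflow the PLUS_EXPR case
   refuses outright, since negating either operand could introduce new
   undefined overflow.  */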
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
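/* Example decompositions (sketches; the exact trees depend on prior
   canonicalization): splitting IN = x - 4 with CODE == PLUS_EXPR yields
   *minus_litp == 4 and returns x; splitting IN = ~x with CODE ==
   PLUS_EXPR undoes the -1 - x folding, yielding *litp == -1 and
   *minus_varp == x.  The pieces are recombined with associate_trees
   below.  */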
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
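/* A minimal usage sketch: folding 6 + 7 at 32-bit precision.

     wide_int a = wi::shwi (6, 32), b = wi::shwi (7, 32), r;
     wi::overflow_type ovf;
     if (wide_int_binop (r, PLUS_EXPR, a, b, SIGNED, &ovf))
       gcc_assert (wi::eq_p (r, wi::shwi (13, 32)) && ovf == wi::OVF_NONE);

   Division or modulus by zero, negative shift counts and unhandled
   codes all make the function return false instead of folding.  */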
/* Combine two poly_int constants ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
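/* For example, int_const_binop (PLUS_EXPR, two, three) on INTEGER_CSTs
   2 and 3 of type int yields the INTEGER_CST 5.  If the result exceeds
   the precision of the type, force_fit_type still produces a constant,
   but marks it with TREE_OVERFLOW when the type is signed or when the
   caller passed OVERFLOWABLE == -1 (as size_binop does).  */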
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
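/* For instance, (a + b) << c equals (a << c) + (b << c) in modular
   arithmetic, so LSHIFT_EXPR distributes over addition in operand 1
   (the shifted value) but not in operand 2: x << (a + b) is generally
   not (x << a) + (x << b).  This predicate is what lets const_binop
   below operate directly on stepped VECTOR_CST encodings.  */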
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }
  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
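/* For example, const_binop (PLUS_EXPR, ...) on two REAL_CSTs 2.5 and
   0.5 yields a REAL_CST 3.0, while an RDIV_EXPR by a literal zero under
   trapping math deliberately returns NULL_TREE above so that the
   runtime division, and the exception it raises, are preserved.  */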
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
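/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   directly to the sizetype constant 12, and adding size_zero_node to a
   tree T returns T unchanged via the fast path above.  Because the
   OVERFLOWABLE argument is -1, overflow in sizetype calculations is
   always recorded even though sizetype is unsigned.  */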

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
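
/* Illustrative sketch, not part of GCC (example_* is hypothetical):
   the size_diffop macro subtracts two unsigned sizetype values but
   returns the result in ssizetype, so a "negative" size survives.  */
#if 0
static void
example_size_diffop (void)
{
  tree d = size_diffop (size_int (2), size_int (5));
  /* D is the INTEGER_CST -3 with type ssizetype, not a huge
     wrapped-around unsigned value.  */
  gcc_checking_assert (tree_to_shwi (d) == -3);
}
#endif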

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = wi::to_wide (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = wi::to_wide (ut);
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
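
/* Illustrative sketch, not part of GCC (example_* is hypothetical; it
   calls the static fold_convert_const defined further below): a whole
   REAL_CST truncates back to the integer it came from, while a NaN or
   out-of-range value would fold to 0 or the saturated bound with
   TREE_OVERFLOW set, per the rules above.  */
#if 0
static void
example_fix_trunc_fold (void)
{
  /* 7 -> 7.0 -> 7: truncation of a REAL_CST folds at compile time.  */
  tree seven = build_int_cst (integer_type_node, 7);
  tree d = build_real_from_int_cst (double_type_node, seven);
  tree back = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, d);
  gcc_checking_assert (back && tree_to_shwi (back) == 7);
}
#endif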

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  scalar_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating-point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (arg1)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  /* With flag_rounding_math we should respect the current rounding mode
     unless the conversion is exact.  */
  if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
      && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating-point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
			   &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree arg_type = TREE_TYPE (arg1);
  if (arg_type == type)
    return arg1;

  /* We can't widen types, since the runtime value could overflow the
     original type before being extended to the new type.  */
  if (POLY_INT_CST_P (arg1)
      && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
      && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
    return build_poly_int_cst (type,
			       poly_wide_int::from (poly_int_cst_value (arg1),
						    TYPE_PRECISION (type),
						    TYPE_SIGN (arg_type)));

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree res = build_real_from_int_cst (type, arg1);
	  /* Avoid the folding if flag_rounding_math is on and the
	     conversion is not exact.  */
	  if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
	    {
	      bool fail = false;
	      wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
					    TYPE_PRECISION (TREE_TYPE (arg1)));
	      if (fail || wi::ne_p (w, wi::to_wide (arg1)))
		return NULL_TREE;
	    }
	  return res;
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (TREE_CODE (arg1) == VECTOR_CST
	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
	{
	  tree elttype = TREE_TYPE (type);
	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
	  /* We can't handle steps directly when extending, since the
	     values need to wrap at the original precision first.  */
	  bool step_ok_p
	    = (INTEGRAL_TYPE_P (elttype)
	       && INTEGRAL_TYPE_P (arg1_elttype)
	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
	  tree_vector_builder v;
	  if (!v.new_unary_operation (type, arg1, step_ok_p))
	    return NULL_TREE;
	  unsigned int len = v.encoded_nelts ();
	  for (unsigned int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v.quick_push (cvt);
	    }
	  return v.build ();
	}
    }
  return NULL_TREE;
}
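
/* Illustrative sketch, not part of GCC (example_* is hypothetical):
   fold_convert_const only folds constants; a non-constant operand
   yields NULL_TREE and the caller must build the conversion itself.
   Narrowing truncates modulo the target precision.  */
#if 0
static void
example_fold_convert_const (void)
{
  tree c = build_int_cst (integer_type_node, 300);
  /* Narrowing 300 to signed char wraps to 44 (300 - 256); the result
     may carry TREE_OVERFLOW to record that the value changed.  */
  tree n = fold_convert_const (NOP_EXPR, signed_char_type_node, c);
  gcc_checking_assert (n && tree_to_shwi (n) == 44);
}
#endif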

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return (INTEGRAL_TYPE_P (orig)
	      || (POINTER_TYPE_P (orig)
		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
	      || TREE_CODE (orig) == OFFSET_TYPE);

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    case VECTOR_TYPE:
      return (VECTOR_TYPE_P (orig)
	      && known_eq (TYPE_VECTOR_SUBPARTS (type),
			   TYPE_VECTOR_SUBPARTS (orig))
	      && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));

    default:
      return false;
    }
}
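
/* Illustrative sketch, not part of GCC (example_* is hypothetical):
   integral-to-integral conversions are NOP-convertible, but an int
   to double conversion needs FLOAT_EXPR, so the predicate rejects it.  */
#if 0
static void
example_fold_convertible_p (void)
{
  tree i = build_int_cst (integer_type_node, 42);
  gcc_checking_assert (fold_convertible_p (long_integer_type_node, i));
  gcc_checking_assert (!fold_convertible_p (double_type_node, i));
}
#endif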

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type),
						    arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
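
/* Illustrative sketch, not part of GCC (example_* is hypothetical):
   the fold_convert macro is the UNKNOWN_LOCATION wrapper around
   fold_convert_loc.  Constant operands fold on the spot; with the
   default -fno-rounding-math no FLOAT_EXPR node remains.  */
#if 0
static void
example_fold_convert (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  /* int 5 -> double 5.0 folds directly to a REAL_CST.  */
  tree d = fold_convert (double_type_node, five);
  gcc_checking_assert (TREE_CODE (d) == REAL_CST);
}
#endif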

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
    case VIEW_CONVERT_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
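
/* Illustrative sketch, not part of GCC (example_* is hypothetical):
   inverting x < y must keep NaN operands in mind, since x < y and
   x >= y are both false when either operand is NaN.  */
#if 0
static void
example_invert_comparison (void)
{
  /* Ignoring NaNs, the inverse of < is >=.  */
  gcc_checking_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  /* Honoring NaNs, the inverse is the unordered-or-greater-equal code,
     but only when trapping math is off; otherwise the function refuses
     with ERROR_MARK, per the check above.  */
  gcc_checking_assert (invert_tree_comparison (LT_EXPR, true)
		       == (flag_trapping_math ? ERROR_MARK : UNGE_EXPR));
}
#endif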

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
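
/* Worked example of the bit encoding (values from the comparison_code
   enum at the top of this file): each bit stands for one of the
   mutually exclusive outcomes LT, EQ, GT and UNORD, so combining two
   comparisons is plain bit arithmetic:

     COMPCODE_LT | COMPCODE_EQ  ==  1 | 2  ==  3  ==  COMPCODE_LE
     COMPCODE_LT & COMPCODE_LE  ==  1 & 3  ==  1  ==  COMPCODE_LT

   i.e. "x < y or x == y" is exactly "x <= y", and "x < y and x <= y"
   is exactly "x < y".  combine_comparisons below relies on this.  */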

/* Return true if COND1 tests the opposite condition of COND2.  */

bool
inverse_conditions_p (const_tree cond1, const_tree cond2)
{
  return (COMPARISON_CLASS_P (cond1)
	  && COMPARISON_CLASS_P (cond2)
	  && (invert_tree_comparison
	      (TREE_CODE (cond1),
	       HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
	  && operand_equal_p (TREE_OPERAND (cond1, 0),
			      TREE_OPERAND (cond2, 0), 0)
	  && operand_equal_p (TREE_OPERAND (cond1, 1),
			      TREE_OPERAND (cond2, 1), 0));
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
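
/* Illustrative sketch, not part of GCC (example_* is hypothetical):
   ORing x < y with x == y on the same operands collapses to a single
   x <= y comparison, exactly the bit math shown above.  */
#if 0
static void
example_combine_comparisons (tree x, tree y)
{
  tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
				LT_EXPR, EQ_EXPR, boolean_type_node, x, y);
  /* For integral X and Y this yields LE_EXPR <x, y>.  */
  gcc_checking_assert (t && TREE_CODE (t) == LE_EXPR);
}
#endif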

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
   not values of expressions.

   If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
   such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.

   If OEP_BITWISE is set, then require the values to be bitwise identical
   rather than simply numerically equal.  Do not take advantage of things
   like math-related flags or undefined behavior; only return true for
   values that are provably bitwise identical in all circumstances.

   Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
   any operand with side effect.  This is unnecessarily conservative in the
   case we know that arg0 and arg1 are in disjoint code paths (such as in
   ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
   addresses with TREE_CONSTANT flag set so we know that &var == &var
   even if var is volatile.  */

bool
operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
				  unsigned int flags)
{
  bool r;
  if (verify_hash_value (arg0, arg1, flags, &r))
    return r;

  STRIP_ANY_LOCATION_WRAPPER (arg0);
  STRIP_ANY_LOCATION_WRAPPER (arg1);

  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return false;

  /* Similarly, if either does not have a type (like a template id),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return false;

  /* Bitwise identity makes no sense if the values have different layouts.  */
  if ((flags & OEP_BITWISE)
      && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
    return false;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return false;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Address of INTEGER_CST is not defined; check that we did not forget
	 to drop the OEP_ADDRESS_OF flags.  */
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      return tree_int_cst_equal (arg0, arg1);
    }

  if (!(flags & OEP_ADDRESS_OF))
    {
      /* If both types don't have the same signedness, then we can't consider
	 them equal.  We must check this before the STRIP_NOPS calls
	 because they may change the signedness of the arguments.  As pointers
	 strictly don't have a signedness, require either two pointers or
	 two non-pointers as well.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
	  || POINTER_TYPE_P (TREE_TYPE (arg0))
	     != POINTER_TYPE_P (TREE_TYPE (arg1)))
	return false;

      /* If both types don't have the same precision, then it is not safe
	 to strip NOPs.  */
      if (element_precision (TREE_TYPE (arg0))
	  != element_precision (TREE_TYPE (arg1)))
	return false;

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
#if 0
  /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable
     this sanity check once the issue is solved.  */
  else
    /* Addresses of conversions and SSA_NAMEs (and many other things)
       are not defined.  Check that we did not forget to drop the
       OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
    gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
			 && TREE_CODE (arg0) != SSA_NAME);
#endif

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1))
    {
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
	;
      else if (flags & OEP_ADDRESS_OF)
	{
	  /* If we are interested in comparing addresses ignore
	     MEM_REF wrappings of the base that can appear just for
	     TBAA reasons.  */
	  if (TREE_CODE (arg0) == MEM_REF
	      && DECL_P (arg1)
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
	      && integer_zerop (TREE_OPERAND (arg0, 1)))
	    return true;
	  else if (TREE_CODE (arg1) == MEM_REF
		   && DECL_P (arg0)
		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
		   && integer_zerop (TREE_OPERAND (arg1, 1)))
	    return true;
	  return false;
	}
      else
	return false;
    }

  /* When not checking addresses, this is needed for conversions and for
     COMPONENT_REF.  Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
	  && !(flags & OEP_ADDRESS_OF)))
    return false;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_MATCH_SIDE_EFFECTS)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return true;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
	  return true;

	if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return true;
	  }
	return false;

      case VECTOR_CST:
	{
	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
	    return false;

	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
	    return false;

	  unsigned int count = vector_cst_encoded_nelts (arg0);
	  for (unsigned int i = 0; i < count; ++i)
	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
	      return false;
	  return true;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				flags | OEP_ADDRESS_OF
				| OEP_MATCH_SIDE_EFFECTS);
      case CONSTRUCTOR:
	/* In GIMPLE empty constructors are allowed in initializers of
	   aggregates.  */
	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
      default:
	break;
      }

  /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
     two instances of undefined behavior will give identical results.  */
  if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
    return false;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return false;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return false;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  if (!(flags & OEP_ADDRESS_OF))
	    {
	      if (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1)))
		return false;
	      /* Verify that the access types are compatible.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
		return false;
	    }
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case IMAGPART_EXPR:
	  /* Require the same offset.  */
	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
				TYPE_SIZE (TREE_TYPE (arg1)),
				flags & ~OEP_ADDRESS_OF))
	    return false;

	  /* Fallthru.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	case MEM_REF:
	  if (!(flags & OEP_ADDRESS_OF))
	    {
	      /* Require equal access sizes.  */
	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
		  && (!TYPE_SIZE (TREE_TYPE (arg0))
		      || !TYPE_SIZE (TREE_TYPE (arg1))
		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		return false;
	      /* Verify that access happens in similar types.  */
	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
		return false;
	      /* Verify that accesses are TBAA compatible.  */
	      if (!alias_ptr_types_compatible_p
		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  || (MR_DEPENDENCE_CLIQUE (arg0)
		      != MR_DEPENDENCE_CLIQUE (arg1))
		  || (MR_DEPENDENCE_BASE (arg0)
		      != MR_DEPENDENCE_BASE (arg1)))
		return false;
	      /* Verify that alignment is compatible.  */
	      if (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1)))
		return false;
	    }
	  flags &= ~OEP_ADDRESS_OF;
	  return (OP_SAME (0) && OP_SAME (1)
		  /* TARGET_MEM_REF require equal extra operands.  */
		  && (TREE_CODE (arg0) != TARGET_MEM_REF
		      || (OP_SAME_WITH_NULL (2)
			  && OP_SAME_WITH_NULL (3)
			  && OP_SAME_WITH_NULL (4))));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!OP_SAME (0))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  /* Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3)
		  /* Compare low bound and element size as with OEP_ADDRESS_OF
		     we have to account for the offset of the ref.  */
		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
		      || (operand_equal_p (array_ref_low_bound
					     (CONST_CAST_TREE (arg0)),
					   array_ref_low_bound
					     (CONST_CAST_TREE (arg1)), flags)
			  && operand_equal_p (array_ref_element_size
						(CONST_CAST_TREE (arg0)),
					      array_ref_element_size
						(CONST_CAST_TREE (arg1)),
					      flags))));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0))
	    return false;
	  {
	    bool compare_address = flags & OEP_ADDRESS_OF;

	    /* Most of the time we only need to compare FIELD_DECLs for
	       equality.  However when determining address look into actual
	       offsets.  These may match for unions and unshared record
	       types.  */
	    flags &= ~OEP_ADDRESS_OF;
	    if (!OP_SAME (1))
	      {
		if (compare_address
		    && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
		  {
		    if (TREE_OPERAND (arg0, 2)
			|| TREE_OPERAND (arg1, 2))
		      return OP_SAME_WITH_NULL (2);
		    tree field0 = TREE_OPERAND (arg0, 1);
		    tree field1 = TREE_OPERAND (arg1, 1);

		    if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
					  DECL_FIELD_OFFSET (field1), flags)
			|| !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
					     DECL_FIELD_BIT_OFFSET (field1),
					     flags))
		      return false;
		  }
		else
		  return false;
	      }
	  }
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return false;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	  /* Be sure we pass right ADDRESS_OF flag.  */
	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0),
				  flags | OEP_ADDRESS_OF);

	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return false;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return true;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case BIT_INSERT_EXPR:
	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
	     of op1.  Need to check to make sure they are the same.  */
	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
	    return false;
	  /* FALLTHRU */

	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	case MODIFY_EXPR:
	case INIT_EXPR:
	case COMPOUND_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0) && OP_SAME (1);
	  return false;

	case CLEANUP_POINT_EXPR:
	case EXPR_STMT:
	case SAVE_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0);
	  return false;

	case OBJ_TYPE_REF:
	  /* Virtual table reference.  */
	  if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
				OBJ_TYPE_REF_EXPR (arg1), flags))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
	      != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
	    return false;
	  if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
				OBJ_TYPE_REF_OBJECT (arg1), flags))
	    return false;
	  if (virtual_method_call_p (arg0))
	    {
	      if (!virtual_method_call_p (arg1))
		return false;
	      return types_same_for_odr (obj_type_ref_class (arg0),
					 obj_type_ref_class (arg1));
	    }
	  return false;

	default:
	  return false;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
	    /* If the CALL_EXPRs are not both internal or both normal
	       function calls, then they are not equal.  */
	    return false;
	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
	    {
	      /* If the CALL_EXPRs call different internal functions, then they
		 are not equal.  */
	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
		return false;
	    }
	  else
	    {
	      /* If the CALL_EXPRs call different functions, then they are not
		 equal.  */
	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				     flags))
		return false;
	    }

	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
	      return false;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return false;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return false;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      if (TREE_CODE (arg0) == FUNCTION_DECL)
	return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
		&& DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
		&& (DECL_UNCHECKED_FUNCTION_CODE (arg0)
		    == DECL_UNCHECKED_FUNCTION_CODE (arg1)));

      if (DECL_P (arg0)
	  && (flags & OEP_DECL_NAME)
	  && (flags & OEP_LEXICOGRAPHIC))
	{
	  /* Consider decls with the same name equal.  The caller needs
	     to make sure they refer to the same entity (such as a function
	     formal parameter).  */
	  tree a0name = DECL_NAME (arg0);
	  tree a1name = DECL_NAME (arg1);
	  const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
	  const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
	  return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
	}
      return false;

    case tcc_exceptional:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
	{
	  if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
	    return false;

	  /* In GIMPLE constructors are used only to build vectors from
	     elements.  Individual elements in the constructor must be
	     indexed in increasing order and form an initial sequence.

	     We make no effort to compare constructors in GENERIC.
	     (See sem_variable::equals in ipa-icf, which can do so for
	     constants.)  */
	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
	    return false;

	  /* Be sure that vectors constructed have the same representation.
	     We have only tested that element precision and modes match.
	     Vectors may be BLKmode, so also check that the number of
	     parts match.  */
	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
	    return false;

	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
	  unsigned int len = vec_safe_length (v0);

	  if (len != vec_safe_length (v1))
	    return false;

	  for (unsigned int i = 0; i < len; i++)
	    {
	      constructor_elt *c0 = &(*v0)[i];
	      constructor_elt *c1 = &(*v1)[i];

	      if (!operand_equal_p (c0->value, c1->value, flags)
		  /* In GIMPLE the indexes can be either NULL or matching i.
		     Double check this so we won't get false
		     positives for GENERIC.  */
		  || (c0->index
		      && (TREE_CODE (c0->index) != INTEGER_CST
			  || compare_tree_int (c0->index, i)))
		  || (c1->index
		      && (TREE_CODE (c1->index) != INTEGER_CST
			  || compare_tree_int (c1->index, i))))
		return false;
	    }
	  return true;
	}
      else if (TREE_CODE (arg0) == STATEMENT_LIST
	       && (flags & OEP_LEXICOGRAPHIC))
	{
	  /* Compare the STATEMENT_LISTs.  */
	  tree_stmt_iterator tsi1, tsi2;
	  tree body1 = CONST_CAST_TREE (arg0);
	  tree body2 = CONST_CAST_TREE (arg1);
	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
	       tsi_next (&tsi1), tsi_next (&tsi2))
	    {
	      /* The lists don't have the same number of statements.  */
	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
		return false;
	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
		return true;
	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
				    flags & (OEP_LEXICOGRAPHIC
					     | OEP_NO_HASH_CHECK)))
		return false;
	    }
	}
      return false;

    case tcc_statement:
      switch (TREE_CODE (arg0))
	{
	case RETURN_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME_WITH_NULL (0);
	  return false;
	case DEBUG_BEGIN_STMT:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return true;
	  return false;
	default:
	  return false;
	}

    default:
      return false;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
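
/* Illustrative sketch, not part of GCC (example_* is hypothetical):
   two separately built but structurally identical constants compare
   equal, both under the default flags and under OEP_ONLY_CONST.  */
#if 0
static void
example_operand_equal_p (void)
{
  tree a = build_int_cst (integer_type_node, 42);
  tree b = build_int_cst (integer_type_node, 42);
  gcc_checking_assert (operand_equal_p (a, b, 0));
  gcc_checking_assert (operand_equal_p (a, b, OEP_ONLY_CONST));
}
#endif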

/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.  */

void
operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
			       unsigned int flags)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  if (t == NULL_TREE || t == error_mark_node)
    {
      hstate.merge_hash (0);
      return;
    }

  STRIP_ANY_LOCATION_WRAPPER (t);

  if (!(flags & OEP_ADDRESS_OF))
    STRIP_NOPS (t);

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
	hstate.add_hwi (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2;
	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
	  val2 = rvc_zero;
	else
	  val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      hstate.add ((const void *) TREE_STRING_POINTER (t),
		  TREE_STRING_LENGTH (t));
      return;
    case COMPLEX_CST:
      hash_operand (TREE_REALPART (t), hstate, flags);
      hash_operand (TREE_IMAGPART (t), hstate, flags);
      return;
    case VECTOR_CST:
      {
	hstate.add_int (VECTOR_CST_NPATTERNS (t));
	hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_hwi (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case BLOCK:
    case OMP_CLAUSE:
      /* Ignore.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	hash_operand (TREE_VALUE (t), hstate, flags);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	flags &= ~OEP_ADDRESS_OF;
	hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    /* In GIMPLE the indexes can be either NULL or matching i.  */
	    if (field == NULL_TREE)
	      field = bitsize_int (idx);
	    hash_operand (field, hstate, flags);
	    hash_operand (value, hstate, flags);
	  }
	return;
      }
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (CONST_CAST_TREE (t));
	     !tsi_end_p (i); tsi_next (&i))
	  hash_operand (tsi_stmt (i), hstate, flags);
	return;
      }
    case TREE_VEC:
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
      return;
    case IDENTIFIER_NODE:
      hstate.add_object (IDENTIFIER_HASH_VALUE (t));
      return;
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      if (POLY_INT_CST_P (t))
	{
	  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
	  return;
	}
3776 tclass = TREE_CODE_CLASS (code);
3778 if (tclass == tcc_declaration)
3780 /* DECL's have a unique ID */
3781 hstate.add_hwi (DECL_UID (t));
3783 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3785 /* For comparisons that can be swapped, use the lower
3786 tree code. */
3787 enum tree_code ccode = swap_tree_comparison (code);
3788 if (code < ccode)
3789 ccode = code;
3790 hstate.add_object (ccode);
3791 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3792 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3794 else if (CONVERT_EXPR_CODE_P (code))
3796 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3797 operand_equal_p. */
3798 enum tree_code ccode = NOP_EXPR;
3799 hstate.add_object (ccode);
3801 /* Don't hash the type, that can lead to having nodes which
3802 compare equal according to operand_equal_p, but which
3803 have different hash codes. Make sure to include signedness
3804 in the hash computation. */
3805 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3806 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3808 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3809 else if (code == MEM_REF
3810 && (flags & OEP_ADDRESS_OF) != 0
3811 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3812 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3813 && integer_zerop (TREE_OPERAND (t, 1)))
3814 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3815 hstate, flags);
3816 /* Don't ICE on FE specific trees, or their arguments etc.
3817 during operand_equal_p hash verification. */
3818 else if (!IS_EXPR_CODE_CLASS (tclass))
3819 gcc_assert (flags & OEP_HASH_CHECK);
3820 else
3822 unsigned int sflags = flags;
3824 hstate.add_object (code);
3826 switch (code)
3828 case ADDR_EXPR:
3829 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3830 flags |= OEP_ADDRESS_OF;
3831 sflags = flags;
3832 break;
3834 case INDIRECT_REF:
3835 case MEM_REF:
3836 case TARGET_MEM_REF:
3837 flags &= ~OEP_ADDRESS_OF;
3838 sflags = flags;
3839 break;
3841 case COMPONENT_REF:
3842 if (sflags & OEP_ADDRESS_OF)
3844 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3845 if (TREE_OPERAND (t, 2))
3846 hash_operand (TREE_OPERAND (t, 2), hstate,
3847 flags & ~OEP_ADDRESS_OF);
3848 else
3850 tree field = TREE_OPERAND (t, 1);
3851 hash_operand (DECL_FIELD_OFFSET (field),
3852 hstate, flags & ~OEP_ADDRESS_OF);
3853 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3854 hstate, flags & ~OEP_ADDRESS_OF);
3856 return;
3858 break;
3859 case ARRAY_REF:
3860 case ARRAY_RANGE_REF:
3861 case BIT_FIELD_REF:
3862 sflags &= ~OEP_ADDRESS_OF;
3863 break;
3865 case COND_EXPR:
3866 flags &= ~OEP_ADDRESS_OF;
3867 break;
3869 case WIDEN_MULT_PLUS_EXPR:
3870 case WIDEN_MULT_MINUS_EXPR:
3872 /* The multiplication operands are commutative. */
3873 inchash::hash one, two;
3874 hash_operand (TREE_OPERAND (t, 0), one, flags);
3875 hash_operand (TREE_OPERAND (t, 1), two, flags);
3876 hstate.add_commutative (one, two);
3877 hash_operand (TREE_OPERAND (t, 2), two, flags);
3878 return;
3881 case CALL_EXPR:
3882 if (CALL_EXPR_FN (t) == NULL_TREE)
3883 hstate.add_int (CALL_EXPR_IFN (t));
3884 break;
3886 case TARGET_EXPR:
3887 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3888 Usually different TARGET_EXPRs should just use
3889 different temporaries in their slots. */
3890 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3891 return;
3893 case OBJ_TYPE_REF:
3894 /* Virtual table reference. */
3895 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3896 flags &= ~OEP_ADDRESS_OF;
3897 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3898 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3899 if (!virtual_method_call_p (t))
3900 return;
3901 if (tree c = obj_type_ref_class (t))
3903 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3904 /* We compute mangled names only when free_lang_data is run.
3905 In that case we can hash precisely. */
3906 if (TREE_CODE (c) == TYPE_DECL
3907 && DECL_ASSEMBLER_NAME_SET_P (c))
3908 hstate.add_object
3909 (IDENTIFIER_HASH_VALUE
3910 (DECL_ASSEMBLER_NAME (c)));
3912 return;
3913 default:
3914 break;
3917 /* Don't hash the type, that can lead to having nodes which
3918 compare equal according to operand_equal_p, but which
3919 have different hash codes. */
3920 if (code == NON_LVALUE_EXPR)
3922 /* Make sure to include signedness in the hash computation. */
3923 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3924 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3927 else if (commutative_tree_code (code))
3929 /* It's a commutative expression. We want to hash it the same
3930 however it appears. We do this by first hashing both operands
3931 and then rehashing based on the order of their independent
3932 hashes. */
3933 inchash::hash one, two;
3934 hash_operand (TREE_OPERAND (t, 0), one, flags);
3935 hash_operand (TREE_OPERAND (t, 1), two, flags);
3936 hstate.add_commutative (one, two);
3938 else
3939 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3940 hash_operand (TREE_OPERAND (t, i), hstate,
3941 i == 0 ? flags : sflags);
3943 return;
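/* A minimal standalone sketch of the add_commutative step above,
   assuming a toy combiner rather than the real inchash mixing: hash
   each operand independently, then fold the two sub-hashes together
   with an operation that is symmetric in its arguments, so that
   A + B and B + A receive the same hash value.  */

static unsigned int ATTRIBUTE_UNUSED
toy_commutative_combine (unsigned int h0, unsigned int h1)
{
  /* Addition and multiplication are both commutative, so swapping
     the operand hashes cannot change the result.  */
  return (h0 + h1) ^ (h0 * h1);
}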
3947 bool
3948 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3949 unsigned int flags, bool *ret)
3951 /* When checking and unless comparing DECL names, verify that if
3952 the outermost operand_equal_p call returns non-zero then ARG0
3953 and ARG1 have the same hash value. */
3954 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3956 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3958 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3960 inchash::hash hstate0 (0), hstate1 (0);
3961 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3962 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3963 hashval_t h0 = hstate0.end ();
3964 hashval_t h1 = hstate1.end ();
3965 gcc_assert (h0 == h1);
3967 *ret = true;
3969 else
3970 *ret = false;
3972 return true;
3975 return false;
3979 static operand_compare default_compare_instance;
3981 /* Convenience wrapper around the operand_compare class, because usually
3982 we do not need to play with the valueizer. */
3984 bool
3985 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3987 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3990 namespace inchash
3993 /* Generate a hash value for an expression. This can be used iteratively
3994 by passing a previous result as the HSTATE argument.
3996 This function is intended to produce the same hash for expressions which
3997 would compare equal using operand_equal_p. */
3998 void
3999 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4001 default_compare_instance.hash_operand (t, hstate, flags);
4006 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4007 with a different signedness or a narrower precision. */
4009 static bool
4010 operand_equal_for_comparison_p (tree arg0, tree arg1)
4012 if (operand_equal_p (arg0, arg1, 0))
4013 return true;
4015 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4016 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4017 return false;
4019 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4020 and see if the inner values are the same. This removes any
4021 signedness comparison, which doesn't matter here. */
4022 tree op0 = arg0;
4023 tree op1 = arg1;
4024 STRIP_NOPS (op0);
4025 STRIP_NOPS (op1);
4026 if (operand_equal_p (op0, op1, 0))
4027 return true;
4029 /* Discard a single widening conversion from ARG1 and see if the inner
4030 value is the same as ARG0. */
4031 if (CONVERT_EXPR_P (arg1)
4032 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4033 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4034 < TYPE_PRECISION (TREE_TYPE (arg1))
4035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4036 return true;
4038 return false;
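/* A standalone sketch of the variant relation accepted above,
   assuming plain C integer promotion rather than trees: a value of a
   narrower type survives a single widening conversion, so either
   form denotes the same quantity in a comparison.  */

static int ATTRIBUTE_UNUSED
toy_equal_for_comparison (short s)
{
  /* 's' versus '(int) s': the two tests agree for every value of S.  */
  return (s == 5) == ((int) s == 5);
}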
4041 /* See if ARG is an expression that is either a comparison or is performing
4042 arithmetic on comparisons. The comparisons must only be comparing
4043 two different values, which will be stored in *CVAL1 and *CVAL2; if
4044 they are nonzero it means that some operands have already been found.
4045 No variables may be used anywhere else in the expression except in the
4046 comparisons.
4048 If this is true, return true. Otherwise, return false. */
4050 static bool
4051 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4053 enum tree_code code = TREE_CODE (arg);
4054 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4056 /* We can handle some of the tcc_expression cases here. */
4057 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4058 tclass = tcc_unary;
4059 else if (tclass == tcc_expression
4060 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4061 || code == COMPOUND_EXPR))
4062 tclass = tcc_binary;
4064 switch (tclass)
4066 case tcc_unary:
4067 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4069 case tcc_binary:
4070 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4071 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4073 case tcc_constant:
4074 return true;
4076 case tcc_expression:
4077 if (code == COND_EXPR)
4078 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4079 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4080 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4081 return false;
4083 case tcc_comparison:
4084 /* First see if we can handle the first operand, then the second. For
4085 the second operand, we know *CVAL1 can't be zero. It must be that
4086 one side of the comparison is each of the values; test for the
4087 case where this isn't true by failing if the two operands
4088 are the same. */
4090 if (operand_equal_p (TREE_OPERAND (arg, 0),
4091 TREE_OPERAND (arg, 1), 0))
4092 return false;
4094 if (*cval1 == 0)
4095 *cval1 = TREE_OPERAND (arg, 0);
4096 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4098 else if (*cval2 == 0)
4099 *cval2 = TREE_OPERAND (arg, 0);
4100 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4102 else
4103 return false;
4105 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4107 else if (*cval2 == 0)
4108 *cval2 = TREE_OPERAND (arg, 1);
4109 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4111 else
4112 return false;
4114 return true;
4116 default:
4117 return false;
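/* E.g. for
     a == b || (a < b && b != a)
   the walk above records *CVAL1 = A and *CVAL2 = B and returns true:
   every comparison mentions exactly those two values.  It fails for
   a < b || c < d, where a third value appears after A and B have
   been recorded.  */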
4121 /* ARG is a tree that is known to contain just arithmetic operations and
4122 comparisons. Evaluate the operations in the tree substituting NEW0 for
4123 any occurrence of OLD0 as an operand of a comparison and likewise for
4124 NEW1 and OLD1. */
4126 static tree
4127 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4128 tree old1, tree new1)
4130 tree type = TREE_TYPE (arg);
4131 enum tree_code code = TREE_CODE (arg);
4132 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4134 /* We can handle some of the tcc_expression cases here. */
4135 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4136 tclass = tcc_unary;
4137 else if (tclass == tcc_expression
4138 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4139 tclass = tcc_binary;
4141 switch (tclass)
4143 case tcc_unary:
4144 return fold_build1_loc (loc, code, type,
4145 eval_subst (loc, TREE_OPERAND (arg, 0),
4146 old0, new0, old1, new1));
4148 case tcc_binary:
4149 return fold_build2_loc (loc, code, type,
4150 eval_subst (loc, TREE_OPERAND (arg, 0),
4151 old0, new0, old1, new1),
4152 eval_subst (loc, TREE_OPERAND (arg, 1),
4153 old0, new0, old1, new1));
4155 case tcc_expression:
4156 switch (code)
4158 case SAVE_EXPR:
4159 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4160 old1, new1);
4162 case COMPOUND_EXPR:
4163 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4164 old1, new1);
4166 case COND_EXPR:
4167 return fold_build3_loc (loc, code, type,
4168 eval_subst (loc, TREE_OPERAND (arg, 0),
4169 old0, new0, old1, new1),
4170 eval_subst (loc, TREE_OPERAND (arg, 1),
4171 old0, new0, old1, new1),
4172 eval_subst (loc, TREE_OPERAND (arg, 2),
4173 old0, new0, old1, new1));
4174 default:
4175 break;
4177 /* Fall through - ??? */
4179 case tcc_comparison:
4181 tree arg0 = TREE_OPERAND (arg, 0);
4182 tree arg1 = TREE_OPERAND (arg, 1);
4184 /* We need to check both for exact equality and tree equality. The
4185 former will be true if the operand has a side-effect. In that
4186 case, we know the operand occurred exactly once. */
4188 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4189 arg0 = new0;
4190 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4191 arg0 = new1;
4193 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4194 arg1 = new0;
4195 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4196 arg1 = new1;
4198 return fold_build2_loc (loc, code, type, arg0, arg1);
4201 default:
4202 return arg;
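/* E.g. evaluating
     a < b && b <= c
   with OLD0 = A, NEW0 = 0, OLD1 = B, NEW1 = 0 rebuilds the tree as
     0 < 0 && 0 <= c
   since only operands of the comparisons are substituted.  */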
4206 /* Return a tree for the case when the result of an expression is RESULT
4207 converted to TYPE and OMITTED was previously an operand of the expression
4208 but is now not needed (e.g., we folded OMITTED * 0).
4210 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4211 the conversion of RESULT to TYPE. */
4213 tree
4214 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4216 tree t = fold_convert_loc (loc, type, result);
4218 /* If the resulting operand is an empty statement, just return the omitted
4219 statement casted to void. */
4220 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4221 return build1_loc (loc, NOP_EXPR, void_type_node,
4222 fold_ignored_result (omitted));
4224 if (TREE_SIDE_EFFECTS (omitted))
4225 return build2_loc (loc, COMPOUND_EXPR, type,
4226 fold_ignored_result (omitted), t);
4228 return non_lvalue_loc (loc, t);
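/* E.g. when folding
     foo () * 0
   the result is 0 but FOO must still be called, so the return value
   is the COMPOUND_EXPR
     (foo (), 0)
   whereas without side effects the bare constant is returned.  */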
4231 /* Return a tree for the case when the result of an expression is RESULT
4232 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4233 of the expression but are now not needed.
4235 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4236 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4237 evaluated before OMITTED2. Otherwise, if neither has side effects,
4238 just do the conversion of RESULT to TYPE. */
4240 tree
4241 omit_two_operands_loc (location_t loc, tree type, tree result,
4242 tree omitted1, tree omitted2)
4244 tree t = fold_convert_loc (loc, type, result);
4246 if (TREE_SIDE_EFFECTS (omitted2))
4247 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4248 if (TREE_SIDE_EFFECTS (omitted1))
4249 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4251 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4255 /* Return a simplified tree node for the truth-negation of ARG. This
4256 never alters ARG itself. We assume that ARG is an operation that
4257 returns a truth value (0 or 1).
4259 FIXME: one would think we would fold the result, but it causes
4260 problems with the dominator optimizer. */
4262 static tree
4263 fold_truth_not_expr (location_t loc, tree arg)
4265 tree type = TREE_TYPE (arg);
4266 enum tree_code code = TREE_CODE (arg);
4267 location_t loc1, loc2;
4269 /* If this is a comparison, we can simply invert it, except for
4270 floating-point non-equality comparisons, in which case we just
4271 enclose a TRUTH_NOT_EXPR around what we have. */
4273 if (TREE_CODE_CLASS (code) == tcc_comparison)
4275 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4276 if (FLOAT_TYPE_P (op_type)
4277 && flag_trapping_math
4278 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4279 && code != NE_EXPR && code != EQ_EXPR)
4280 return NULL_TREE;
4282 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4283 if (code == ERROR_MARK)
4284 return NULL_TREE;
4286 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4287 TREE_OPERAND (arg, 1));
4288 copy_warning (ret, arg);
4289 return ret;
4292 switch (code)
4294 case INTEGER_CST:
4295 return constant_boolean_node (integer_zerop (arg), type);
4297 case TRUTH_AND_EXPR:
4298 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4299 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4300 return build2_loc (loc, TRUTH_OR_EXPR, type,
4301 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4302 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4304 case TRUTH_OR_EXPR:
4305 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4306 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4307 return build2_loc (loc, TRUTH_AND_EXPR, type,
4308 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4309 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4311 case TRUTH_XOR_EXPR:
4312 /* Here we can invert either operand. We invert the first operand
4313 unless the second operand is a TRUTH_NOT_EXPR in which case our
4314 result is the XOR of the first operand with the inside of the
4315 negation of the second operand. */
4317 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4318 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4319 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4320 else
4321 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4322 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4323 TREE_OPERAND (arg, 1));
4325 case TRUTH_ANDIF_EXPR:
4326 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4327 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4328 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4329 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4330 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4332 case TRUTH_ORIF_EXPR:
4333 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4334 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4335 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4336 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4337 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4339 case TRUTH_NOT_EXPR:
4340 return TREE_OPERAND (arg, 0);
4342 case COND_EXPR:
4344 tree arg1 = TREE_OPERAND (arg, 1);
4345 tree arg2 = TREE_OPERAND (arg, 2);
4347 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4348 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4350 /* A COND_EXPR may have a throw as one operand, which
4351 then has void type. Just leave void operands
4352 as they are. */
4353 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4354 VOID_TYPE_P (TREE_TYPE (arg1))
4355 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4356 VOID_TYPE_P (TREE_TYPE (arg2))
4357 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4360 case COMPOUND_EXPR:
4361 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4362 return build2_loc (loc, COMPOUND_EXPR, type,
4363 TREE_OPERAND (arg, 0),
4364 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4366 case NON_LVALUE_EXPR:
4367 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4368 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4370 CASE_CONVERT:
4371 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4372 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4374 /* fall through */
4376 case FLOAT_EXPR:
4377 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4378 return build1_loc (loc, TREE_CODE (arg), type,
4379 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4381 case BIT_AND_EXPR:
4382 if (!integer_onep (TREE_OPERAND (arg, 1)))
4383 return NULL_TREE;
4384 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4386 case SAVE_EXPR:
4387 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4389 case CLEANUP_POINT_EXPR:
4390 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4391 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4392 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4394 default:
4395 return NULL_TREE;
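/* A standalone sketch of the De Morgan rewrites above, assuming
   plain C ints rather than trees: negation is pushed into the
   operands, which is what the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases
   do one level at a time.  */

static int ATTRIBUTE_UNUSED
toy_truth_not (int a, int b)
{
  /* !(a && b) rewrites to !a || !b; the forms agree for all inputs.  */
  return (!(a && b)) == (!a || !b);
}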
4399 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4400 assume that ARG is an operation that returns a truth value (0 or 1
4401 for scalars, 0 or -1 for vectors). Return the folded expression if
4402 folding is successful. Otherwise, return NULL_TREE. */
4404 static tree
4405 fold_invert_truthvalue (location_t loc, tree arg)
4407 tree type = TREE_TYPE (arg);
4408 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4409 ? BIT_NOT_EXPR
4410 : TRUTH_NOT_EXPR,
4411 type, arg);
4414 /* Return a simplified tree node for the truth-negation of ARG. This
4415 never alters ARG itself. We assume that ARG is an operation that
4416 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4418 tree
4419 invert_truthvalue_loc (location_t loc, tree arg)
4421 if (TREE_CODE (arg) == ERROR_MARK)
4422 return arg;
4424 tree type = TREE_TYPE (arg);
4425 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4426 ? BIT_NOT_EXPR
4427 : TRUTH_NOT_EXPR,
4428 type, arg);
4431 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4432 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4433 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4434 is the original memory reference used to preserve the alias set of
4435 the access. */
4437 static tree
4438 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4439 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4440 int unsignedp, int reversep)
4442 tree result, bftype;
4444 /* Attempt not to lose the access path if possible. */
4445 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4447 tree ninner = TREE_OPERAND (orig_inner, 0);
4448 machine_mode nmode;
4449 poly_int64 nbitsize, nbitpos;
4450 tree noffset;
4451 int nunsignedp, nreversep, nvolatilep = 0;
4452 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4453 &noffset, &nmode, &nunsignedp,
4454 &nreversep, &nvolatilep);
4455 if (base == inner
4456 && noffset == NULL_TREE
4457 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4458 && !reversep
4459 && !nreversep
4460 && !nvolatilep)
4462 inner = ninner;
4463 bitpos -= nbitpos;
4467 alias_set_type iset = get_alias_set (orig_inner);
4468 if (iset == 0 && get_alias_set (inner) != iset)
4469 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4470 build_fold_addr_expr (inner),
4471 build_int_cst (ptr_type_node, 0));
4473 if (known_eq (bitpos, 0) && !reversep)
4475 tree size = TYPE_SIZE (TREE_TYPE (inner));
4476 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4477 || POINTER_TYPE_P (TREE_TYPE (inner)))
4478 && tree_fits_shwi_p (size)
4479 && tree_to_shwi (size) == bitsize)
4480 return fold_convert_loc (loc, type, inner);
4483 bftype = type;
4484 if (TYPE_PRECISION (bftype) != bitsize
4485 || TYPE_UNSIGNED (bftype) == !unsignedp)
4486 bftype = build_nonstandard_integer_type (bitsize, 0);
4488 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4489 bitsize_int (bitsize), bitsize_int (bitpos));
4490 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4492 if (bftype != type)
4493 result = fold_convert_loc (loc, type, result);
4495 return result;
4498 /* Optimize a bit-field compare.
4500 There are two cases: First is a compare against a constant and the
4501 second is a comparison of two items where the fields are at the same
4502 bit position relative to the start of a chunk (byte, halfword, word)
4503 large enough to contain it. In these cases we can avoid the shift
4504 implicit in bitfield extractions.
4506 For constants, we emit a compare of the shifted constant with the
4507 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4508 compared. For two fields at the same position, we do the ANDs with the
4509 similar mask and compare the result of the ANDs.
4511 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4512 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4513 are the left and right operands of the comparison, respectively.
4515 If the optimization described above can be done, we return the resulting
4516 tree. Otherwise we return zero. */
4518 static tree
4519 optimize_bit_field_compare (location_t loc, enum tree_code code,
4520 tree compare_type, tree lhs, tree rhs)
4522 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4523 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4524 tree type = TREE_TYPE (lhs);
4525 tree unsigned_type;
4526 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4527 machine_mode lmode, rmode;
4528 scalar_int_mode nmode;
4529 int lunsignedp, runsignedp;
4530 int lreversep, rreversep;
4531 int lvolatilep = 0, rvolatilep = 0;
4532 tree linner, rinner = NULL_TREE;
4533 tree mask;
4534 tree offset;
4536 /* Get all the information about the extractions being done. If the bit size
4537 is the same as the size of the underlying object, we aren't doing an
4538 extraction at all and so can do nothing. We also don't want to
4539 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4540 then will no longer be able to replace it. */
4541 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4542 &lunsignedp, &lreversep, &lvolatilep);
4543 if (linner == lhs
4544 || !known_size_p (plbitsize)
4545 || !plbitsize.is_constant (&lbitsize)
4546 || !plbitpos.is_constant (&lbitpos)
4547 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4548 || offset != 0
4549 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4550 || lvolatilep)
4551 return 0;
4553 if (const_p)
4554 rreversep = lreversep;
4555 else
4557 /* If this is not a constant, we can only do something if bit positions,
4558 sizes, signedness and storage order are the same. */
4559 rinner
4560 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4561 &runsignedp, &rreversep, &rvolatilep);
4563 if (rinner == rhs
4564 || maybe_ne (lbitpos, rbitpos)
4565 || maybe_ne (lbitsize, rbitsize)
4566 || lunsignedp != runsignedp
4567 || lreversep != rreversep
4568 || offset != 0
4569 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4570 || rvolatilep)
4571 return 0;
4574 /* Honor the C++ memory model and mimic what RTL expansion does. */
4575 poly_uint64 bitstart = 0;
4576 poly_uint64 bitend = 0;
4577 if (TREE_CODE (lhs) == COMPONENT_REF)
4579 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4580 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4581 return 0;
4584 /* See if we can find a mode to refer to this field. We should be able to,
4585 but fail if we can't. */
4586 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4587 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4588 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4589 TYPE_ALIGN (TREE_TYPE (rinner))),
4590 BITS_PER_WORD, false, &nmode))
4591 return 0;
4593 /* Set an unsigned type of the precision of this mode for the
4594 shifts below. */
4595 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4597 /* Compute the bit position and size for the new reference and our offset
4598 within it. If the new reference is the same size as the original, we
4599 won't optimize anything, so return zero. */
4600 nbitsize = GET_MODE_BITSIZE (nmode);
4601 nbitpos = lbitpos & ~ (nbitsize - 1);
4602 lbitpos -= nbitpos;
4603 if (nbitsize == lbitsize)
4604 return 0;
4606 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4607 lbitpos = nbitsize - lbitsize - lbitpos;
4609 /* Make the mask to be used against the extracted field. */
4610 mask = build_int_cst_type (unsigned_type, -1);
4611 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4612 mask = const_binop (RSHIFT_EXPR, mask,
4613 size_int (nbitsize - lbitsize - lbitpos));
4615 if (! const_p)
4617 if (nbitpos < 0)
4618 return 0;
4620 /* If not comparing with constant, just rework the comparison
4621 and return. */
4622 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4623 nbitsize, nbitpos, 1, lreversep);
4624 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4625 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4626 nbitsize, nbitpos, 1, rreversep);
4627 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4628 return fold_build2_loc (loc, code, compare_type, t1, t2);
4631 /* Otherwise, we are handling the constant case. See if the constant is too
4632 big for the field. Warn and return a tree for 0 (false) if so. We do
4633 this not only for its own sake, but to avoid having to test for this
4634 error case below. If we didn't, we might generate wrong code.
4636 For unsigned fields, the constant shifted right by the field length should
4637 be all zero. For signed fields, the high-order bits should agree with
4638 the sign bit. */
4640 if (lunsignedp)
4642 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4644 warning (0, "comparison is always %d due to width of bit-field",
4645 code == NE_EXPR);
4646 return constant_boolean_node (code == NE_EXPR, compare_type);
4649 else
4651 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4652 if (tem != 0 && tem != -1)
4654 warning (0, "comparison is always %d due to width of bit-field",
4655 code == NE_EXPR);
4656 return constant_boolean_node (code == NE_EXPR, compare_type);
4660 if (nbitpos < 0)
4661 return 0;
4663 /* Single-bit compares should always be against zero. */
4664 if (lbitsize == 1 && ! integer_zerop (rhs))
4666 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4667 rhs = build_int_cst (type, 0);
4670 /* Make a new bitfield reference, shift the constant over the
4671 appropriate number of bits and mask it with the computed mask
4672 (in case this was a signed field). If we changed it, make a new one. */
4673 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4674 nbitsize, nbitpos, 1, lreversep);
4676 rhs = const_binop (BIT_AND_EXPR,
4677 const_binop (LSHIFT_EXPR,
4678 fold_convert_loc (loc, unsigned_type, rhs),
4679 size_int (lbitpos)),
4680 mask);
4682 lhs = build2_loc (loc, code, compare_type,
4683 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4684 return lhs;
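/* A standalone sketch of the constant case handled above, assuming
     struct { unsigned f : 3; unsigned g : 5; } s;
   laid out little-endian so that F occupies the low bits of its
   byte: 's.f == 3' becomes a masked compare of the containing byte,
   with no extraction shift.  */

static int ATTRIBUTE_UNUSED
toy_bit_field_compare (unsigned char word)
{
  /* WORD is the byte holding S; mask away G and compare.  */
  return (word & 0x07) == 3;
}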
4687 /* Subroutine for fold_truth_andor_1: decode a field reference.
4689 If EXP is a comparison reference, we return the innermost reference.
4691 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4692 set to the starting bit number.
4694 If the innermost field can be completely contained in a mode-sized
4695 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4697 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4698 otherwise it is not changed.
4700 *PUNSIGNEDP is set to the signedness of the field.
4702 *PREVERSEP is set to the storage order of the field.
4704 *PMASK is set to the mask used. This is either contained in a
4705 BIT_AND_EXPR or derived from the width of the field.
4707 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4709 Return 0 if this is not a component reference or is one that we can't
4710 do anything with. */
4712 static tree
4713 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4714 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4715 int *punsignedp, int *preversep, int *pvolatilep,
4716 tree *pmask, tree *pand_mask)
4718 tree exp = *exp_;
4719 tree outer_type = 0;
4720 tree and_mask = 0;
4721 tree mask, inner, offset;
4722 tree unsigned_type;
4723 unsigned int precision;
4725 /* All the optimizations using this function assume integer fields.
4726 There are problems with FP fields since the type_for_size call
4727 below can fail for, e.g., XFmode. */
4728 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4729 return NULL_TREE;
4731 /* We are interested in the bare arrangement of bits, so strip everything
4732 that doesn't affect the machine mode. However, record the type of the
4733 outermost expression if it may matter below. */
4734 if (CONVERT_EXPR_P (exp)
4735 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4736 outer_type = TREE_TYPE (exp);
4737 STRIP_NOPS (exp);
4739 if (TREE_CODE (exp) == BIT_AND_EXPR)
4741 and_mask = TREE_OPERAND (exp, 1);
4742 exp = TREE_OPERAND (exp, 0);
4743 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4744 if (TREE_CODE (and_mask) != INTEGER_CST)
4745 return NULL_TREE;
4748 poly_int64 poly_bitsize, poly_bitpos;
4749 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4750 pmode, punsignedp, preversep, pvolatilep);
4751 if ((inner == exp && and_mask == 0)
4752 || !poly_bitsize.is_constant (pbitsize)
4753 || !poly_bitpos.is_constant (pbitpos)
4754 || *pbitsize < 0
4755 || offset != 0
4756 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4757 /* Reject out-of-bound accesses (PR79731). */
4758 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4759 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4760 *pbitpos + *pbitsize) < 0))
4761 return NULL_TREE;
4763 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4764 if (unsigned_type == NULL_TREE)
4765 return NULL_TREE;
4767 *exp_ = exp;
4769 /* If the number of bits in the reference is the same as the bitsize of
4770 the outer type, then the outer type gives the signedness. Otherwise
4771 (in case of a small bitfield) the signedness is unchanged. */
4772 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4773 *punsignedp = TYPE_UNSIGNED (outer_type);
4775 /* Compute the mask to access the bitfield. */
4776 precision = TYPE_PRECISION (unsigned_type);
4778 mask = build_int_cst_type (unsigned_type, -1);
4780 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4781 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4783 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4784 if (and_mask != 0)
4785 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4786 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4788 *pmask = mask;
4789 *pand_mask = and_mask;
4790 return inner;
4793 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4794 bit positions and MASK is SIGNED. */
4796 static bool
4797 all_ones_mask_p (const_tree mask, unsigned int size)
4799 tree type = TREE_TYPE (mask);
4800 unsigned int precision = TYPE_PRECISION (type);
4802 /* If this function returns true when the type of the mask is
4803 UNSIGNED, then there will be errors. In particular see
4804 gcc.c-torture/execute/990326-1.c. There does not appear to be
4805 any documentation paper trail as to why this is so. But the
4806 pre-wide-int code worked with that restriction, and it has been
4807 preserved here. */
4808 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4809 return false;
4811 return wi::mask (size, false, precision) == wi::to_wide (mask);
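/* E.g. with SIZE == 4 this accepts a signed 32-bit mask of
   0x0000000f and rejects 0x0000001f or 0x0000000e: the accepted
   value is exactly (1 << SIZE) - 1 at the full precision of the
   mask's type.  */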
4814 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4815 represents the sign bit of EXP's type. If EXP represents a sign
4816 or zero extension, also test VAL against the unextended type.
4817 The return value is the (sub)expression whose sign bit is VAL,
4818 or NULL_TREE otherwise. */
4820 tree
4821 sign_bit_p (tree exp, const_tree val)
4823 int width;
4824 tree t;
4826 /* Tree EXP must have an integral type. */
4827 t = TREE_TYPE (exp);
4828 if (! INTEGRAL_TYPE_P (t))
4829 return NULL_TREE;
4831 /* Tree VAL must be an integer constant. */
4832 if (TREE_CODE (val) != INTEGER_CST
4833 || TREE_OVERFLOW (val))
4834 return NULL_TREE;
4836 width = TYPE_PRECISION (t);
4837 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4838 return exp;
4840 /* Handle extension from a narrower type. */
4841 if (TREE_CODE (exp) == NOP_EXPR
4842 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4843 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4845 return NULL_TREE;
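/* E.g. for a 32-bit int EXP, VAL must be 0x80000000; and for
     (int) c
   with an 8-bit unsigned char C, the recursion above also accepts
   0x80, the sign bit of the unextended type.  */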
4848 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4849 to be evaluated unconditionally. */
4851 static bool
4852 simple_operand_p (const_tree exp)
4854 /* Strip any conversions that don't change the machine mode. */
4855 STRIP_NOPS (exp);
4857 return (CONSTANT_CLASS_P (exp)
4858 || TREE_CODE (exp) == SSA_NAME
4859 || (DECL_P (exp)
4860 && ! TREE_ADDRESSABLE (exp)
4861 && ! TREE_THIS_VOLATILE (exp)
4862 && ! DECL_NONLOCAL (exp)
4863 /* Don't regard global variables as simple. They may be
4864 allocated in ways unknown to the compiler (shared memory,
4865 #pragma weak, etc). */
4866 && ! TREE_PUBLIC (exp)
4867 && ! DECL_EXTERNAL (exp)
4868 /* Weakrefs are not safe to be read, since they can be NULL.
4869 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4870 have DECL_WEAK flag set. */
4871 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4872 /* Loading a static variable is unduly expensive, but global
4873 registers aren't expensive. */
4874 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4877 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4878 to be evaluated unconditionally.
4879 In addition to simple_operand_p, we assume that comparisons, conversions,
4880 and logic-not operations are simple, if their operands are simple, too. */
4882 static bool
4883 simple_operand_p_2 (tree exp)
4885 enum tree_code code;
4887 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4888 return false;
4890 while (CONVERT_EXPR_P (exp))
4891 exp = TREE_OPERAND (exp, 0);
4893 code = TREE_CODE (exp);
4895 if (TREE_CODE_CLASS (code) == tcc_comparison)
4896 return (simple_operand_p (TREE_OPERAND (exp, 0))
4897 && simple_operand_p (TREE_OPERAND (exp, 1)));
4899 if (code == TRUTH_NOT_EXPR)
4900 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4902 return simple_operand_p (exp);
4906 /* The following functions are subroutines to fold_range_test and allow it to
4907 try to change a logical combination of comparisons into a range test.
4909 For example, both
4910 X == 2 || X == 3 || X == 4 || X == 5
4911 and
4912 X >= 2 && X <= 5
4913 are converted to
4914 (unsigned) (X - 2) <= 3
4916 We describe each set of comparisons as being either inside or outside
4917 a range, using a variable named like IN_P, and then describe the
4918 range with a lower and upper bound. If one of the bounds is omitted,
4919 it represents either the highest or lowest value of the type.
4921 In the comments below, we represent a range by two numbers in brackets
4922 preceded by a "+" to designate being inside that range, or a "-" to
4923 designate being outside that range, so the condition can be inverted by
4924 flipping the prefix. An omitted bound is represented by a "-". For
4925 example, "- [-, 10]" means being outside the range starting at the lowest
4926 possible value and ending at 10, in other words, being greater than 10.
4927 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4928 always false.
4930 We set up things so that the missing bounds are handled in a consistent
4931 manner so neither a missing bound nor "true" and "false" need to be
4932 handled using a special case. */
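/* A standalone sketch of the canonical example above, assuming
   32-bit unsigned arithmetic: subtracting the low bound rebases the
   range at zero, and a single unsigned comparison then checks both
   bounds at once, because values below the low bound wrap around to
   very large ones.  */

static int ATTRIBUTE_UNUSED
toy_range_test (int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5, as one subtract-and-compare.  */
  return ((unsigned) x - 2u) <= 3u;
}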
4934 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4935 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4936 and UPPER1_P are nonzero if the respective argument is an upper bound
4937 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4938 must be specified for a comparison. ARG1 will be converted to ARG0's
4939 type if both are specified. */
4941 static tree
4942 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4943 tree arg1, int upper1_p)
4945 tree tem;
4946 int result;
4947 int sgn0, sgn1;
4949 /* If neither arg represents infinity, do the normal operation.
4950 Else, if not a comparison, return infinity. Else handle the special
4951 comparison rules. Note that most of the cases below won't occur, but
4952 are handled for consistency. */
4954 if (arg0 != 0 && arg1 != 0)
4956 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4957 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4958 STRIP_NOPS (tem);
4959 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4962 if (TREE_CODE_CLASS (code) != tcc_comparison)
4963 return 0;
4965 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4966 for neither. In real maths, we cannot assume open ended ranges are
4967 the same. But, this is computer arithmetic, where numbers are finite.
4968 We can therefore make the transformation of any unbounded range with
4969 the value Z, Z being greater than any representable number. This permits
4970 us to treat unbounded ranges as equal. */
4971 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4972 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4973 switch (code)
4975 case EQ_EXPR:
4976 result = sgn0 == sgn1;
4977 break;
4978 case NE_EXPR:
4979 result = sgn0 != sgn1;
4980 break;
4981 case LT_EXPR:
4982 result = sgn0 < sgn1;
4983 break;
4984 case LE_EXPR:
4985 result = sgn0 <= sgn1;
4986 break;
4987 case GT_EXPR:
4988 result = sgn0 > sgn1;
4989 break;
4990 case GE_EXPR:
4991 result = sgn0 >= sgn1;
4992 break;
4993 default:
4994 gcc_unreachable ();
4997 return constant_boolean_node (result, type);
5000 /* Helper routine for make_range. Perform one step for it, return
5001 new expression if the loop should continue or NULL_TREE if it should
5002 stop. */
5004 tree
5005 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5006 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5007 bool *strict_overflow_p)
5009 tree arg0_type = TREE_TYPE (arg0);
5010 tree n_low, n_high, low = *p_low, high = *p_high;
5011 int in_p = *p_in_p, n_in_p;
5013 switch (code)
5015 case TRUTH_NOT_EXPR:
5016 /* We can only do something if the range is testing for zero. */
5017 if (low == NULL_TREE || high == NULL_TREE
5018 || ! integer_zerop (low) || ! integer_zerop (high))
5019 return NULL_TREE;
5020 *p_in_p = ! in_p;
5021 return arg0;
5023 case EQ_EXPR: case NE_EXPR:
5024 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5025 /* We can only do something if the range is testing for zero
5026 and if the second operand is an integer constant. Note that
5027 saying something is "in" the range we make is done by
5028 complementing IN_P since it will set in the initial case of
5029 being not equal to zero; "out" is leaving it alone. */
5030 if (low == NULL_TREE || high == NULL_TREE
5031 || ! integer_zerop (low) || ! integer_zerop (high)
5032 || TREE_CODE (arg1) != INTEGER_CST)
5033 return NULL_TREE;
5035 switch (code)
5037 case NE_EXPR: /* - [c, c] */
5038 low = high = arg1;
5039 break;
5040 case EQ_EXPR: /* + [c, c] */
5041 in_p = ! in_p, low = high = arg1;
5042 break;
5043 case GT_EXPR: /* - [-, c] */
5044 low = 0, high = arg1;
5045 break;
5046 case GE_EXPR: /* + [c, -] */
5047 in_p = ! in_p, low = arg1, high = 0;
5048 break;
5049 case LT_EXPR: /* - [c, -] */
5050 low = arg1, high = 0;
5051 break;
5052 case LE_EXPR: /* + [-, c] */
5053 in_p = ! in_p, low = 0, high = arg1;
5054 break;
5055 default:
5056 gcc_unreachable ();
5059 /* If this is an unsigned comparison, we also know that EXP is
5060 greater than or equal to zero. We base the range tests we make
5061 on that fact, so we record it here so we can parse existing
5062 range tests. We test arg0_type since often the return type
5063 of, e.g. EQ_EXPR, is boolean. */
5064 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5066 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5067 in_p, low, high, 1,
5068 build_int_cst (arg0_type, 0),
5069 NULL_TREE))
5070 return NULL_TREE;
5072 in_p = n_in_p, low = n_low, high = n_high;
5074 /* If the high bound is missing, but we have a nonzero low
5075 bound, reverse the range so it goes from zero to the low bound
5076 minus 1. */
5077 if (high == 0 && low && ! integer_zerop (low))
5079 in_p = ! in_p;
5080 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5081 build_int_cst (TREE_TYPE (low), 1), 0);
5082 low = build_int_cst (arg0_type, 0);
5086 *p_low = low;
5087 *p_high = high;
5088 *p_in_p = in_p;
5089 return arg0;
5091 case NEGATE_EXPR:
5092 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5093 low and high are non-NULL, then normalize will DTRT. */
5094 if (!TYPE_UNSIGNED (arg0_type)
5095 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5097 if (low == NULL_TREE)
5098 low = TYPE_MIN_VALUE (arg0_type);
5099 if (high == NULL_TREE)
5100 high = TYPE_MAX_VALUE (arg0_type);
5103 /* (-x) IN [a,b] -> x in [-b, -a] */
5104 n_low = range_binop (MINUS_EXPR, exp_type,
5105 build_int_cst (exp_type, 0),
5106 0, high, 1);
5107 n_high = range_binop (MINUS_EXPR, exp_type,
5108 build_int_cst (exp_type, 0),
5109 0, low, 0);
5110 if (n_high != 0 && TREE_OVERFLOW (n_high))
5111 return NULL_TREE;
5112 goto normalize;
5114 case BIT_NOT_EXPR:
5115 /* ~ X -> -X - 1 */
5116 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5117 build_int_cst (exp_type, 1));
5119 case PLUS_EXPR:
5120 case MINUS_EXPR:
5121 if (TREE_CODE (arg1) != INTEGER_CST)
5122 return NULL_TREE;
5124 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5125 move a constant to the other side. */
5126 if (!TYPE_UNSIGNED (arg0_type)
5127 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5128 return NULL_TREE;
5130 /* If EXP is signed, any overflow in the computation is undefined,
5131 so we don't worry about it so long as our computations on
5132 the bounds don't overflow. For unsigned, overflow is defined
5133 and this is exactly the right thing. */
5134 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5135 arg0_type, low, 0, arg1, 0);
5136 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5137 arg0_type, high, 1, arg1, 0);
5138 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5139 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5140 return NULL_TREE;
5142 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5143 *strict_overflow_p = true;
5145 normalize:
5146 /* Check for an unsigned range which has wrapped around the maximum
5147 value thus making n_high < n_low, and normalize it. */
5148 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5150 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5151 build_int_cst (TREE_TYPE (n_high), 1), 0);
5152 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5153 build_int_cst (TREE_TYPE (n_low), 1), 0);
5155 /* If the range is of the form +/- [ x+1, x ], we won't
5156 be able to normalize it. But then, it represents the
5157 whole range or the empty set, so make it
5158 +/- [ -, - ]. */
5159 if (tree_int_cst_equal (n_low, low)
5160 && tree_int_cst_equal (n_high, high))
5161 low = high = 0;
5162 else
5163 in_p = ! in_p;
5165 else
5166 low = n_low, high = n_high;
5168 *p_low = low;
5169 *p_high = high;
5170 *p_in_p = in_p;
5171 return arg0;
5173 CASE_CONVERT:
5174 case NON_LVALUE_EXPR:
5175 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5176 return NULL_TREE;
5178 if (! INTEGRAL_TYPE_P (arg0_type)
5179 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5180 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5181 return NULL_TREE;
5183 n_low = low, n_high = high;
5185 if (n_low != 0)
5186 n_low = fold_convert_loc (loc, arg0_type, n_low);
5188 if (n_high != 0)
5189 n_high = fold_convert_loc (loc, arg0_type, n_high);
5191 /* If we're converting arg0 from an unsigned type, to exp,
5192 a signed type, we will be doing the comparison as unsigned.
5193 The tests above have already verified that LOW and HIGH
5194 are both positive.
5196 So we have to ensure that we will handle large unsigned
5197 values the same way that the current signed bounds treat
5198 negative values. */
5200 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5202 tree high_positive;
5203 tree equiv_type;
5204 /* For fixed-point modes, we need to pass the saturating flag
5205 as the 2nd parameter. */
5206 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5207 equiv_type
5208 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5209 TYPE_SATURATING (arg0_type));
5210 else
5211 equiv_type
5212 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5214 /* A range without an upper bound is, naturally, unbounded.
5215 Since convert would have cropped a very large value, use
5216 the max value for the destination type. */
5217 high_positive
5218 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5219 : TYPE_MAX_VALUE (arg0_type);
5221 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5222 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5223 fold_convert_loc (loc, arg0_type,
5224 high_positive),
5225 build_int_cst (arg0_type, 1));
5227 /* If the low bound is specified, "and" the range with the
5228 range for which the original unsigned value will be
5229 positive. */
5230 if (low != 0)
5232 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5233 1, fold_convert_loc (loc, arg0_type,
5234 integer_zero_node),
5235 high_positive))
5236 return NULL_TREE;
5238 in_p = (n_in_p == in_p);
5240 else
5242 /* Otherwise, "or" the range with the range of the input
5243 that will be interpreted as negative. */
5244 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5245 1, fold_convert_loc (loc, arg0_type,
5246 integer_zero_node),
5247 high_positive))
5248 return NULL_TREE;
5250 in_p = (in_p != n_in_p);
5254 *p_low = n_low;
5255 *p_high = n_high;
5256 *p_in_p = in_p;
5257 return arg0;
5259 default:
5260 return NULL_TREE;
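/* E.g. one step on
     x + 3 <= 10, i.e. + [-, 10] for the expression x + 3,
   moves the constant into the bounds, yielding + [-, 7] for plain X.
   If a bound wraps in an unsigned type, the normalization above
   flips IN_P and swaps the adjusted bounds instead.  */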
5264 /* Given EXP, a logical expression, set the range it is testing into
5265 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5266 actually being tested. *PLOW and *PHIGH will be made of the same
5267 type as the returned expression. If EXP is not a comparison, we
5268 will most likely not be returning a useful value and range. Set
5269 *STRICT_OVERFLOW_P to true if the return value is only valid
5270 because signed overflow is undefined; otherwise, do not change
5271 *STRICT_OVERFLOW_P. */
5273 tree
5274 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5275 bool *strict_overflow_p)
5277 enum tree_code code;
5278 tree arg0, arg1 = NULL_TREE;
5279 tree exp_type, nexp;
5280 int in_p;
5281 tree low, high;
5282 location_t loc = EXPR_LOCATION (exp);
5284 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5285 and see if we can refine the range. Some of the cases below may not
5286 happen, but it doesn't seem worth worrying about this. We "continue"
5287 the outer loop when we've changed something; otherwise we "break"
5288 the switch, which will "break" the while. */
5290 in_p = 0;
5291 low = high = build_int_cst (TREE_TYPE (exp), 0);
5293 while (1)
5295 code = TREE_CODE (exp);
5296 exp_type = TREE_TYPE (exp);
5297 arg0 = NULL_TREE;
5299 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5301 if (TREE_OPERAND_LENGTH (exp) > 0)
5302 arg0 = TREE_OPERAND (exp, 0);
5303 if (TREE_CODE_CLASS (code) == tcc_binary
5304 || TREE_CODE_CLASS (code) == tcc_comparison
5305 || (TREE_CODE_CLASS (code) == tcc_expression
5306 && TREE_OPERAND_LENGTH (exp) > 1))
5307 arg1 = TREE_OPERAND (exp, 1);
5309 if (arg0 == NULL_TREE)
5310 break;
5312 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5313 &high, &in_p, strict_overflow_p);
5314 if (nexp == NULL_TREE)
5315 break;
5316 exp = nexp;
5319 /* If EXP is a constant, we can evaluate whether this is true or false. */
5320 if (TREE_CODE (exp) == INTEGER_CST)
5322 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5323 exp, 0, low, 0))
5324 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5325 exp, 1, high, 1)));
5326 low = high = 0;
5327 exp = 0;
5330 *pin_p = in_p, *plow = low, *phigh = high;
5331 return exp;
5334 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5335 a bitwise check i.e. when
5336 LOW == 0xXX...X00...0
5337 HIGH == 0xXX...X11...1
5338 Return corresponding mask in MASK and stem in VALUE. */
5340 static bool
5341 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5342 tree *value)
5344 if (TREE_CODE (low) != INTEGER_CST
5345 || TREE_CODE (high) != INTEGER_CST)
5346 return false;
5348 unsigned prec = TYPE_PRECISION (type);
5349 wide_int lo = wi::to_wide (low, prec);
5350 wide_int hi = wi::to_wide (high, prec);
5352 wide_int end_mask = lo ^ hi;
5353 if ((end_mask & (end_mask + 1)) != 0
5354 || (lo & end_mask) != 0)
5355 return false;
5357 wide_int stem_mask = ~end_mask;
5358 wide_int stem = lo & stem_mask;
5359 if (stem != (hi & stem_mask))
5360 return false;
5362 *mask = wide_int_to_tree (type, stem_mask);
5363 *value = wide_int_to_tree (type, stem);
5365 return true;
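/* A standalone sketch of the check above, assuming 8-bit values:
   LOW = 0x50 and HIGH = 0x5f differ only in their trailing bits, so
   the range test collapses to a compare under the stem mask.  */

static int ATTRIBUTE_UNUSED
toy_maskable_range (unsigned char x)
{
  /* end_mask = 0x0f, stem_mask = 0xf0, stem = 0x50.  */
  return (x >= 0x50 && x <= 0x5f) == ((x & 0xf0) == 0x50);
}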
5368 /* Helper routine for build_range_check and match.pd. Return the type to
5369 perform the check or NULL if it shouldn't be optimized. */
5371 tree
5372 range_check_type (tree etype)
5374 /* First make sure that arithmetic in this type is valid, then make sure
5375 that it wraps around. */
5376 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5377 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5379 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5381 tree utype, minv, maxv;
5383 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5384 for the type in question, as we rely on this here. */
5385 utype = unsigned_type_for (etype);
5386 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5387 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5388 build_int_cst (TREE_TYPE (maxv), 1), 1);
5389 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5391 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5392 minv, 1, maxv, 1)))
5393 etype = utype;
5394 else
5395 return NULL_TREE;
5397 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5398 etype = unsigned_type_for (etype);
5399 return etype;
5402 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5403 type, TYPE, return an expression to test if EXP is in (or out of, depending
5404 on IN_P) the range. Return 0 if the test couldn't be created. */
5406 tree
5407 build_range_check (location_t loc, tree type, tree exp, int in_p,
5408 tree low, tree high)
5410 tree etype = TREE_TYPE (exp), mask, value;
5412 /* Disable this optimization for function pointer expressions
5413 on targets that require function pointer canonicalization. */
5414 if (targetm.have_canonicalize_funcptr_for_compare ()
5415 && POINTER_TYPE_P (etype)
5416 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5417 return NULL_TREE;
5419 if (! in_p)
5421 value = build_range_check (loc, type, exp, 1, low, high);
5422 if (value != 0)
5423 return invert_truthvalue_loc (loc, value);
5425 return 0;
5428 if (low == 0 && high == 0)
5429 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5431 if (low == 0)
5432 return fold_build2_loc (loc, LE_EXPR, type, exp,
5433 fold_convert_loc (loc, etype, high));
5435 if (high == 0)
5436 return fold_build2_loc (loc, GE_EXPR, type, exp,
5437 fold_convert_loc (loc, etype, low));
5439 if (operand_equal_p (low, high, 0))
5440 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5441 fold_convert_loc (loc, etype, low));
5443 if (TREE_CODE (exp) == BIT_AND_EXPR
5444 && maskable_range_p (low, high, etype, &mask, &value))
5445 return fold_build2_loc (loc, EQ_EXPR, type,
5446 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5447 exp, mask),
5448 value);
5450 if (integer_zerop (low))
5452 if (! TYPE_UNSIGNED (etype))
5454 etype = unsigned_type_for (etype);
5455 high = fold_convert_loc (loc, etype, high);
5456 exp = fold_convert_loc (loc, etype, exp);
5458 return build_range_check (loc, type, exp, 1, 0, high);
5461 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5462 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5464 int prec = TYPE_PRECISION (etype);
5466 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5468 if (TYPE_UNSIGNED (etype))
5470 tree signed_etype = signed_type_for (etype);
5471 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5472 etype
5473 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5474 else
5475 etype = signed_etype;
5476 exp = fold_convert_loc (loc, etype, exp);
5478 return fold_build2_loc (loc, GT_EXPR, type, exp,
5479 build_int_cst (etype, 0));
5483 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5484 This requires wrap-around arithmetic for the type of the expression. */
5485 etype = range_check_type (etype);
5486 if (etype == NULL_TREE)
5487 return NULL_TREE;
5489 high = fold_convert_loc (loc, etype, high);
5490 low = fold_convert_loc (loc, etype, low);
5491 exp = fold_convert_loc (loc, etype, exp);
5493 value = const_binop (MINUS_EXPR, high, low);
5495 if (value != 0 && !TREE_OVERFLOW (value))
5496 return build_range_check (loc, type,
5497 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5498 1, build_int_cst (etype, 0), value);
5500 return 0;
5503 /* Return the predecessor of VAL in its type, handling the infinite case. */
5505 static tree
5506 range_predecessor (tree val)
5508 tree type = TREE_TYPE (val);
5510 if (INTEGRAL_TYPE_P (type)
5511 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5512 return 0;
5513 else
5514 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5515 build_int_cst (TREE_TYPE (val), 1), 0);
5518 /* Return the successor of VAL in its type, handling the infinite case. */
5520 static tree
5521 range_successor (tree val)
5523 tree type = TREE_TYPE (val);
5525 if (INTEGRAL_TYPE_P (type)
5526 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5527 return 0;
5528 else
5529 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5530 build_int_cst (TREE_TYPE (val), 1), 0);
5533 /* Given two ranges, see if we can merge them into one. Return 1 if we
5534 can, 0 if we can't. Set the output range into the specified parameters. */
5536 bool
5537 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5538 tree high0, int in1_p, tree low1, tree high1)
5540 int no_overlap;
5541 int subset;
5542 int temp;
5543 tree tem;
5544 int in_p;
5545 tree low, high;
5546 int lowequal = ((low0 == 0 && low1 == 0)
5547 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5548 low0, 0, low1, 0)));
5549 int highequal = ((high0 == 0 && high1 == 0)
5550 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5551 high0, 1, high1, 1)));
5553 /* Make range 0 be the range that starts first, or ends last if they
5554 start at the same value. Swap them if that isn't the case. */
5555 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5556 low0, 0, low1, 0))
5557 || (lowequal
5558 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5559 high1, 1, high0, 1))))
5561 temp = in0_p, in0_p = in1_p, in1_p = temp;
5562 tem = low0, low0 = low1, low1 = tem;
5563 tem = high0, high0 = high1, high1 = tem;
5566 /* If the second range is != high1 where high1 is the type maximum of
5567 the type, try first merging with < high1 range. */
5568 if (low1
5569 && high1
5570 && TREE_CODE (low1) == INTEGER_CST
5571 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5572 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5573 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5574 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5575 && operand_equal_p (low1, high1, 0))
5577 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5578 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5579 !in1_p, NULL_TREE, range_predecessor (low1)))
5580 return true;
5581 /* Similarly for the second range != low1 where low1 is the type minimum
5582 of the type, try first merging with > low1 range. */
5583 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5584 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5585 !in1_p, range_successor (low1), NULL_TREE))
5586 return true;
5589 /* Now flag two cases, whether the ranges are disjoint or whether the
5590 second range is totally subsumed in the first. Note that the tests
5591 below are simplified by the ones above. */
5592 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5593 high0, 1, low1, 0));
5594 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5595 high1, 1, high0, 1));
5597 /* We now have four cases, depending on whether we are including or
5598 excluding the two ranges. */
5599 if (in0_p && in1_p)
5601 /* If they don't overlap, the result is false. If the second range
5602 is a subset it is the result. Otherwise, the range is from the start
5603 of the second to the end of the first. */
5604 if (no_overlap)
5605 in_p = 0, low = high = 0;
5606 else if (subset)
5607 in_p = 1, low = low1, high = high1;
5608 else
5609 in_p = 1, low = low1, high = high0;
5612 else if (in0_p && ! in1_p)
5614 /* If they don't overlap, the result is the first range. If they are
5615 equal, the result is false. If the second range is a subset of the
5616 first, and the ranges begin at the same place, we go from just after
5617 the end of the second range to the end of the first. If the second
5618 range is not a subset of the first, or if it is a subset and both
5619 ranges end at the same place, the range starts at the start of the
5620 first range and ends just before the second range.
5621 Otherwise, we can't describe this as a single range. */
5622 if (no_overlap)
5623 in_p = 1, low = low0, high = high0;
5624 else if (lowequal && highequal)
5625 in_p = 0, low = high = 0;
5626 else if (subset && lowequal)
5628 low = range_successor (high1);
5629 high = high0;
5630 in_p = 1;
5631 if (low == 0)
5633 /* We are in the weird situation where high0 > high1 but
5634 high1 has no successor. Punt. */
5635 return 0;
5638 else if (! subset || highequal)
5640 low = low0;
5641 high = range_predecessor (low1);
5642 in_p = 1;
5643 if (high == 0)
5645 /* low0 < low1 but low1 has no predecessor. Punt. */
5646 return 0;
5649 else
5650 return 0;
5653 else if (! in0_p && in1_p)
5655 /* If they don't overlap, the result is the second range. If the second
5656 is a subset of the first, the result is false. Otherwise,
5657 the range starts just after the first range and ends at the
5658 end of the second. */
5659 if (no_overlap)
5660 in_p = 1, low = low1, high = high1;
5661 else if (subset || highequal)
5662 in_p = 0, low = high = 0;
5663 else
5665 low = range_successor (high0);
5666 high = high1;
5667 in_p = 1;
5668 if (low == 0)
5670 /* high1 > high0 but high0 has no successor. Punt. */
5671 return 0;
5676 else
5678 /* The case where we are excluding both ranges. Here the complex case
5679 is if they don't overlap. In that case, the only time we have a
5680 range is if they are adjacent. If the second is a subset of the
5681 first, the result is the first. Otherwise, the range to exclude
5682 starts at the beginning of the first range and ends at the end of the
5683 second. */
5684 if (no_overlap)
5686 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5687 range_successor (high0),
5688 1, low1, 0)))
5689 in_p = 0, low = low0, high = high1;
5690 else
5692 /* Canonicalize - [min, x] into - [-, x]. */
5693 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5694 switch (TREE_CODE (TREE_TYPE (low0)))
5696 case ENUMERAL_TYPE:
5697 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5698 GET_MODE_BITSIZE
5699 (TYPE_MODE (TREE_TYPE (low0)))))
5700 break;
5701 /* FALLTHROUGH */
5702 case INTEGER_TYPE:
5703 if (tree_int_cst_equal (low0,
5704 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5705 low0 = 0;
5706 break;
5707 case POINTER_TYPE:
5708 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5709 && integer_zerop (low0))
5710 low0 = 0;
5711 break;
5712 default:
5713 break;
5716 /* Canonicalize - [x, max] into - [x, -]. */
5717 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5718 switch (TREE_CODE (TREE_TYPE (high1)))
5720 case ENUMERAL_TYPE:
5721 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5722 GET_MODE_BITSIZE
5723 (TYPE_MODE (TREE_TYPE (high1)))))
5724 break;
5725 /* FALLTHROUGH */
5726 case INTEGER_TYPE:
5727 if (tree_int_cst_equal (high1,
5728 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5729 high1 = 0;
5730 break;
5731 case POINTER_TYPE:
5732 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5733 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5734 high1, 1,
5735 build_int_cst (TREE_TYPE (high1), 1),
5736 1)))
5737 high1 = 0;
5738 break;
5739 default:
5740 break;
5743 /* The ranges might also be adjacent between the maximum and
5744 minimum values of the given type. For
5745 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5746 return + [x + 1, y - 1]. */
5747 if (low0 == 0 && high1 == 0)
5749 low = range_successor (high0);
5750 high = range_predecessor (low1);
5751 if (low == 0 || high == 0)
5752 return 0;
5754 in_p = 1;
5756 else
5757 return 0;
5760 else if (subset)
5761 in_p = 0, low = low0, high = high0;
5762 else
5763 in_p = 0, low = low0, high = high1;
5766 *pin_p = in_p, *plow = low, *phigh = high;
5767 return 1;
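/* Editorial examples: merge_ranges combines the two tests as a
   conjunction.  With unsigned char bounds:

     in  [0, 19]  and  in  [10, 29]  ->  in  [10, 19]   (overlap)
     in  [0, 9]   and  in  [20, 29]  ->  false          (disjoint)
     in  [0, 29]  and  out [10, 29]  ->  in  [0, 9]
     out [0, 9]   and  out [10, 19]  ->  out [0, 19]    (adjacent)

   fold_range_test below obtains the disjunction of two tests by
   inverting both inputs and then the merged result.  */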
5771 /* Subroutine of fold, looking inside expressions of the form
5772 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5773 are the three operands of the COND_EXPR. This function is
5774 being used also to optimize A op B ? C : A, by reversing the
5775 comparison first.
5777 Return a folded expression whose code is not a COND_EXPR
5778 anymore, or NULL_TREE if no folding opportunity is found. */
5780 static tree
5781 fold_cond_expr_with_comparison (location_t loc, tree type,
5782 enum tree_code comp_code,
5783 tree arg00, tree arg01, tree arg1, tree arg2)
5785 tree arg1_type = TREE_TYPE (arg1);
5786 tree tem;
5788 STRIP_NOPS (arg1);
5789 STRIP_NOPS (arg2);
5791 /* If we have A op 0 ? A : -A, consider applying the following
5792 transformations:
5794 A == 0? A : -A same as -A
5795 A != 0? A : -A same as A
5796 A >= 0? A : -A same as abs (A)
5797 A > 0? A : -A same as abs (A)
5798 A <= 0? A : -A same as -abs (A)
5799 A < 0? A : -A same as -abs (A)
5801 None of these transformations work for modes with signed
5802 zeros. If A is +/-0, the first two transformations will
5803 change the sign of the result (from +0 to -0, or vice
5804 versa). The last four will fix the sign of the result,
5805 even though the original expressions could be positive or
5806 negative, depending on the sign of A.
5808 Note that all these transformations are correct if A is
5809 NaN, since the two alternatives (A and -A) are also NaNs. */
5810 if (!HONOR_SIGNED_ZEROS (type)
5811 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5812 ? real_zerop (arg01)
5813 : integer_zerop (arg01))
5814 && ((TREE_CODE (arg2) == NEGATE_EXPR
5815 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5816 /* In the case that A is of the form X-Y, '-A' (arg2) may
5817 have already been folded to Y-X, check for that. */
5818 || (TREE_CODE (arg1) == MINUS_EXPR
5819 && TREE_CODE (arg2) == MINUS_EXPR
5820 && operand_equal_p (TREE_OPERAND (arg1, 0),
5821 TREE_OPERAND (arg2, 1), 0)
5822 && operand_equal_p (TREE_OPERAND (arg1, 1),
5823 TREE_OPERAND (arg2, 0), 0))))
5824 switch (comp_code)
5826 case EQ_EXPR:
5827 case UNEQ_EXPR:
5828 tem = fold_convert_loc (loc, arg1_type, arg1);
5829 return fold_convert_loc (loc, type, negate_expr (tem));
5830 case NE_EXPR:
5831 case LTGT_EXPR:
5832 return fold_convert_loc (loc, type, arg1);
5833 case UNGE_EXPR:
5834 case UNGT_EXPR:
5835 if (flag_trapping_math)
5836 break;
5837 /* Fall through. */
5838 case GE_EXPR:
5839 case GT_EXPR:
5840 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5841 break;
5842 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5843 return fold_convert_loc (loc, type, tem);
5844 case UNLE_EXPR:
5845 case UNLT_EXPR:
5846 if (flag_trapping_math)
5847 break;
5848 /* FALLTHRU */
5849 case LE_EXPR:
5850 case LT_EXPR:
5851 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5852 break;
5853 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5854 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5856 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5857 is not: it invokes UB both in abs and in the negation of its
5858 result. So, use ABSU_EXPR instead. */
5859 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5860 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5861 tem = negate_expr (tem);
5862 return fold_convert_loc (loc, type, tem);
5864 else
5866 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5867 return negate_expr (fold_convert_loc (loc, type, tem));
5869 default:
5870 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5871 break;
5874 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5875 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5876 both transformations are correct when A is NaN: A != 0
5877 is then true, and A == 0 is false. */
5879 if (!HONOR_SIGNED_ZEROS (type)
5880 && integer_zerop (arg01) && integer_zerop (arg2))
5882 if (comp_code == NE_EXPR)
5883 return fold_convert_loc (loc, type, arg1);
5884 else if (comp_code == EQ_EXPR)
5885 return build_zero_cst (type);
5888 /* Try some transformations of A op B ? A : B.
5890 A == B? A : B same as B
5891 A != B? A : B same as A
5892 A >= B? A : B same as max (A, B)
5893 A > B? A : B same as max (B, A)
5894 A <= B? A : B same as min (A, B)
5895 A < B? A : B same as min (B, A)
5897 As above, these transformations don't work in the presence
5898 of signed zeros. For example, if A and B are zeros of
5899 opposite sign, the first two transformations will change
5900 the sign of the result. In the last four, the original
5901 expressions give different results for (A=+0, B=-0) and
5902 (A=-0, B=+0), but the transformed expressions do not.
5904 The first two transformations are correct if either A or B
5905 is a NaN. In the first transformation, the condition will
5906 be false, and B will indeed be chosen. In the case of the
5907 second transformation, the condition A != B will be true,
5908 and A will be chosen.
5910 The conversions to max() and min() are not correct if B is
5911 a number and A is not. The conditions in the original
5912 expressions will be false, so all four give B. The min()
5913 and max() versions would give a NaN instead. */
5914 if (!HONOR_SIGNED_ZEROS (type)
5915 && operand_equal_for_comparison_p (arg01, arg2)
5916 /* Avoid these transformations if the COND_EXPR may be used
5917 as an lvalue in the C++ front-end. PR c++/19199. */
5918 && (in_gimple_form
5919 || VECTOR_TYPE_P (type)
5920 || (! lang_GNU_CXX ()
5921 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5922 || ! maybe_lvalue_p (arg1)
5923 || ! maybe_lvalue_p (arg2)))
5925 tree comp_op0 = arg00;
5926 tree comp_op1 = arg01;
5927 tree comp_type = TREE_TYPE (comp_op0);
5929 switch (comp_code)
5931 case EQ_EXPR:
5932 return fold_convert_loc (loc, type, arg2);
5933 case NE_EXPR:
5934 return fold_convert_loc (loc, type, arg1);
5935 case LE_EXPR:
5936 case LT_EXPR:
5937 case UNLE_EXPR:
5938 case UNLT_EXPR:
5939 /* In C++ a ?: expression can be an lvalue, so put the
5940 operand which will be used if they are equal first
5941 so that we can convert this back to the
5942 corresponding COND_EXPR. */
5943 if (!HONOR_NANS (arg1))
5945 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5946 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5947 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5948 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5949 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5950 comp_op1, comp_op0);
5951 return fold_convert_loc (loc, type, tem);
5953 break;
5954 case GE_EXPR:
5955 case GT_EXPR:
5956 case UNGE_EXPR:
5957 case UNGT_EXPR:
5958 if (!HONOR_NANS (arg1))
5960 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5961 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5962 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5963 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5964 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5965 comp_op1, comp_op0);
5966 return fold_convert_loc (loc, type, tem);
5968 break;
5969 case UNEQ_EXPR:
5970 if (!HONOR_NANS (arg1))
5971 return fold_convert_loc (loc, type, arg2);
5972 break;
5973 case LTGT_EXPR:
5974 if (!HONOR_NANS (arg1))
5975 return fold_convert_loc (loc, type, arg1);
5976 break;
5977 default:
5978 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5979 break;
5983 return NULL_TREE;
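/* Editorial sketch of the transformations above on plain C doubles,
   assuming -fno-signed-zeros so the abs rewrite is valid:

     double fold_demo (double a)
     {
       return a >= 0 ? a : -a;   // folded to ABS_EXPR <a>
     }

   With signed zeros honored the rewrite is unsound: for a = -0.0 the
   original yields -0.0 while fabs-style ABS_EXPR yields +0.0.  */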
5988 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5989 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5990 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5991 false) >= 2)
5992 #endif
5994 /* OP0 and OP1 form some logical combination CODE of boolean tests. See
5995 if we can merge it into some range test. Return the new tree if so. */
5997 static tree
5998 fold_range_test (location_t loc, enum tree_code code, tree type,
5999 tree op0, tree op1)
6001 int or_op = (code == TRUTH_ORIF_EXPR
6002 || code == TRUTH_OR_EXPR);
6003 int in0_p, in1_p, in_p;
6004 tree low0, low1, low, high0, high1, high;
6005 bool strict_overflow_p = false;
6006 tree tem, lhs, rhs;
6007 const char * const warnmsg = G_("assuming signed overflow does not occur "
6008 "when simplifying range test");
6010 if (!INTEGRAL_TYPE_P (type))
6011 return 0;
6013 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6014 /* If op0 is known true or false and this is a short-circuiting
6015 operation, we must not merge with op1 since that makes side-effects
6016 unconditional. So special-case this. */
6017 if (!lhs
6018 && ((code == TRUTH_ORIF_EXPR && in0_p)
6019 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6020 return op0;
6021 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6023 /* If this is an OR operation, invert both sides; we will invert
6024 again at the end. */
6025 if (or_op)
6026 in0_p = ! in0_p, in1_p = ! in1_p;
6028 /* If both expressions are the same, if we can merge the ranges, and we
6029 can build the range test, return it or its inversion. If one of the
6030 ranges is always true or always false, consider it to be the same
6031 expression as the other. */
6032 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6033 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6034 in1_p, low1, high1)
6035 && (tem = (build_range_check (loc, type,
6036 lhs != 0 ? lhs
6037 : rhs != 0 ? rhs : integer_zero_node,
6038 in_p, low, high))) != 0)
6040 if (strict_overflow_p)
6041 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6042 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6045 /* On machines where the branch cost is expensive, if this is a
6046 short-circuited branch and the underlying object on both sides
6047 is the same, make a non-short-circuit operation. */
6048 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6049 if (param_logical_op_non_short_circuit != -1)
6050 logical_op_non_short_circuit
6051 = param_logical_op_non_short_circuit;
6052 if (logical_op_non_short_circuit
6053 && !sanitize_coverage_p ()
6054 && lhs != 0 && rhs != 0
6055 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6056 && operand_equal_p (lhs, rhs, 0))
6058 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6059 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6060 which cases we can't do this. */
6061 if (simple_operand_p (lhs))
6062 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6063 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6064 type, op0, op1);
6066 else if (!lang_hooks.decls.global_bindings_p ()
6067 && !CONTAINS_PLACEHOLDER_P (lhs))
6069 tree common = save_expr (lhs);
6071 if ((lhs = build_range_check (loc, type, common,
6072 or_op ? ! in0_p : in0_p,
6073 low0, high0)) != 0
6074 && (rhs = build_range_check (loc, type, common,
6075 or_op ? ! in1_p : in1_p,
6076 low1, high1)) != 0)
6078 if (strict_overflow_p)
6079 fold_overflow_warning (warnmsg,
6080 WARN_STRICT_OVERFLOW_COMPARISON);
6081 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6082 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6083 type, lhs, rhs);
6088 return 0;
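/* Editorial example: the canonical beneficiary of fold_range_test is

     int is_digit (char ch)
     {
       return ch >= '0' && ch <= '9';
     }

   make_range turns each comparison into a range, merge_ranges fuses
   them into [48, 57], and build_range_check emits, in effect, the
   branch-free  (unsigned char) (ch - '0') <= 9.  */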
6091 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
6092 P-bit value. Arrange things so the extra bits will be set to zero if and
6093 only if C is sign-extended to its full width. If MASK is nonzero,
6094 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6096 static tree
6097 unextend (tree c, int p, int unsignedp, tree mask)
6099 tree type = TREE_TYPE (c);
6100 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6101 tree temp;
6103 if (p == modesize || unsignedp)
6104 return c;
6106 /* We work by getting just the sign bit into the low-order bit, then
6107 into the high-order bit, then sign-extend. We then XOR that value
6108 with C. */
6109 temp = build_int_cst (TREE_TYPE (c),
6110 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6112 /* We must use a signed type in order to get an arithmetic right shift.
6113 However, we must also avoid introducing accidental overflows, so that
6114 a subsequent call to integer_zerop will work. Hence we must
6115 do the type conversion here. At this point, the constant is either
6116 zero or one, and the conversion to a signed type can never overflow.
6117 We could get an overflow if this conversion is done anywhere else. */
6118 if (TYPE_UNSIGNED (type))
6119 temp = fold_convert (signed_type_for (type), temp);
6121 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6122 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6123 if (mask != 0)
6124 temp = const_binop (BIT_AND_EXPR, temp,
6125 fold_convert (TREE_TYPE (c), mask));
6126 /* If necessary, convert the type back to match the type of C. */
6127 if (TYPE_UNSIGNED (type))
6128 temp = fold_convert (type, temp);
6130 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
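/* Editorial aside: a related classic C idiom for replicating bit P-1
   of a P-bit value across the full word (not literally the shift
   sequence built above, which works on trees):

     int sign_extend (unsigned c, int p)
     {
       unsigned m = 1u << (p - 1);    // sign bit of the P-bit field
       return (int) ((c ^ m) - m);    // assumes bits >= P are zero
     }
*/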
6133 /* For an expression that has the form
6134 (A && B) || ~B
6135 or
6136 (A || B) && ~B,
6137 we can drop one of the inner expressions and simplify to
6138 A || ~B
6139 or
6140 A && ~B
6141 LOC is the location of the resulting expression. OP is the inner
6142 logical operation; the left-hand side in the examples above, while CMPOP
6143 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6144 removing a condition that guards another, as in
6145 (A != NULL && A->...) || A == NULL
6146 which we must not transform. If RHS_ONLY is true, only eliminate the
6147 right-most operand of the inner logical operation. */
6149 static tree
6150 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6151 bool rhs_only)
6153 tree type = TREE_TYPE (cmpop);
6154 enum tree_code code = TREE_CODE (cmpop);
6155 enum tree_code truthop_code = TREE_CODE (op);
6156 tree lhs = TREE_OPERAND (op, 0);
6157 tree rhs = TREE_OPERAND (op, 1);
6158 tree orig_lhs = lhs, orig_rhs = rhs;
6159 enum tree_code rhs_code = TREE_CODE (rhs);
6160 enum tree_code lhs_code = TREE_CODE (lhs);
6161 enum tree_code inv_code;
6163 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6164 return NULL_TREE;
6166 if (TREE_CODE_CLASS (code) != tcc_comparison)
6167 return NULL_TREE;
6169 if (rhs_code == truthop_code)
6171 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6172 if (newrhs != NULL_TREE)
6174 rhs = newrhs;
6175 rhs_code = TREE_CODE (rhs);
6178 if (lhs_code == truthop_code && !rhs_only)
6180 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6181 if (newlhs != NULL_TREE)
6183 lhs = newlhs;
6184 lhs_code = TREE_CODE (lhs);
6188 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6189 if (inv_code == rhs_code
6190 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6191 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6192 return lhs;
6193 if (!rhs_only && inv_code == lhs_code
6194 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6195 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6196 return rhs;
6197 if (rhs != orig_rhs || lhs != orig_lhs)
6198 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6199 lhs, rhs);
6200 return NULL_TREE;
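/* Editorial example: for

     (a != 0 && b != 0) || a == 0

   the inverse of the guard a == 0 is the left operand a != 0, and
   dropping it yields the equivalent  b != 0 || a == 0.  RHS_ONLY
   exists for cases like  (p != NULL && p->count > 0) || p == NULL,
   where the left operand guards evaluation of the right one and so
   must be kept.  */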
6203 /* Find ways of folding logical expressions of LHS and RHS:
6204 Try to merge two comparisons to the same innermost item.
6205 Look for range tests like "ch >= '0' && ch <= '9'".
6206 Look for combinations of simple terms on machines with expensive branches
6207 and evaluate the RHS unconditionally.
6209 For example, if we have p->a == 2 && p->b == 4 and we can make an
6210 object large enough to span both A and B, we can do this with a comparison
6211 against the object ANDed with a mask.
6213 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6214 operations to do this with one comparison.
6216 We check for both normal comparisons and the BIT_AND_EXPRs made by
6217 this function and the one above.
6219 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6220 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6222 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
6223 two operands.
6225 We return the simplified tree or 0 if no optimization is possible. */
6227 static tree
6228 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6229 tree lhs, tree rhs)
6231 /* If this is the "or" of two comparisons, we can do something if
6232 the comparisons are NE_EXPR. If this is the "and", we can do something
6233 if the comparisons are EQ_EXPR. I.e.,
6234 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6236 WANTED_CODE is this operation code. For single bit fields, we can
6237 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6238 comparison for one-bit fields. */
6240 enum tree_code wanted_code;
6241 enum tree_code lcode, rcode;
6242 tree ll_arg, lr_arg, rl_arg, rr_arg;
6243 tree ll_inner, lr_inner, rl_inner, rr_inner;
6244 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6245 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6246 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6247 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6248 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6249 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6250 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6251 scalar_int_mode lnmode, rnmode;
6252 tree ll_mask, lr_mask, rl_mask, rr_mask;
6253 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6254 tree l_const, r_const;
6255 tree lntype, rntype, result;
6256 HOST_WIDE_INT first_bit, end_bit;
6257 int volatilep;
6259 /* Start by getting the comparison codes. Fail if anything is volatile.
6260 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6261 it were surrounded with a NE_EXPR. */
6263 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6264 return 0;
6266 lcode = TREE_CODE (lhs);
6267 rcode = TREE_CODE (rhs);
6269 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6271 lhs = build2 (NE_EXPR, truth_type, lhs,
6272 build_int_cst (TREE_TYPE (lhs), 0));
6273 lcode = NE_EXPR;
6276 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6278 rhs = build2 (NE_EXPR, truth_type, rhs,
6279 build_int_cst (TREE_TYPE (rhs), 0));
6280 rcode = NE_EXPR;
6283 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6284 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6285 return 0;
6287 ll_arg = TREE_OPERAND (lhs, 0);
6288 lr_arg = TREE_OPERAND (lhs, 1);
6289 rl_arg = TREE_OPERAND (rhs, 0);
6290 rr_arg = TREE_OPERAND (rhs, 1);
6292 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6293 if (simple_operand_p (ll_arg)
6294 && simple_operand_p (lr_arg))
6296 if (operand_equal_p (ll_arg, rl_arg, 0)
6297 && operand_equal_p (lr_arg, rr_arg, 0))
6299 result = combine_comparisons (loc, code, lcode, rcode,
6300 truth_type, ll_arg, lr_arg);
6301 if (result)
6302 return result;
6304 else if (operand_equal_p (ll_arg, rr_arg, 0)
6305 && operand_equal_p (lr_arg, rl_arg, 0))
6307 result = combine_comparisons (loc, code, lcode,
6308 swap_tree_comparison (rcode),
6309 truth_type, ll_arg, lr_arg);
6310 if (result)
6311 return result;
6315 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6316 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6318 /* If the RHS can be evaluated unconditionally and its operands are
6319 simple, it wins to evaluate the RHS unconditionally on machines
6320 with expensive branches. In this case, this isn't a comparison
6321 that can be merged. */
6323 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6324 false) >= 2
6325 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6326 && simple_operand_p (rl_arg)
6327 && simple_operand_p (rr_arg))
6329 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6330 if (code == TRUTH_OR_EXPR
6331 && lcode == NE_EXPR && integer_zerop (lr_arg)
6332 && rcode == NE_EXPR && integer_zerop (rr_arg)
6333 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6334 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6335 return build2_loc (loc, NE_EXPR, truth_type,
6336 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6337 ll_arg, rl_arg),
6338 build_int_cst (TREE_TYPE (ll_arg), 0));
6340 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6341 if (code == TRUTH_AND_EXPR
6342 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6343 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6344 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6345 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6346 return build2_loc (loc, EQ_EXPR, truth_type,
6347 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6348 ll_arg, rl_arg),
6349 build_int_cst (TREE_TYPE (ll_arg), 0));
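/* Editorial note: each rewrite above trades a second test and branch
   for one cheap bitwise OR, e.g.

     if (x != 0 || y != 0)   ->   if ((x | y) != 0)

   which is only safe because both operands are simple and can be
   evaluated unconditionally.  */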
6352 /* See if the comparisons can be merged. Then get all the parameters for
6353 each side. */
6355 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6356 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6357 return 0;
6359 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6360 volatilep = 0;
6361 ll_inner = decode_field_reference (loc, &ll_arg,
6362 &ll_bitsize, &ll_bitpos, &ll_mode,
6363 &ll_unsignedp, &ll_reversep, &volatilep,
6364 &ll_mask, &ll_and_mask);
6365 lr_inner = decode_field_reference (loc, &lr_arg,
6366 &lr_bitsize, &lr_bitpos, &lr_mode,
6367 &lr_unsignedp, &lr_reversep, &volatilep,
6368 &lr_mask, &lr_and_mask);
6369 rl_inner = decode_field_reference (loc, &rl_arg,
6370 &rl_bitsize, &rl_bitpos, &rl_mode,
6371 &rl_unsignedp, &rl_reversep, &volatilep,
6372 &rl_mask, &rl_and_mask);
6373 rr_inner = decode_field_reference (loc, &rr_arg,
6374 &rr_bitsize, &rr_bitpos, &rr_mode,
6375 &rr_unsignedp, &rr_reversep, &volatilep,
6376 &rr_mask, &rr_and_mask);
6378 /* It must be true that the inner operation on the lhs of each
6379 comparison must be the same if we are to be able to do anything.
6380 Then see if we have constants. If not, the same must be true for
6381 the rhs's. */
6382 if (volatilep
6383 || ll_reversep != rl_reversep
6384 || ll_inner == 0 || rl_inner == 0
6385 || ! operand_equal_p (ll_inner, rl_inner, 0))
6386 return 0;
6388 if (TREE_CODE (lr_arg) == INTEGER_CST
6389 && TREE_CODE (rr_arg) == INTEGER_CST)
6391 l_const = lr_arg, r_const = rr_arg;
6392 lr_reversep = ll_reversep;
6394 else if (lr_reversep != rr_reversep
6395 || lr_inner == 0 || rr_inner == 0
6396 || ! operand_equal_p (lr_inner, rr_inner, 0))
6397 return 0;
6398 else
6399 l_const = r_const = 0;
6401 /* If either comparison code is not correct for our logical operation,
6402 fail. However, we can convert a one-bit comparison against zero into
6403 the opposite comparison against that bit being set in the field. */
6405 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6406 if (lcode != wanted_code)
6408 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6410 /* Make the left operand unsigned, since we are only interested
6411 in the value of one bit. Otherwise we are doing the wrong
6412 thing below. */
6413 ll_unsignedp = 1;
6414 l_const = ll_mask;
6416 else
6417 return 0;
6420 /* This is analogous to the code for l_const above. */
6421 if (rcode != wanted_code)
6423 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6425 rl_unsignedp = 1;
6426 r_const = rl_mask;
6428 else
6429 return 0;
6432 /* See if we can find a mode that contains both fields being compared on
6433 the left. If we can't, fail. Otherwise, update all constants and masks
6434 to be relative to a field of that size. */
6435 first_bit = MIN (ll_bitpos, rl_bitpos);
6436 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6437 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6438 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6439 volatilep, &lnmode))
6440 return 0;
6442 lnbitsize = GET_MODE_BITSIZE (lnmode);
6443 lnbitpos = first_bit & ~ (lnbitsize - 1);
6444 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6445 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6447 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6449 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6450 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6453 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6454 size_int (xll_bitpos));
6455 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6456 size_int (xrl_bitpos));
6458 if (l_const)
6460 l_const = fold_convert_loc (loc, lntype, l_const);
6461 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6462 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6463 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6464 fold_build1_loc (loc, BIT_NOT_EXPR,
6465 lntype, ll_mask))))
6467 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6469 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6472 if (r_const)
6474 r_const = fold_convert_loc (loc, lntype, r_const);
6475 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6476 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6477 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6478 fold_build1_loc (loc, BIT_NOT_EXPR,
6479 lntype, rl_mask))))
6481 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6483 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6487 /* If the right sides are not constant, do the same for them. Also,
6488 disallow this optimization if a size, signedness or storage order
6489 mismatch occurs between the left and right sides. */
6490 if (l_const == 0)
6492 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6493 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6494 || ll_reversep != lr_reversep
6495 /* Make sure the two fields on the right
6496 correspond to the left without being swapped. */
6497 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6498 return 0;
6500 first_bit = MIN (lr_bitpos, rr_bitpos);
6501 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6502 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6503 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6504 volatilep, &rnmode))
6505 return 0;
6507 rnbitsize = GET_MODE_BITSIZE (rnmode);
6508 rnbitpos = first_bit & ~ (rnbitsize - 1);
6509 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6510 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6512 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6514 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6515 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6518 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6519 rntype, lr_mask),
6520 size_int (xlr_bitpos));
6521 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6522 rntype, rr_mask),
6523 size_int (xrr_bitpos));
6525 /* Make a mask that corresponds to both fields being compared.
6526 Do this for both items being compared. If the operands are the
6527 same size and the bits being compared are in the same position
6528 then we can do this by masking both and comparing the masked
6529 results. */
6530 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6531 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6532 if (lnbitsize == rnbitsize
6533 && xll_bitpos == xlr_bitpos
6534 && lnbitpos >= 0
6535 && rnbitpos >= 0)
6537 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6538 lntype, lnbitsize, lnbitpos,
6539 ll_unsignedp || rl_unsignedp, ll_reversep);
6540 if (! all_ones_mask_p (ll_mask, lnbitsize))
6541 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6543 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6544 rntype, rnbitsize, rnbitpos,
6545 lr_unsignedp || rr_unsignedp, lr_reversep);
6546 if (! all_ones_mask_p (lr_mask, rnbitsize))
6547 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6549 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6552 /* There is still another way we can do something: If both pairs of
6553 fields being compared are adjacent, we may be able to make a wider
6554 field containing them both.
6556 Note that we still must mask the lhs/rhs expressions. Furthermore,
6557 the mask must be shifted to account for the shift done by
6558 make_bit_field_ref. */
6559 if (((ll_bitsize + ll_bitpos == rl_bitpos
6560 && lr_bitsize + lr_bitpos == rr_bitpos)
6561 || (ll_bitpos == rl_bitpos + rl_bitsize
6562 && lr_bitpos == rr_bitpos + rr_bitsize))
6563 && ll_bitpos >= 0
6564 && rl_bitpos >= 0
6565 && lr_bitpos >= 0
6566 && rr_bitpos >= 0)
6568 tree type;
6570 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6571 ll_bitsize + rl_bitsize,
6572 MIN (ll_bitpos, rl_bitpos),
6573 ll_unsignedp, ll_reversep);
6574 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6575 lr_bitsize + rr_bitsize,
6576 MIN (lr_bitpos, rr_bitpos),
6577 lr_unsignedp, lr_reversep);
6579 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6580 size_int (MIN (xll_bitpos, xrl_bitpos)));
6581 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6582 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6584 /* Convert to the smaller type before masking out unwanted bits. */
6585 type = lntype;
6586 if (lntype != rntype)
6588 if (lnbitsize > rnbitsize)
6590 lhs = fold_convert_loc (loc, rntype, lhs);
6591 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6592 type = rntype;
6594 else if (lnbitsize < rnbitsize)
6596 rhs = fold_convert_loc (loc, lntype, rhs);
6597 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6598 type = lntype;
6602 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6603 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6605 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6606 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6608 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6611 return 0;
6614 /* Handle the case of comparisons with constants. If there is something in
6615 common between the masks, those bits of the constants must be the same.
6616 If not, the condition is always false. Test for this to avoid generating
6617 incorrect code below. */
6618 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6619 if (! integer_zerop (result)
6620 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6621 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6623 if (wanted_code == NE_EXPR)
6625 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6626 return constant_boolean_node (true, truth_type);
6628 else
6630 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6631 return constant_boolean_node (false, truth_type);
6635 if (lnbitpos < 0)
6636 return 0;
6638 /* Construct the expression we will return. First get the component
6639 reference we will make. Unless the mask is all ones the width of
6640 that field, perform the mask operation. Then compare with the
6641 merged constant. */
6642 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6643 lntype, lnbitsize, lnbitpos,
6644 ll_unsignedp || rl_unsignedp, ll_reversep);
6646 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6647 if (! all_ones_mask_p (ll_mask, lnbitsize))
6648 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6650 return build2_loc (loc, wanted_code, truth_type, result,
6651 const_binop (BIT_IOR_EXPR, l_const, r_const));
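/* Editorial example of the whole-function effect, assuming the common
   layout where both bitfields share one storage unit:

     struct s { unsigned a : 4; unsigned b : 4; };
     int f (struct s *p) { return p->a == 2 && p->b == 4; }

   can fold to a single load and compare, in effect

     *(unsigned char *) p == 0x42

   with the exact constant depending on endianness and layout.  */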
6654 /* T is an integer expression that is being multiplied, divided, or taken a
6655 modulus (CODE says which and what kind of divide or modulus) by a
6656 constant C. See if we can eliminate that operation by folding it with
6657 other operations already in T. WIDE_TYPE, if non-null, is a type that
6658 should be used for the computation if wider than our type.
6660 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6661 (X * 2) + (Y * 4). We must, however, be assured that either the original
6662 expression would not overflow or that overflow is undefined for the type
6663 in the language in question.
6665 If we return a non-null expression, it is an equivalent form of the
6666 original computation, but need not be in the original type.
6668 We set *STRICT_OVERFLOW_P to true if the return value depends on
6669 signed overflow being undefined. Otherwise we do not change
6670 *STRICT_OVERFLOW_P. */
6672 static tree
6673 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6674 bool *strict_overflow_p)
6676 /* To avoid exponential search depth, refuse to allow recursion past
6677 three levels. Beyond that (1) it's highly unlikely that we'll find
6678 something interesting and (2) we've probably processed it before
6679 when we built the inner expression. */
6681 static int depth;
6682 tree ret;
6684 if (depth > 3)
6685 return NULL;
6687 depth++;
6688 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6689 depth--;
6691 return ret;
6694 static tree
6695 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6696 bool *strict_overflow_p)
6698 tree type = TREE_TYPE (t);
6699 enum tree_code tcode = TREE_CODE (t);
6700 tree ctype = (wide_type != 0
6701 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6702 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6703 ? wide_type : type);
6704 tree t1, t2;
6705 int same_p = tcode == code;
6706 tree op0 = NULL_TREE, op1 = NULL_TREE;
6707 bool sub_strict_overflow_p;
6709 /* Don't deal with constants of zero here; they confuse the code below. */
6710 if (integer_zerop (c))
6711 return NULL_TREE;
6713 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6714 op0 = TREE_OPERAND (t, 0);
6716 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6717 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6719 /* Note that we need not handle conditional operations here since fold
6720 already handles those cases. So just do arithmetic here. */
6721 switch (tcode)
6723 case INTEGER_CST:
6724 /* For a constant, we can always simplify if we are a multiply
6725 or (for divide and modulus) if it is a multiple of our constant. */
6726 if (code == MULT_EXPR
6727 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6728 TYPE_SIGN (type)))
6730 tree tem = const_binop (code, fold_convert (ctype, t),
6731 fold_convert (ctype, c));
6732 /* If the multiplication overflowed, we lost information on it.
6733 See PR68142 and PR69845. */
6734 if (TREE_OVERFLOW (tem))
6735 return NULL_TREE;
6736 return tem;
6738 break;
6740 CASE_CONVERT: case NON_LVALUE_EXPR:
6741 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6742 break;
6743 /* If op0 is an expression ... */
6744 if ((COMPARISON_CLASS_P (op0)
6745 || UNARY_CLASS_P (op0)
6746 || BINARY_CLASS_P (op0)
6747 || VL_EXP_CLASS_P (op0)
6748 || EXPRESSION_CLASS_P (op0))
6749 /* ... and has wrapping overflow, and its type is smaller
6750 than ctype, then we cannot pass through as widening. */
6751 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6752 && (TYPE_PRECISION (ctype)
6753 > TYPE_PRECISION (TREE_TYPE (op0))))
6754 /* ... or this is a truncation (t is narrower than op0),
6755 then we cannot pass through this narrowing. */
6756 || (TYPE_PRECISION (type)
6757 < TYPE_PRECISION (TREE_TYPE (op0)))
6758 /* ... or signedness changes for division or modulus,
6759 then we cannot pass through this conversion. */
6760 || (code != MULT_EXPR
6761 && (TYPE_UNSIGNED (ctype)
6762 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6763 /* ... or has undefined overflow while the converted to
6764 type has not, we cannot do the operation in the inner type
6765 as that would introduce undefined overflow. */
6766 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6767 && !TYPE_OVERFLOW_UNDEFINED (type))))
6768 break;
6770 /* Pass the constant down and see if we can make a simplification. If
6771 we can, replace this expression with the inner simplification for
6772 possible later conversion to our or some other type. */
6773 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6774 && TREE_CODE (t2) == INTEGER_CST
6775 && !TREE_OVERFLOW (t2)
6776 && (t1 = extract_muldiv (op0, t2, code,
6777 code == MULT_EXPR ? ctype : NULL_TREE,
6778 strict_overflow_p)) != 0)
6779 return t1;
6780 break;
6782 case ABS_EXPR:
6783 /* If widening the type changes it from signed to unsigned, then we
6784 must avoid building ABS_EXPR itself as unsigned. */
6785 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6787 tree cstype = (*signed_type_for) (ctype);
6788 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6789 != 0)
6791 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6792 return fold_convert (ctype, t1);
6794 break;
6796 /* If the constant is negative, we cannot simplify this. */
6797 if (tree_int_cst_sgn (c) == -1)
6798 break;
6799 /* FALLTHROUGH */
6800 case NEGATE_EXPR:
6801 /* For division and modulus, type can't be unsigned, as e.g.
6802 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6803 For signed types, even with wrapping overflow, this is fine. */
6804 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6805 break;
6806 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6807 != 0)
6808 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6809 break;
6811 case MIN_EXPR: case MAX_EXPR:
6812 /* If widening the type changes the signedness, then we can't perform
6813 this optimization as that changes the result. */
6814 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6815 break;
6817 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6818 sub_strict_overflow_p = false;
6819 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6820 &sub_strict_overflow_p)) != 0
6821 && (t2 = extract_muldiv (op1, c, code, wide_type,
6822 &sub_strict_overflow_p)) != 0)
6824 if (tree_int_cst_sgn (c) < 0)
6825 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6826 if (sub_strict_overflow_p)
6827 *strict_overflow_p = true;
6828 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6829 fold_convert (ctype, t2));
6831 break;
6833 case LSHIFT_EXPR: case RSHIFT_EXPR:
6834 /* If the second operand is constant, this is a multiplication
6835 or floor division by a power of two, so we can treat it that
6836 way unless the multiplier or divisor overflows. Signed
6837 left-shift overflow is implementation-defined rather than
6838 undefined in C90, so do not convert signed left shift into
6839 multiplication. */
6840 if (TREE_CODE (op1) == INTEGER_CST
6841 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6842 /* const_binop may not detect overflow correctly,
6843 so check for it explicitly here. */
6844 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6845 wi::to_wide (op1))
6846 && (t1 = fold_convert (ctype,
6847 const_binop (LSHIFT_EXPR, size_one_node,
6848 op1))) != 0
6849 && !TREE_OVERFLOW (t1))
6850 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6851 ? MULT_EXPR : FLOOR_DIV_EXPR,
6852 ctype,
6853 fold_convert (ctype, op0),
6854 t1),
6855 c, code, wide_type, strict_overflow_p);
6856 break;
6858 case PLUS_EXPR: case MINUS_EXPR:
6859 /* See if we can eliminate the operation on both sides. If we can, we
6860 can return a new PLUS or MINUS. If we can't, the only remaining
6861 cases where we can do anything are if the second operand is a
6862 constant. */
6863 sub_strict_overflow_p = false;
6864 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6865 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6866 if (t1 != 0 && t2 != 0
6867 && TYPE_OVERFLOW_WRAPS (ctype)
6868 && (code == MULT_EXPR
6869 /* If not multiplication, we can only do this if both operands
6870 are divisible by c. */
6871 || (multiple_of_p (ctype, op0, c)
6872 && multiple_of_p (ctype, op1, c))))
6874 if (sub_strict_overflow_p)
6875 *strict_overflow_p = true;
6876 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6877 fold_convert (ctype, t2));
6880 /* If this was a subtraction, negate OP1 and set it to be an addition.
6881 This simplifies the logic below. */
6882 if (tcode == MINUS_EXPR)
6884 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6885 /* If OP1 was not easily negatable, the constant may be OP0. */
6886 if (TREE_CODE (op0) == INTEGER_CST)
6888 std::swap (op0, op1);
6889 std::swap (t1, t2);
6893 if (TREE_CODE (op1) != INTEGER_CST)
6894 break;
6896 /* If either OP1 or C are negative, this optimization is not safe for
6897 some of the division and remainder types while for others we need
6898 to change the code. */
6899 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6901 if (code == CEIL_DIV_EXPR)
6902 code = FLOOR_DIV_EXPR;
6903 else if (code == FLOOR_DIV_EXPR)
6904 code = CEIL_DIV_EXPR;
6905 else if (code != MULT_EXPR
6906 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6907 break;
6910 /* If it's a multiply or a division/modulus operation of a multiple
6911 of our constant, do the operation and verify it doesn't overflow. */
6912 if (code == MULT_EXPR
6913 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6914 TYPE_SIGN (type)))
6916 op1 = const_binop (code, fold_convert (ctype, op1),
6917 fold_convert (ctype, c));
6918 /* We allow the constant to overflow with wrapping semantics. */
6919 if (op1 == 0
6920 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6921 break;
6923 else
6924 break;
6926 /* If we have an unsigned type, we cannot widen the operation since it
6927 will change the result if the original computation overflowed. */
6928 if (TYPE_UNSIGNED (ctype) && ctype != type)
6929 break;
6931 /* The last case is if we are a multiply. In that case, we can
6932 apply the distributive law to commute the multiply and addition
6933 if the multiplication of the constants doesn't overflow
6934 and overflow is defined. With undefined overflow
6935 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6936 But fold_plusminus_mult_expr would factor back any power-of-two
6937 value so do not distribute in the first place in this case. */
6938 if (code == MULT_EXPR
6939 && TYPE_OVERFLOW_WRAPS (ctype)
6940 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6941 return fold_build2 (tcode, ctype,
6942 fold_build2 (code, ctype,
6943 fold_convert (ctype, op0),
6944 fold_convert (ctype, c)),
6945 op1);
6947 break;
6949 case MULT_EXPR:
6950 /* We have a special case here if we are doing something like
6951 (C * 8) % 4 since we know that's zero. */
6952 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6953 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6954 /* If the multiplication can overflow we cannot optimize this. */
6955 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6956 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6957 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6958 TYPE_SIGN (type)))
6960 *strict_overflow_p = true;
6961 return omit_one_operand (type, integer_zero_node, op0);
6964 /* ... fall through ... */
6966 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6967 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6968 /* If we can extract our operation from the LHS, do so and return a
6969 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6970 do something only if the second operand is a constant. */
6971 if (same_p
6972 && TYPE_OVERFLOW_WRAPS (ctype)
6973 && (t1 = extract_muldiv (op0, c, code, wide_type,
6974 strict_overflow_p)) != 0)
6975 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6976 fold_convert (ctype, op1));
6977 else if (tcode == MULT_EXPR && code == MULT_EXPR
6978 && TYPE_OVERFLOW_WRAPS (ctype)
6979 && (t1 = extract_muldiv (op1, c, code, wide_type,
6980 strict_overflow_p)) != 0)
6981 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6982 fold_convert (ctype, t1));
6983 else if (TREE_CODE (op1) != INTEGER_CST)
6984 return 0;
6986 /* If these are the same operation types, we can associate them
6987 assuming no overflow. */
6988 if (tcode == code)
6990 bool overflow_p = false;
6991 wi::overflow_type overflow_mul;
6992 signop sign = TYPE_SIGN (ctype);
6993 unsigned prec = TYPE_PRECISION (ctype);
6994 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6995 wi::to_wide (c, prec),
6996 sign, &overflow_mul);
6997 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6998 if (overflow_mul
6999 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7000 overflow_p = true;
7001 if (!overflow_p)
7002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7003 wide_int_to_tree (ctype, mul));
7006 /* If these operations "cancel" each other, we have the main
7007 optimizations of this pass, which occur when either constant is a
7008 multiple of the other, in which case we replace this with an
7009 operation of either CODE or TCODE.
7011 If we have an unsigned type, we cannot do this since it will change
7012 the result if the original computation overflowed. */
7013 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7014 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7015 || (tcode == MULT_EXPR
7016 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7017 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7018 && code != MULT_EXPR)))
7020 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7021 TYPE_SIGN (type)))
7023 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7024 *strict_overflow_p = true;
7025 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7026 fold_convert (ctype,
7027 const_binop (TRUNC_DIV_EXPR,
7028 op1, c)));
7030 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7031 TYPE_SIGN (type)))
7033 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7034 *strict_overflow_p = true;
7035 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7036 fold_convert (ctype,
7037 const_binop (TRUNC_DIV_EXPR,
7038 c, op1)));
7041 break;
7043 default:
7044 break;
7047 return 0;
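/* Editorial example for extract_muldiv: when overflow is undefined (or
   the terms are known multiples of the constant),

     long f (long x, long y) { return (x * 8 + y * 16) / 4; }

   folds as if written  x * 2 + y * 4,  pushing the division into each
   term because every factor is a multiple of 4.  */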
7050 /* Return a node which has the indicated constant VALUE (either 0 or
7051 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7052 and is of the indicated TYPE. */
7054 tree
7055 constant_boolean_node (bool value, tree type)
7057 if (type == integer_type_node)
7058 return value ? integer_one_node : integer_zero_node;
7059 else if (type == boolean_type_node)
7060 return value ? boolean_true_node : boolean_false_node;
7061 else if (TREE_CODE (type) == VECTOR_TYPE)
7062 return build_vector_from_val (type,
7063 build_int_cst (TREE_TYPE (type),
7064 value ? -1 : 0));
7065 else
7066 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7070 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7071 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7072 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7073 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7074 COND is the first argument to CODE; otherwise (as in the example
7075 given here), it is the second argument. TYPE is the type of the
7076 original expression. Return NULL_TREE if no simplification is
7077 possible. */
7079 static tree
7080 fold_binary_op_with_conditional_arg (location_t loc,
7081 enum tree_code code,
7082 tree type, tree op0, tree op1,
7083 tree cond, tree arg, int cond_first_p)
7085 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7086 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7087 tree test, true_value, false_value;
7088 tree lhs = NULL_TREE;
7089 tree rhs = NULL_TREE;
7090 enum tree_code cond_code = COND_EXPR;
7092 /* Do not move possibly trapping operations into the conditional as this
7093 pessimizes code and causes gimplification issues when applied late. */
7094 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7095 ANY_INTEGRAL_TYPE_P (type)
7096 && TYPE_OVERFLOW_TRAPS (type), op1))
7097 return NULL_TREE;
7099 if (TREE_CODE (cond) == COND_EXPR
7100 || TREE_CODE (cond) == VEC_COND_EXPR)
7102 test = TREE_OPERAND (cond, 0);
7103 true_value = TREE_OPERAND (cond, 1);
7104 false_value = TREE_OPERAND (cond, 2);
7105 /* If this arm is a throw expression (its type is void), then it
7106 does not make sense to try to perform a logical or arithmetic
7107 operation involving it. */
7108 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7109 lhs = true_value;
7110 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7111 rhs = false_value;
7113 else if (!(TREE_CODE (type) != VECTOR_TYPE
7114 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7116 tree testtype = TREE_TYPE (cond);
7117 test = cond;
7118 true_value = constant_boolean_node (true, testtype);
7119 false_value = constant_boolean_node (false, testtype);
7121 else
7122 /* Detect the case of mixing vector and scalar types - bail out. */
7123 return NULL_TREE;
7125 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7126 cond_code = VEC_COND_EXPR;
7128 /* This transformation is only worthwhile if we don't have to wrap ARG
7129 in a SAVE_EXPR and the operation can be simplified without recursing
7130 on at least one of the branches once it's pushed inside the COND_EXPR. */
7131 if (!TREE_CONSTANT (arg)
7132 && (TREE_SIDE_EFFECTS (arg)
7133 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7134 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7135 return NULL_TREE;
7137 arg = fold_convert_loc (loc, arg_type, arg);
7138 if (lhs == 0)
7140 true_value = fold_convert_loc (loc, cond_type, true_value);
7141 if (cond_first_p)
7142 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7143 else
7144 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7146 if (rhs == 0)
7148 false_value = fold_convert_loc (loc, cond_type, false_value);
7149 if (cond_first_p)
7150 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7151 else
7152 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7155 /* Check that we have simplified at least one of the branches. */
7156 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7157 return NULL_TREE;
7159 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
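/* Editorial example: with a constant ARG the transformation pays off:

     5 + (b ? 1 : 0)   ->   b ? 6 : 5

   because both new arms fold to constants.  a + (b ? x : y) is left
   alone when neither a + x nor a + y simplifies, per the checks
   above.  */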
7163 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7165 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7166 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7167 if ARG - ZERO_ARG is the same as ARG.
7169 If ARG is NULL, check for any value of type TYPE.
7171 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7172 and finite. The problematic cases are when X is zero, and its mode
7173 has signed zeros. In the case of rounding towards -infinity,
7174 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7175 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7177 bool
7178 fold_real_zero_addition_p (const_tree type, const_tree arg,
7179 const_tree zero_arg, int negate)
7181 if (!real_zerop (zero_arg))
7182 return false;
7184 /* Don't allow the fold with -fsignaling-nans. */
7185 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7186 return false;
7188 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7189 if (!HONOR_SIGNED_ZEROS (type))
7190 return true;
7192 /* There is no case that is safe for all rounding modes. */
7193 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7194 return false;
7196 /* In a vector or complex, we would need to check the sign of all zeros. */
7197 if (TREE_CODE (zero_arg) == VECTOR_CST)
7198 zero_arg = uniform_vector_p (zero_arg);
7199 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7200 return false;
7202 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7203 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7204 negate = !negate;
7206 /* The mode has signed zeros, and we have to honor their sign.
7207 In this situation, there are only two cases we can return true for.
7208 (i) X - 0 is the same as X with default rounding.
7209 (ii) X + 0 is X when X can't possibly be -0.0. */
7210 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
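/* Editorial demonstration of why signed zeros block the fold:

     double x = -0.0;
     double y = x + 0.0;   // +0.0 under round-to-nearest, not x
     double z = x - 0.0;   // -0.0, so X - 0 is safe: case (i) above
*/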
7213 /* Subroutine of match.pd that optimizes comparisons of a division by
7214 a nonzero integer constant against an integer constant, i.e.
7215 X/C1 op C2.
7217 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7218 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs.
7220 enum tree_code
7221 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7222 tree *hi, bool *neg_overflow)
7224 tree prod, tmp, type = TREE_TYPE (c1);
7225 signop sign = TYPE_SIGN (type);
7226 wi::overflow_type overflow;
7228 /* We have to do this the hard way to detect unsigned overflow.
7229 prod = int_const_binop (MULT_EXPR, c1, c2); */
7230 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7231 prod = force_fit_type (type, val, -1, overflow);
7232 *neg_overflow = false;
7234 if (sign == UNSIGNED)
7236 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7237 *lo = prod;
7239 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7240 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7241 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7243 else if (tree_int_cst_sgn (c1) >= 0)
7245 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7246 switch (tree_int_cst_sgn (c2))
7248 case -1:
7249 *neg_overflow = true;
7250 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7251 *hi = prod;
7252 break;
7254 case 0:
7255 *lo = fold_negate_const (tmp, type);
7256 *hi = tmp;
7257 break;
7259 case 1:
7260 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7261 *lo = prod;
7262 break;
7264 default:
7265 gcc_unreachable ();
7268 else
7270 /* A negative divisor reverses the relational operators. */
7271 code = swap_tree_comparison (code);
7273 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7274 switch (tree_int_cst_sgn (c2))
7276 case -1:
7277 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7278 *lo = prod;
7279 break;
7281 case 0:
7282 *hi = fold_negate_const (tmp, type);
7283 *lo = tmp;
7284 break;
7286 case 1:
7287 *neg_overflow = true;
7288 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7289 *hi = prod;
7290 break;
7292 default:
7293 gcc_unreachable ();
7297 if (code != EQ_EXPR && code != NE_EXPR)
7298 return code;
7300 if (TREE_OVERFLOW (*lo)
7301 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7302 *lo = NULL_TREE;
7303 if (TREE_OVERFLOW (*hi)
7304 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7305 *hi = NULL_TREE;
7307 return code;
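
/* Worked example (illustrative, not part of the original file): for an
   unsigned X, C1 = 4 and C2 = 3 give prod = 12 and tmp = 3, so *lo = 12
   and *hi = 15: X / 4 == 3 holds iff 12 <= X && X <= 15.  A host-side
   check of that range:

       #include <assert.h>
       int
       main (void)
       {
         for (unsigned x = 0; x < 64; x++)
           assert ((x / 4 == 3) == (x >= 12 && x <= 15));
         return 0;
       }
*/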
7311 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7312 equality/inequality test, then return a simplified form of the test
7313 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
7314 result type. */
7316 static tree
7317 fold_single_bit_test_into_sign_test (location_t loc,
7318 enum tree_code code, tree arg0, tree arg1,
7319 tree result_type)
7321 /* If this is testing a single bit, we can optimize the test. */
7322 if ((code == NE_EXPR || code == EQ_EXPR)
7323 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7324 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7326 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7327 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7328 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7330 if (arg00 != NULL_TREE
7331 /* This is only a win if casting to a signed type is cheap,
7332 i.e. when arg00's type is not a partial mode. */
7333 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7335 tree stype = signed_type_for (TREE_TYPE (arg00));
7336 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7337 result_type,
7338 fold_convert_loc (loc, stype, arg00),
7339 build_int_cst (stype, 0));
7343 return NULL_TREE;
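
/* Illustrative sketch (not part of the original file): on a 32-bit,
   two's-complement int A, the rewrite above turns (A & 0x80000000) != 0
   into A < 0, because the mask is exactly the sign bit:

       #include <assert.h>
       #include <stdint.h>
       int
       main (void)
       {
         int32_t a[] = { -5, 0, 7, INT32_MIN };
         for (int i = 0; i < 4; i++)
           assert (((a[i] & INT32_MIN) != 0) == (a[i] < 0));
         return 0;
       }
*/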
7346 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7347 equality/inequality test, then return a simplified form of
7348 the test using shifts and logical operations. Otherwise return
7349 NULL. RESULT_TYPE is the desired result type. */
7351 tree
7352 fold_single_bit_test (location_t loc, enum tree_code code,
7353 tree arg0, tree arg1, tree result_type)
7355 /* If this is testing a single bit, we can optimize the test. */
7356 if ((code == NE_EXPR || code == EQ_EXPR)
7357 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7358 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7360 tree inner = TREE_OPERAND (arg0, 0);
7361 tree type = TREE_TYPE (arg0);
7362 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7363 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7364 int ops_unsigned;
7365 tree signed_type, unsigned_type, intermediate_type;
7366 tree tem, one;
7368 /* First, see if we can fold the single bit test into a sign-bit
7369 test. */
7370 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7371 result_type);
7372 if (tem)
7373 return tem;
7375 /* Otherwise we have (A & C) != 0 where C is a single bit,
7376 convert that into ((A >> C2) & 1), where C2 = log2(C).
7377 Similarly for (A & C) == 0. */
7379 /* If INNER is a right shift by a constant and it plus BITNUM does
7380 not overflow, adjust BITNUM and INNER. */
7381 if (TREE_CODE (inner) == RSHIFT_EXPR
7382 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7383 && bitnum < TYPE_PRECISION (type)
7384 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7385 TYPE_PRECISION (type) - bitnum))
7387 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7388 inner = TREE_OPERAND (inner, 0);
7391 /* If we are going to be able to omit the AND below, we must do our
7392 operations as unsigned. If we must use the AND, we have a choice.
7393 Normally unsigned is faster, but for some machines signed is. */
7394 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7395 && !flag_syntax_only) ? 0 : 1;
7397 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7398 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7399 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7400 inner = fold_convert_loc (loc, intermediate_type, inner);
7402 if (bitnum != 0)
7403 inner = build2 (RSHIFT_EXPR, intermediate_type,
7404 inner, size_int (bitnum));
7406 one = build_int_cst (intermediate_type, 1);
7408 if (code == EQ_EXPR)
7409 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7411 /* Put the AND last so it can combine with more things. */
7412 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7414 /* Make sure to return the proper type. */
7415 inner = fold_convert_loc (loc, result_type, inner);
7417 return inner;
7419 return NULL_TREE;
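
/* Illustrative sketch (not part of the original file): with
   C = 8 = 1 << 3, the shift form rewrites (A & 8) != 0 as
   ((A >> 3) & 1), and (A & 8) == 0 as (((A >> 3) ^ 1) & 1):

       #include <assert.h>
       int
       main (void)
       {
         for (unsigned a = 0; a < 32; a++)
           {
             assert (((a & 8) != 0) == ((a >> 3) & 1));
             assert (((a & 8) == 0) == (((a >> 3) ^ 1) & 1));
           }
         return 0;
       }
*/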
7422 /* Test whether it is preferable to swap two operands, ARG0 and
7423 ARG1, for example because ARG0 is an integer constant and ARG1
7424 isn't. */
7426 bool
7427 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7429 if (CONSTANT_CLASS_P (arg1))
7430 return false;
7431 if (CONSTANT_CLASS_P (arg0))
7432 return true;
7434 STRIP_NOPS (arg0);
7435 STRIP_NOPS (arg1);
7437 if (TREE_CONSTANT (arg1))
7438 return false;
7439 if (TREE_CONSTANT (arg0))
7440 return true;
7442 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7443 for commutative and comparison operators. Ensuring a canonical
7444 form allows the optimizers to find additional redundancies without
7445 having to explicitly check for both orderings. */
7446 if (TREE_CODE (arg0) == SSA_NAME
7447 && TREE_CODE (arg1) == SSA_NAME
7448 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7449 return true;
7451 /* Put SSA_NAMEs last. */
7452 if (TREE_CODE (arg1) == SSA_NAME)
7453 return false;
7454 if (TREE_CODE (arg0) == SSA_NAME)
7455 return true;
7457 /* Put variables last. */
7458 if (DECL_P (arg1))
7459 return false;
7460 if (DECL_P (arg0))
7461 return true;
7463 return false;
7467 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7468 means A >= Y && A != MAX, but in this case we know that
7469 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7471 static tree
7472 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7474 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7476 if (TREE_CODE (bound) == LT_EXPR)
7477 a = TREE_OPERAND (bound, 0);
7478 else if (TREE_CODE (bound) == GT_EXPR)
7479 a = TREE_OPERAND (bound, 1);
7480 else
7481 return NULL_TREE;
7483 typea = TREE_TYPE (a);
7484 if (!INTEGRAL_TYPE_P (typea)
7485 && !POINTER_TYPE_P (typea))
7486 return NULL_TREE;
7488 if (TREE_CODE (ineq) == LT_EXPR)
7490 a1 = TREE_OPERAND (ineq, 1);
7491 y = TREE_OPERAND (ineq, 0);
7493 else if (TREE_CODE (ineq) == GT_EXPR)
7495 a1 = TREE_OPERAND (ineq, 0);
7496 y = TREE_OPERAND (ineq, 1);
7498 else
7499 return NULL_TREE;
7501 if (TREE_TYPE (a1) != typea)
7502 return NULL_TREE;
7504 if (POINTER_TYPE_P (typea))
7506 /* Convert the pointers to integers before taking the difference. */
7507 tree ta = fold_convert_loc (loc, ssizetype, a);
7508 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7509 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7511 else
7512 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7514 if (!diff || !integer_onep (diff))
7515 return NULL_TREE;
7517 return fold_build2_loc (loc, GE_EXPR, type, a, y);
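
/* Worked example (illustrative, not part of the original file): for
   ordinary ints, A + 1 > Y is the same as A >= Y whenever A + 1 does
   not overflow, which the bound A < X guarantees here:

       #include <assert.h>
       int
       main (void)
       {
         const int x = 6;
         for (int a = -5; a < 5; a++)
           for (int y = -5; y < 5; y++)
             assert ((a < x && a + 1 > y) == (a < x && a >= y));
         return 0;
       }
*/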
7520 /* Fold a sum or difference of at least one multiplication.
7521 Returns the folded tree or NULL if no simplification could be made. */
7523 static tree
7524 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7525 tree arg0, tree arg1)
7527 tree arg00, arg01, arg10, arg11;
7528 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7530 /* (A * C) +- (B * C) -> (A+-B) * C.
7531 (A * C) +- A -> A * (C+-1).
7532 We are most concerned about the case where C is a constant,
7533 but other combinations show up during loop reduction. Since
7534 it is not difficult, try all four possibilities. */
7536 if (TREE_CODE (arg0) == MULT_EXPR)
7538 arg00 = TREE_OPERAND (arg0, 0);
7539 arg01 = TREE_OPERAND (arg0, 1);
7541 else if (TREE_CODE (arg0) == INTEGER_CST)
7543 arg00 = build_one_cst (type);
7544 arg01 = arg0;
7546 else
7548 /* We cannot generate constant 1 for fract. */
7549 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7550 return NULL_TREE;
7551 arg00 = arg0;
7552 arg01 = build_one_cst (type);
7554 if (TREE_CODE (arg1) == MULT_EXPR)
7556 arg10 = TREE_OPERAND (arg1, 0);
7557 arg11 = TREE_OPERAND (arg1, 1);
7559 else if (TREE_CODE (arg1) == INTEGER_CST)
7561 arg10 = build_one_cst (type);
7562 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7563 the purpose of this canonicalization. */
7564 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7565 && negate_expr_p (arg1)
7566 && code == PLUS_EXPR)
7568 arg11 = negate_expr (arg1);
7569 code = MINUS_EXPR;
7571 else
7572 arg11 = arg1;
7574 else
7576 /* We cannot generate constant 1 for fract. */
7577 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7578 return NULL_TREE;
7579 arg10 = arg1;
7580 arg11 = build_one_cst (type);
7582 same = NULL_TREE;
7584 /* Prefer factoring a common non-constant. */
7585 if (operand_equal_p (arg00, arg10, 0))
7586 same = arg00, alt0 = arg01, alt1 = arg11;
7587 else if (operand_equal_p (arg01, arg11, 0))
7588 same = arg01, alt0 = arg00, alt1 = arg10;
7589 else if (operand_equal_p (arg00, arg11, 0))
7590 same = arg00, alt0 = arg01, alt1 = arg10;
7591 else if (operand_equal_p (arg01, arg10, 0))
7592 same = arg01, alt0 = arg00, alt1 = arg11;
7594 /* No identical multiplicands; see if we can find a common
7595 power-of-two factor in non-power-of-two multiplies. This
7596 can help in multi-dimensional array access. */
7597 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7599 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7600 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7601 HOST_WIDE_INT tmp;
7602 bool swap = false;
7603 tree maybe_same;
7605 /* Move min of absolute values to int11. */
7606 if (absu_hwi (int01) < absu_hwi (int11))
7608 tmp = int01, int01 = int11, int11 = tmp;
7609 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7610 maybe_same = arg01;
7611 swap = true;
7613 else
7614 maybe_same = arg11;
7616 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7617 if (factor > 1
7618 && pow2p_hwi (factor)
7619 && (int01 & (factor - 1)) == 0
7620 /* The remainder should not be a constant, otherwise we
7621 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7622 increase the number of multiplications necessary. */
7623 && TREE_CODE (arg10) != INTEGER_CST)
7625 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7626 build_int_cst (TREE_TYPE (arg00),
7627 int01 / int11));
7628 alt1 = arg10;
7629 same = maybe_same;
7630 if (swap)
7631 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7635 if (!same)
7636 return NULL_TREE;
7638 if (! ANY_INTEGRAL_TYPE_P (type)
7639 || TYPE_OVERFLOW_WRAPS (type)
7640 /* We are neither factoring zero nor minus one. */
7641 || TREE_CODE (same) == INTEGER_CST)
7642 return fold_build2_loc (loc, MULT_EXPR, type,
7643 fold_build2_loc (loc, code, type,
7644 fold_convert_loc (loc, type, alt0),
7645 fold_convert_loc (loc, type, alt1)),
7646 fold_convert_loc (loc, type, same));
7648 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7649 same may be minus one and thus the multiplication may overflow. Perform
7650 the sum operation in an unsigned type. */
7651 tree utype = unsigned_type_for (type);
7652 tree tem = fold_build2_loc (loc, code, utype,
7653 fold_convert_loc (loc, utype, alt0),
7654 fold_convert_loc (loc, utype, alt1));
7655 /* If the sum evaluated to a constant that is not -INF, the multiplication
7656 cannot overflow. */
7657 if (TREE_CODE (tem) == INTEGER_CST
7658 && (wi::to_wide (tem)
7659 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7660 return fold_build2_loc (loc, MULT_EXPR, type,
7661 fold_convert (type, tem), same);
7663 /* Do not resort to unsigned multiplication because
7664 we lose the no-overflow property of the expression. */
7665 return NULL_TREE;
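
/* Worked examples (illustrative, not part of the original file):

       x*6 + x*2  ->  x * (6 + 2)       -- identical multiplicand x
       i*4 + j*2  ->  (i*2 + j) * 2     -- common power-of-two factor

   A host-side check of both identities:

       #include <assert.h>
       int
       main (void)
       {
         int x = 7, i = 3, j = 5;
         assert (x*6 + x*2 == x * (6 + 2));
         assert (i*4 + j*2 == (i*2 + j) * 2);
         return 0;
       }
*/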
7668 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7669 specified by EXPR into the buffer PTR of length LEN bytes.
7670 Return the number of bytes placed in the buffer, or zero
7671 upon failure. */
7673 static int
7674 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7676 tree type = TREE_TYPE (expr);
7677 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7678 int byte, offset, word, words;
7679 unsigned char value;
7681 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7682 return 0;
7683 if (off == -1)
7684 off = 0;
7686 if (ptr == NULL)
7687 /* Dry run. */
7688 return MIN (len, total_bytes - off);
7690 words = total_bytes / UNITS_PER_WORD;
7692 for (byte = 0; byte < total_bytes; byte++)
7694 int bitpos = byte * BITS_PER_UNIT;
7695 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7696 number of bytes. */
7697 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7699 if (total_bytes > UNITS_PER_WORD)
7701 word = byte / UNITS_PER_WORD;
7702 if (WORDS_BIG_ENDIAN)
7703 word = (words - 1) - word;
7704 offset = word * UNITS_PER_WORD;
7705 if (BYTES_BIG_ENDIAN)
7706 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7707 else
7708 offset += byte % UNITS_PER_WORD;
7710 else
7711 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7712 if (offset >= off && offset - off < len)
7713 ptr[offset - off] = value;
7715 return MIN (len, total_bytes - off);
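
/* Example encoding (illustrative, not part of the original file): with
   8-bit bytes, a 32-bit INTEGER_CST of value 0x01020304 encodes as
   { 0x04, 0x03, 0x02, 0x01 } on a little-endian target and as
   { 0x01, 0x02, 0x03, 0x04 } on a big-endian one.  The host-side
   analogue for the host's own byte order:

       #include <string.h>
       #include <stdint.h>
       void
       encode_u32 (unsigned char buf[4], uint32_t v)
       {
         memcpy (buf, &v, sizeof v);   // host-order counterpart
       }
*/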
7719 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7720 specified by EXPR into the buffer PTR of length LEN bytes.
7721 Return the number of bytes placed in the buffer, or zero
7722 upon failure. */
7724 static int
7725 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7727 tree type = TREE_TYPE (expr);
7728 scalar_mode mode = SCALAR_TYPE_MODE (type);
7729 int total_bytes = GET_MODE_SIZE (mode);
7730 FIXED_VALUE_TYPE value;
7731 tree i_value, i_type;
7733 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7734 return 0;
7736 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7738 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7739 return 0;
7741 value = TREE_FIXED_CST (expr);
7742 i_value = double_int_to_tree (i_type, value.data);
7744 return native_encode_int (i_value, ptr, len, off);
7748 /* Subroutine of native_encode_expr. Encode the REAL_CST
7749 specified by EXPR into the buffer PTR of length LEN bytes.
7750 Return the number of bytes placed in the buffer, or zero
7751 upon failure. */
7753 static int
7754 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7756 tree type = TREE_TYPE (expr);
7757 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7758 int byte, offset, word, words, bitpos;
7759 unsigned char value;
7761 /* There are always 32 bits in each long, no matter the size of
7762 the host's long. We handle floating point representations with
7763 up to 192 bits. */
7764 long tmp[6];
7766 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7767 return 0;
7768 if (off == -1)
7769 off = 0;
7771 if (ptr == NULL)
7772 /* Dry run. */
7773 return MIN (len, total_bytes - off);
7775 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7777 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7779 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7780 bitpos += BITS_PER_UNIT)
7782 byte = (bitpos / BITS_PER_UNIT) & 3;
7783 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7785 if (UNITS_PER_WORD < 4)
7787 word = byte / UNITS_PER_WORD;
7788 if (WORDS_BIG_ENDIAN)
7789 word = (words - 1) - word;
7790 offset = word * UNITS_PER_WORD;
7791 if (BYTES_BIG_ENDIAN)
7792 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7793 else
7794 offset += byte % UNITS_PER_WORD;
7796 else
7798 offset = byte;
7799 if (BYTES_BIG_ENDIAN)
7801 /* Reverse bytes within each long, or within the entire float
7802 if it's smaller than a long (for HFmode). */
7803 offset = MIN (3, total_bytes - 1) - offset;
7804 gcc_assert (offset >= 0);
7807 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7808 if (offset >= off
7809 && offset - off < len)
7810 ptr[offset - off] = value;
7812 return MIN (len, total_bytes - off);
7815 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7816 specified by EXPR into the buffer PTR of length LEN bytes.
7817 Return the number of bytes placed in the buffer, or zero
7818 upon failure. */
7820 static int
7821 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7823 int rsize, isize;
7824 tree part;
7826 part = TREE_REALPART (expr);
7827 rsize = native_encode_expr (part, ptr, len, off);
7828 if (off == -1 && rsize == 0)
7829 return 0;
7830 part = TREE_IMAGPART (expr);
7831 if (off != -1)
7832 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7833 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7834 len - rsize, off);
7835 if (off == -1 && isize != rsize)
7836 return 0;
7837 return rsize + isize;
7840 /* Like native_encode_vector, but only encode the first COUNT elements.
7841 The other arguments are as for native_encode_vector. */
7843 static int
7844 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7845 int off, unsigned HOST_WIDE_INT count)
7847 tree itype = TREE_TYPE (TREE_TYPE (expr));
7848 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7849 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7851 /* This is the only case in which elements can be smaller than a byte.
7852 Element 0 is always in the lsb of the containing byte. */
7853 unsigned int elt_bits = TYPE_PRECISION (itype);
7854 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7855 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7856 return 0;
7858 if (off == -1)
7859 off = 0;
7861 /* Zero the buffer and then set bits later where necessary. */
7862 int extract_bytes = MIN (len, total_bytes - off);
7863 if (ptr)
7864 memset (ptr, 0, extract_bytes);
7866 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7867 unsigned int first_elt = off * elts_per_byte;
7868 unsigned int extract_elts = extract_bytes * elts_per_byte;
7869 for (unsigned int i = 0; i < extract_elts; ++i)
7871 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7872 if (TREE_CODE (elt) != INTEGER_CST)
7873 return 0;
7875 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7877 unsigned int bit = i * elt_bits;
7878 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7881 return extract_bytes;
7884 int offset = 0;
7885 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7886 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7888 if (off >= size)
7890 off -= size;
7891 continue;
7893 tree elem = VECTOR_CST_ELT (expr, i);
7894 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7895 len - offset, off);
7896 if ((off == -1 && res != size) || res == 0)
7897 return 0;
7898 offset += res;
7899 if (offset >= len)
7900 return (off == -1 && i < count - 1) ? 0 : offset;
7901 if (off != -1)
7902 off = 0;
7904 return offset;
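
/* Example (illustrative, not part of the original file): for a boolean
   vector with 1-bit elements, eight elements { 1, 0, 1, 1, 0, 0, 0, 0 }
   pack into the single byte 0x0d, element 0 in the least significant
   bit, exactly as the loop above does:

       unsigned char byte = 0;
       const unsigned char elts[8] = { 1, 0, 1, 1, 0, 0, 0, 0 };
       for (unsigned int i = 0; i < 8; i++)
         byte |= (unsigned char) (elts[i] << i);   // byte == 0x0d
*/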
7907 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7908 specified by EXPR into the buffer PTR of length LEN bytes.
7909 Return the number of bytes placed in the buffer, or zero
7910 upon failure. */
7912 static int
7913 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7915 unsigned HOST_WIDE_INT count;
7916 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7917 return 0;
7918 return native_encode_vector_part (expr, ptr, len, off, count);
7922 /* Subroutine of native_encode_expr. Encode the STRING_CST
7923 specified by EXPR into the buffer PTR of length LEN bytes.
7924 Return the number of bytes placed in the buffer, or zero
7925 upon failure. */
7927 static int
7928 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7930 tree type = TREE_TYPE (expr);
7932 /* Wide-char strings are encoded in target byte order, so native
7933 encoding of them is trivial. */
7934 if (BITS_PER_UNIT != CHAR_BIT
7935 || TREE_CODE (type) != ARRAY_TYPE
7936 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7937 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7938 return 0;
7940 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7941 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7942 return 0;
7943 if (off == -1)
7944 off = 0;
7945 len = MIN (total_bytes - off, len);
7946 if (ptr == NULL)
7947 /* Dry run. */;
7948 else
7950 int written = 0;
7951 if (off < TREE_STRING_LENGTH (expr))
7953 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7954 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7956 memset (ptr + written, 0, len - written);
7958 return len;
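
/* Example (illustrative, not part of the original file): a STRING_CST
   "ab" stored in a char[4] object encodes as { 'a', 'b', 0, 0 }; bytes
   past TREE_STRING_LENGTH are zero-filled, matching what static
   initialization produces:

       char s[4] = "ab";   // { 'a', 'b', '\0', '\0' }
*/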
7962 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7963 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7964 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7965 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7966 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7967 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7968 Return the number of bytes placed in the buffer, or zero upon failure. */
7970 int
7971 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7973 /* We don't support starting at a negative offset, and -1 is special. */
7974 if (off < -1)
7975 return 0;
7977 switch (TREE_CODE (expr))
7979 case INTEGER_CST:
7980 return native_encode_int (expr, ptr, len, off);
7982 case REAL_CST:
7983 return native_encode_real (expr, ptr, len, off);
7985 case FIXED_CST:
7986 return native_encode_fixed (expr, ptr, len, off);
7988 case COMPLEX_CST:
7989 return native_encode_complex (expr, ptr, len, off);
7991 case VECTOR_CST:
7992 return native_encode_vector (expr, ptr, len, off);
7994 case STRING_CST:
7995 return native_encode_string (expr, ptr, len, off);
7997 default:
7998 return 0;
8002 /* Try to find a type whose byte size is at most LEN bytes and at
8003 least FIELDSIZE bytes, with underlying mode precision/size a multiple
8004 of BITS_PER_UNIT. As native_{interpret,encode}_int work in terms of
8005 machine modes, we can't just use build_nonstandard_integer_type. */
8007 tree
8008 find_bitfield_repr_type (int fieldsize, int len)
8010 machine_mode mode;
8011 for (int pass = 0; pass < 2; pass++)
8013 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8014 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8015 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8016 && known_eq (GET_MODE_PRECISION (mode),
8017 GET_MODE_BITSIZE (mode))
8018 && known_le (GET_MODE_SIZE (mode), len))
8020 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8021 if (ret && TYPE_MODE (ret) == mode)
8022 return ret;
8026 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8027 if (int_n_enabled_p[i]
8028 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8029 && int_n_trees[i].unsigned_type)
8031 tree ret = int_n_trees[i].unsigned_type;
8032 mode = TYPE_MODE (ret);
8033 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8034 && known_eq (GET_MODE_PRECISION (mode),
8035 GET_MODE_BITSIZE (mode))
8036 && known_le (GET_MODE_SIZE (mode), len))
8037 return ret;
8040 return NULL_TREE;
8043 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8044 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8045 must be non-NULL and OFF zero), then in addition to filling the
8046 bytes pointed to by PTR with the value, also clear any bits pointed
8047 to by MASK that are known to be initialized; bits that remain
8048 uninitialized, e.g. padding bits or uninitialized fields, are kept as is. */
8050 int
8051 native_encode_initializer (tree init, unsigned char *ptr, int len,
8052 int off, unsigned char *mask)
8054 int r;
8056 /* We don't support starting at a negative offset, and -1 is special. */
8057 if (off < -1 || init == NULL_TREE)
8058 return 0;
8060 gcc_assert (mask == NULL || (off == 0 && ptr));
8062 STRIP_NOPS (init);
8063 switch (TREE_CODE (init))
8065 case VIEW_CONVERT_EXPR:
8066 case NON_LVALUE_EXPR:
8067 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8068 mask);
8069 default:
8070 r = native_encode_expr (init, ptr, len, off);
8071 if (mask)
8072 memset (mask, 0, r);
8073 return r;
8074 case CONSTRUCTOR:
8075 tree type = TREE_TYPE (init);
8076 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8077 if (total_bytes < 0)
8078 return 0;
8079 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8080 return 0;
8081 int o = off == -1 ? 0 : off;
8082 if (TREE_CODE (type) == ARRAY_TYPE)
8084 tree min_index;
8085 unsigned HOST_WIDE_INT cnt;
8086 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8087 constructor_elt *ce;
8089 if (!TYPE_DOMAIN (type)
8090 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8091 return 0;
8093 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8094 if (fieldsize <= 0)
8095 return 0;
8097 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8098 if (ptr)
8099 memset (ptr, '\0', MIN (total_bytes - off, len));
8101 for (cnt = 0; ; cnt++)
8103 tree val = NULL_TREE, index = NULL_TREE;
8104 HOST_WIDE_INT pos = curpos, count = 0;
8105 bool full = false;
8106 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8108 val = ce->value;
8109 index = ce->index;
8111 else if (mask == NULL
8112 || CONSTRUCTOR_NO_CLEARING (init)
8113 || curpos >= total_bytes)
8114 break;
8115 else
8116 pos = total_bytes;
8118 if (index && TREE_CODE (index) == RANGE_EXPR)
8120 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8121 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8122 return 0;
8123 offset_int wpos
8124 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8125 - wi::to_offset (min_index),
8126 TYPE_PRECISION (sizetype));
8127 wpos *= fieldsize;
8128 if (!wi::fits_shwi_p (wpos))
8129 return 0;
8130 pos = wpos.to_shwi ();
8131 offset_int wcount
8132 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8133 - wi::to_offset (TREE_OPERAND (index, 0)),
8134 TYPE_PRECISION (sizetype));
8135 if (!wi::fits_shwi_p (wcount))
8136 return 0;
8137 count = wcount.to_shwi ();
8139 else if (index)
8141 if (TREE_CODE (index) != INTEGER_CST)
8142 return 0;
8143 offset_int wpos
8144 = wi::sext (wi::to_offset (index)
8145 - wi::to_offset (min_index),
8146 TYPE_PRECISION (sizetype));
8147 wpos *= fieldsize;
8148 if (!wi::fits_shwi_p (wpos))
8149 return 0;
8150 pos = wpos.to_shwi ();
8153 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8155 if (valueinit == -1)
8157 tree zero = build_zero_cst (TREE_TYPE (type));
8158 r = native_encode_initializer (zero, ptr + curpos,
8159 fieldsize, 0,
8160 mask + curpos);
8161 if (TREE_CODE (zero) == CONSTRUCTOR)
8162 ggc_free (zero);
8163 if (!r)
8164 return 0;
8165 valueinit = curpos;
8166 curpos += fieldsize;
8168 while (curpos != pos)
8170 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8171 memcpy (mask + curpos, mask + valueinit, fieldsize);
8172 curpos += fieldsize;
8176 curpos = pos;
8177 if (val)
8180 if (off == -1
8181 || (curpos >= off
8182 && (curpos + fieldsize
8183 <= (HOST_WIDE_INT) off + len)))
8185 if (full)
8187 if (ptr)
8188 memcpy (ptr + (curpos - o), ptr + (pos - o),
8189 fieldsize);
8190 if (mask)
8191 memcpy (mask + curpos, mask + pos, fieldsize);
8193 else if (!native_encode_initializer (val,
8195 ? ptr + curpos - o
8196 : NULL,
8197 fieldsize,
8198 off == -1 ? -1
8199 : 0,
8200 mask
8201 ? mask + curpos
8202 : NULL))
8203 return 0;
8204 else
8206 full = true;
8207 pos = curpos;
8210 else if (curpos + fieldsize > off
8211 && curpos < (HOST_WIDE_INT) off + len)
8213 /* Partial overlap. */
8214 unsigned char *p = NULL;
8215 int no = 0;
8216 int l;
8217 gcc_assert (mask == NULL);
8218 if (curpos >= off)
8220 if (ptr)
8221 p = ptr + curpos - off;
8222 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8223 fieldsize);
8225 else
8227 p = ptr;
8228 no = off - curpos;
8229 l = len;
8231 if (!native_encode_initializer (val, p, l, no, NULL))
8232 return 0;
8234 curpos += fieldsize;
8236 while (count-- != 0);
8238 return MIN (total_bytes - off, len);
8240 else if (TREE_CODE (type) == RECORD_TYPE
8241 || TREE_CODE (type) == UNION_TYPE)
8243 unsigned HOST_WIDE_INT cnt;
8244 constructor_elt *ce;
8245 tree fld_base = TYPE_FIELDS (type);
8246 tree to_free = NULL_TREE;
8248 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8249 if (ptr != NULL)
8250 memset (ptr, '\0', MIN (total_bytes - o, len));
8251 for (cnt = 0; ; cnt++)
8253 tree val = NULL_TREE, field = NULL_TREE;
8254 HOST_WIDE_INT pos = 0, fieldsize;
8255 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8257 if (to_free)
8259 ggc_free (to_free);
8260 to_free = NULL_TREE;
8263 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8265 val = ce->value;
8266 field = ce->index;
8267 if (field == NULL_TREE)
8268 return 0;
8270 pos = int_byte_position (field);
8271 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8272 continue;
8274 else if (mask == NULL
8275 || CONSTRUCTOR_NO_CLEARING (init))
8276 break;
8277 else
8278 pos = total_bytes;
8280 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8282 tree fld;
8283 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8285 if (TREE_CODE (fld) != FIELD_DECL)
8286 continue;
8287 if (fld == field)
8288 break;
8289 if (DECL_PADDING_P (fld))
8290 continue;
8291 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8292 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8293 return 0;
8294 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8295 continue;
8296 break;
8298 if (fld == NULL_TREE)
8300 if (ce == NULL)
8301 break;
8302 return 0;
8304 fld_base = DECL_CHAIN (fld);
8305 if (fld != field)
8307 cnt--;
8308 field = fld;
8309 pos = int_byte_position (field);
8310 val = build_zero_cst (TREE_TYPE (fld));
8311 if (TREE_CODE (val) == CONSTRUCTOR)
8312 to_free = val;
8316 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8317 && TYPE_DOMAIN (TREE_TYPE (field))
8318 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8320 if (mask || off != -1)
8321 return 0;
8322 if (val == NULL_TREE)
8323 continue;
8324 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8325 return 0;
8326 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8327 if (fieldsize < 0
8328 || (int) fieldsize != fieldsize
8329 || (pos + fieldsize) > INT_MAX)
8330 return 0;
8331 if (pos + fieldsize > total_bytes)
8333 if (ptr != NULL && total_bytes < len)
8334 memset (ptr + total_bytes, '\0',
8335 MIN (pos + fieldsize, len) - total_bytes);
8336 total_bytes = pos + fieldsize;
8339 else
8341 if (DECL_SIZE_UNIT (field) == NULL_TREE
8342 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8343 return 0;
8344 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8346 if (fieldsize == 0)
8347 continue;
8349 if (DECL_BIT_FIELD (field))
8351 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8352 return 0;
8353 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8354 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8355 if (bpos % BITS_PER_UNIT)
8356 bpos %= BITS_PER_UNIT;
8357 else
8358 bpos = 0;
8359 fieldsize += bpos;
8360 epos = fieldsize % BITS_PER_UNIT;
8361 fieldsize += BITS_PER_UNIT - 1;
8362 fieldsize /= BITS_PER_UNIT;
8365 if (off != -1 && pos + fieldsize <= off)
8366 continue;
8368 if (val == NULL_TREE)
8369 continue;
8371 if (DECL_BIT_FIELD (field))
8373 /* FIXME: Handle PDP endian. */
8374 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8375 return 0;
8377 if (TREE_CODE (val) != INTEGER_CST)
8378 return 0;
8380 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8381 tree repr_type = NULL_TREE;
8382 HOST_WIDE_INT rpos = 0;
8383 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8385 rpos = int_byte_position (repr);
8386 repr_type = TREE_TYPE (repr);
8388 else
8390 repr_type = find_bitfield_repr_type (fieldsize, len);
8391 if (repr_type == NULL_TREE)
8392 return 0;
8393 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8394 gcc_assert (repr_size > 0 && repr_size <= len);
8395 if (pos + repr_size <= o + len)
8396 rpos = pos;
8397 else
8399 rpos = o + len - repr_size;
8400 gcc_assert (rpos <= pos);
8404 if (rpos > pos)
8405 return 0;
8406 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8407 int diff = (TYPE_PRECISION (repr_type)
8408 - TYPE_PRECISION (TREE_TYPE (field)));
8409 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8410 if (!BYTES_BIG_ENDIAN)
8411 w = wi::lshift (w, bitoff);
8412 else
8413 w = wi::lshift (w, diff - bitoff);
8414 val = wide_int_to_tree (repr_type, w);
8416 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8417 / BITS_PER_UNIT + 1];
8418 int l = native_encode_int (val, buf, sizeof buf, 0);
8419 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8420 return 0;
8422 if (ptr == NULL)
8423 continue;
8425 /* If the bitfield does not start at a byte boundary, handle
8426 the partial byte at the start. */
8427 if (bpos
8428 && (off == -1 || (pos >= off && len >= 1)))
8430 if (!BYTES_BIG_ENDIAN)
8432 int msk = (1 << bpos) - 1;
8433 buf[pos - rpos] &= ~msk;
8434 buf[pos - rpos] |= ptr[pos - o] & msk;
8435 if (mask)
8437 if (fieldsize > 1 || epos == 0)
8438 mask[pos] &= msk;
8439 else
8440 mask[pos] &= (msk | ~((1 << epos) - 1));
8443 else
8445 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8446 buf[pos - rpos] &= msk;
8447 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8448 if (mask)
8450 if (fieldsize > 1 || epos == 0)
8451 mask[pos] &= ~msk;
8452 else
8453 mask[pos] &= (~msk
8454 | ((1 << (BITS_PER_UNIT - epos))
8455 - 1));
8459 /* If the bitfield does not end at a byte boundary, handle
8460 the partial byte at the end. */
8461 if (epos
8462 && (off == -1
8463 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8465 if (!BYTES_BIG_ENDIAN)
8467 int msk = (1 << epos) - 1;
8468 buf[pos - rpos + fieldsize - 1] &= msk;
8469 buf[pos - rpos + fieldsize - 1]
8470 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8471 if (mask && (fieldsize > 1 || bpos == 0))
8472 mask[pos + fieldsize - 1] &= ~msk;
8474 else
8476 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8477 buf[pos - rpos + fieldsize - 1] &= ~msk;
8478 buf[pos - rpos + fieldsize - 1]
8479 |= ptr[pos + fieldsize - 1 - o] & msk;
8480 if (mask && (fieldsize > 1 || bpos == 0))
8481 mask[pos + fieldsize - 1] &= msk;
8484 if (off == -1
8485 || (pos >= off
8486 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8488 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8489 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8490 memset (mask + pos + (bpos != 0), 0,
8491 fieldsize - (bpos != 0) - (epos != 0));
8493 else
8495 /* Partial overlap. */
8496 HOST_WIDE_INT fsz = fieldsize;
8497 gcc_assert (mask == NULL);
8498 if (pos < off)
8500 fsz -= (off - pos);
8501 pos = off;
8503 if (pos + fsz > (HOST_WIDE_INT) off + len)
8504 fsz = (HOST_WIDE_INT) off + len - pos;
8505 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8507 continue;
8510 if (off == -1
8511 || (pos >= off
8512 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8514 int fldsize = fieldsize;
8515 if (off == -1)
8517 tree fld = DECL_CHAIN (field);
8518 while (fld)
8520 if (TREE_CODE (fld) == FIELD_DECL)
8521 break;
8522 fld = DECL_CHAIN (fld);
8524 if (fld == NULL_TREE)
8525 fldsize = len - pos;
8527 r = native_encode_initializer (val, ptr ? ptr + pos - o
8528 : NULL,
8529 fldsize,
8530 off == -1 ? -1 : 0,
8531 mask ? mask + pos : NULL);
8532 if (!r)
8533 return 0;
8534 if (off == -1
8535 && fldsize != fieldsize
8536 && r > fieldsize
8537 && pos + r > total_bytes)
8538 total_bytes = pos + r;
8540 else
8542 /* Partial overlap. */
8543 unsigned char *p = NULL;
8544 int no = 0;
8545 int l;
8546 gcc_assert (mask == NULL);
8547 if (pos >= off)
8549 if (ptr)
8550 p = ptr + pos - off;
8551 l = MIN ((HOST_WIDE_INT) off + len - pos,
8552 fieldsize);
8554 else
8556 p = ptr;
8557 no = off - pos;
8558 l = len;
8560 if (!native_encode_initializer (val, p, l, no, NULL))
8561 return 0;
8564 return MIN (total_bytes - off, len);
8566 return 0;
8571 /* Subroutine of native_interpret_expr. Interpret the contents of
8572 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8573 If the buffer cannot be interpreted, return NULL_TREE. */
8575 static tree
8576 native_interpret_int (tree type, const unsigned char *ptr, int len)
8578 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8580 if (total_bytes > len
8581 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8582 return NULL_TREE;
8584 wide_int result = wi::from_buffer (ptr, total_bytes);
8586 return wide_int_to_tree (type, result);
8590 /* Subroutine of native_interpret_expr. Interpret the contents of
8591 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8592 If the buffer cannot be interpreted, return NULL_TREE. */
8594 static tree
8595 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8597 scalar_mode mode = SCALAR_TYPE_MODE (type);
8598 int total_bytes = GET_MODE_SIZE (mode);
8599 double_int result;
8600 FIXED_VALUE_TYPE fixed_value;
8602 if (total_bytes > len
8603 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8604 return NULL_TREE;
8606 result = double_int::from_buffer (ptr, total_bytes);
8607 fixed_value = fixed_from_double_int (result, mode);
8609 return build_fixed (type, fixed_value);
8613 /* Subroutine of native_interpret_expr. Interpret the contents of
8614 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8615 If the buffer cannot be interpreted, return NULL_TREE. */
8617 static tree
8618 native_interpret_real (tree type, const unsigned char *ptr, int len)
8620 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8621 int total_bytes = GET_MODE_SIZE (mode);
8622 unsigned char value;
8623 /* There are always 32 bits in each long, no matter the size of
8624 the host's long. We handle floating point representations with
8625 up to 192 bits. */
8626 REAL_VALUE_TYPE r;
8627 long tmp[6];
8629 if (total_bytes > len || total_bytes > 24)
8630 return NULL_TREE;
8631 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8633 memset (tmp, 0, sizeof (tmp));
8634 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8635 bitpos += BITS_PER_UNIT)
8637 /* Both OFFSET and BYTE index within a long;
8638 bitpos indexes the whole float. */
8639 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8640 if (UNITS_PER_WORD < 4)
8642 int word = byte / UNITS_PER_WORD;
8643 if (WORDS_BIG_ENDIAN)
8644 word = (words - 1) - word;
8645 offset = word * UNITS_PER_WORD;
8646 if (BYTES_BIG_ENDIAN)
8647 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8648 else
8649 offset += byte % UNITS_PER_WORD;
8651 else
8653 offset = byte;
8654 if (BYTES_BIG_ENDIAN)
8656 /* Reverse bytes within each long, or within the entire float
8657 if it's smaller than a long (for HFmode). */
8658 offset = MIN (3, total_bytes - 1) - offset;
8659 gcc_assert (offset >= 0);
8662 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8664 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8667 real_from_target (&r, tmp, mode);
8668 tree ret = build_real (type, r);
8669 if (MODE_COMPOSITE_P (mode))
8671 /* For floating point values in composite modes, punt if this folding
8672 doesn't preserve bit representation. As the mode doesn't have fixed
8673 precision while GCC pretends it does, there could be valid values that
8674 GCC can't really represent accurately. See PR95450. */
8675 unsigned char buf[24];
8676 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8677 || memcmp (ptr, buf, total_bytes) != 0)
8678 ret = NULL_TREE;
8680 return ret;
8684 /* Subroutine of native_interpret_expr. Interpret the contents of
8685 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8686 If the buffer cannot be interpreted, return NULL_TREE. */
8688 static tree
8689 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8691 tree etype, rpart, ipart;
8692 int size;
8694 etype = TREE_TYPE (type);
8695 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8696 if (size * 2 > len)
8697 return NULL_TREE;
8698 rpart = native_interpret_expr (etype, ptr, size);
8699 if (!rpart)
8700 return NULL_TREE;
8701 ipart = native_interpret_expr (etype, ptr+size, size);
8702 if (!ipart)
8703 return NULL_TREE;
8704 return build_complex (type, rpart, ipart);
8707 /* Read a vector of type TYPE from the target memory image given by BYTES,
8708 which contains LEN bytes. The vector is known to be encodable using
8709 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8711 Return the vector on success, otherwise return null. */
8713 static tree
8714 native_interpret_vector_part (tree type, const unsigned char *bytes,
8715 unsigned int len, unsigned int npatterns,
8716 unsigned int nelts_per_pattern)
8718 tree elt_type = TREE_TYPE (type);
8719 if (VECTOR_BOOLEAN_TYPE_P (type)
8720 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8722 /* This is the only case in which elements can be smaller than a byte.
8723 Element 0 is always in the lsb of the containing byte. */
8724 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8725 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8726 return NULL_TREE;
8728 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8729 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8731 unsigned int bit_index = i * elt_bits;
8732 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8733 unsigned int lsb = bit_index % BITS_PER_UNIT;
8734 builder.quick_push (bytes[byte_index] & (1 << lsb)
8735 ? build_all_ones_cst (elt_type)
8736 : build_zero_cst (elt_type));
8738 return builder.build ();
8741 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8742 if (elt_bytes * npatterns * nelts_per_pattern > len)
8743 return NULL_TREE;
8745 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8746 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8748 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8749 if (!elt)
8750 return NULL_TREE;
8751 builder.quick_push (elt);
8752 bytes += elt_bytes;
8754 return builder.build ();
8757 /* Subroutine of native_interpret_expr. Interpret the contents of
8758 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8759 If the buffer cannot be interpreted, return NULL_TREE. */
8761 static tree
8762 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8764 tree etype;
8765 unsigned int size;
8766 unsigned HOST_WIDE_INT count;
8768 etype = TREE_TYPE (type);
8769 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8770 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8771 || size * count > len)
8772 return NULL_TREE;
8774 return native_interpret_vector_part (type, ptr, len, count, 1);
8778 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8779 the buffer PTR of length LEN as a constant of type TYPE. For
8780 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8781 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8782 return NULL_TREE. */
8784 tree
8785 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8787 switch (TREE_CODE (type))
8789 case INTEGER_TYPE:
8790 case ENUMERAL_TYPE:
8791 case BOOLEAN_TYPE:
8792 case POINTER_TYPE:
8793 case REFERENCE_TYPE:
8794 case OFFSET_TYPE:
8795 return native_interpret_int (type, ptr, len);
8797 case REAL_TYPE:
8798 return native_interpret_real (type, ptr, len);
8800 case FIXED_POINT_TYPE:
8801 return native_interpret_fixed (type, ptr, len);
8803 case COMPLEX_TYPE:
8804 return native_interpret_complex (type, ptr, len);
8806 case VECTOR_TYPE:
8807 return native_interpret_vector (type, ptr, len);
8809 default:
8810 return NULL_TREE;
8814 /* Returns true if we can interpret the contents of a native encoding
8815 as TYPE. */
8817 bool
8818 can_native_interpret_type_p (tree type)
8820 switch (TREE_CODE (type))
8822 case INTEGER_TYPE:
8823 case ENUMERAL_TYPE:
8824 case BOOLEAN_TYPE:
8825 case POINTER_TYPE:
8826 case REFERENCE_TYPE:
8827 case FIXED_POINT_TYPE:
8828 case REAL_TYPE:
8829 case COMPLEX_TYPE:
8830 case VECTOR_TYPE:
8831 case OFFSET_TYPE:
8832 return true;
8833 default:
8834 return false;
8838 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8839 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8841 tree
8842 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8843 int len)
8845 vec<constructor_elt, va_gc> *elts = NULL;
8846 if (TREE_CODE (type) == ARRAY_TYPE)
8848 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8849 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8850 return NULL_TREE;
8852 HOST_WIDE_INT cnt = 0;
8853 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8855 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8856 return NULL_TREE;
8857 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8859 if (eltsz == 0)
8860 cnt = 0;
8861 HOST_WIDE_INT pos = 0;
8862 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8864 tree v = NULL_TREE;
8865 if (pos >= len || pos + eltsz > len)
8866 return NULL_TREE;
8867 if (can_native_interpret_type_p (TREE_TYPE (type)))
8869 v = native_interpret_expr (TREE_TYPE (type),
8870 ptr + off + pos, eltsz);
8871 if (v == NULL_TREE)
8872 return NULL_TREE;
8874 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8875 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8876 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8877 eltsz);
8878 if (v == NULL_TREE)
8879 return NULL_TREE;
8880 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8882 return build_constructor (type, elts);
8884 if (TREE_CODE (type) != RECORD_TYPE)
8885 return NULL_TREE;
8886 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8888 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8889 continue;
8890 tree fld = field;
8891 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8892 int diff = 0;
8893 tree v = NULL_TREE;
8894 if (DECL_BIT_FIELD (field))
8896 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8897 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8899 poly_int64 bitoffset;
8900 poly_uint64 field_offset, fld_offset;
8901 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8902 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8903 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8904 else
8905 bitoffset = 0;
8906 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8907 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8908 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8909 - TYPE_PRECISION (TREE_TYPE (field)));
8910 if (!bitoffset.is_constant (&bitoff)
8911 || bitoff < 0
8912 || bitoff > diff)
8913 return NULL_TREE;
8915 else
8917 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8918 return NULL_TREE;
8919 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8920 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8921 bpos %= BITS_PER_UNIT;
8922 fieldsize += bpos;
8923 fieldsize += BITS_PER_UNIT - 1;
8924 fieldsize /= BITS_PER_UNIT;
8925 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8926 if (repr_type == NULL_TREE)
8927 return NULL_TREE;
8928 sz = int_size_in_bytes (repr_type);
8929 if (sz < 0 || sz > len)
8930 return NULL_TREE;
8931 pos = int_byte_position (field);
8932 if (pos < 0 || pos > len || pos + fieldsize > len)
8933 return NULL_TREE;
8934 HOST_WIDE_INT rpos;
8935 if (pos + sz <= len)
8936 rpos = pos;
8937 else
8939 rpos = len - sz;
8940 gcc_assert (rpos <= pos);
8942 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8943 pos = rpos;
8944 diff = (TYPE_PRECISION (repr_type)
8945 - TYPE_PRECISION (TREE_TYPE (field)));
8946 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8947 if (v == NULL_TREE)
8948 return NULL_TREE;
8949 fld = NULL_TREE;
8953 if (fld)
8955 sz = int_size_in_bytes (TREE_TYPE (fld));
8956 if (sz < 0 || sz > len)
8957 return NULL_TREE;
8958 tree byte_pos = byte_position (fld);
8959 if (!tree_fits_shwi_p (byte_pos))
8960 return NULL_TREE;
8961 pos = tree_to_shwi (byte_pos);
8962 if (pos < 0 || pos > len || pos + sz > len)
8963 return NULL_TREE;
8965 if (fld == NULL_TREE)
8966 /* Already handled above. */;
8967 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8969 v = native_interpret_expr (TREE_TYPE (fld),
8970 ptr + off + pos, sz);
8971 if (v == NULL_TREE)
8972 return NULL_TREE;
8974 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8975 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8976 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8977 if (v == NULL_TREE)
8978 return NULL_TREE;
8979 if (fld != field)
8981 if (TREE_CODE (v) != INTEGER_CST)
8982 return NULL_TREE;
8984 /* FIXME: Figure out how to handle PDP endian bitfields. */
8985 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8986 return NULL_TREE;
8987 if (!BYTES_BIG_ENDIAN)
8988 v = wide_int_to_tree (TREE_TYPE (field),
8989 wi::lrshift (wi::to_wide (v), bitoff));
8990 else
8991 v = wide_int_to_tree (TREE_TYPE (field),
8992 wi::lrshift (wi::to_wide (v),
8993 diff - bitoff));
8995 CONSTRUCTOR_APPEND_ELT (elts, field, v);
8997 return build_constructor (type, elts);
9000 /* Routines for manipulating native_encode_expr encoded data when the encoded
9001 or extracted constant positions and/or sizes aren't byte-aligned. */
9003 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9004 bits between adjacent elements. AMNT should be within
9005 [0, BITS_PER_UNIT).
9006 Example, AMNT = 2:
9007 00011111|11100000 << 2 = 01111111|10000000
9008 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9010 void
9011 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9012 unsigned int amnt)
9014 if (amnt == 0)
9015 return;
9017 unsigned char carry_over = 0U;
9018 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9019 unsigned char clear_mask = (~0U) << amnt;
9021 for (unsigned int i = 0; i < sz; i++)
9023 unsigned prev_carry_over = carry_over;
9024 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9026 ptr[i] <<= amnt;
9027 if (i != 0)
9029 ptr[i] &= clear_mask;
9030 ptr[i] |= prev_carry_over;
9035 /* Like shift_bytes_in_array_left but for big-endian.
9036 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9037 bits between adjacent elements. AMNT should be within
9038 [0, BITS_PER_UNIT).
9039 Example, AMNT = 2:
9040 00011111|11100000 >> 2 = 00000111|11111000
9041 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9043 void
9044 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9045 unsigned int amnt)
9047 if (amnt == 0)
9048 return;
9050 unsigned char carry_over = 0U;
9051 unsigned char carry_mask = ~(~0U << amnt);
9053 for (unsigned int i = 0; i < sz; i++)
9055 unsigned prev_carry_over = carry_over;
9056 carry_over = ptr[i] & carry_mask;
9058 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9059 ptr[i] >>= amnt;
9060 ptr[i] |= prev_carry_over;
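
/* Host-side sketch (illustrative, not part of the original file) that
   reproduces the worked examples in the two comments above, with
   AMNT = 2 and BITS_PER_UNIT = 8:

       unsigned char lbuf[2] = { 0xe0, 0x1f };    // PTR[0], PTR[1]
       shift_bytes_in_array_left (lbuf, 2, 2);    // now { 0x80, 0x7f }

       unsigned char rbuf[2] = { 0x1f, 0xe0 };    // PTR[0], PTR[1]
       shift_bytes_in_array_right (rbuf, 2, 2);   // now { 0x07, 0xf8 }
*/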
9064 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9065 directly on the VECTOR_CST encoding, in a way that works for variable-
9066 length vectors. Return the resulting VECTOR_CST on success or null
9067 on failure. */
9069 static tree
9070 fold_view_convert_vector_encoding (tree type, tree expr)
9072 tree expr_type = TREE_TYPE (expr);
9073 poly_uint64 type_bits, expr_bits;
9074 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9075 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9076 return NULL_TREE;
9078 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9079 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9080 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9081 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9083 /* We can only preserve the semantics of a stepped pattern if the new
9084 vector element is an integer of the same size. */
9085 if (VECTOR_CST_STEPPED_P (expr)
9086 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9087 return NULL_TREE;
9089 /* The number of bits needed to encode one element from every pattern
9090 of the original vector. */
9091 unsigned int expr_sequence_bits
9092 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9094 /* The number of bits needed to encode one element from every pattern
9095 of the result. */
9096 unsigned int type_sequence_bits
9097 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9099 /* Don't try to read more bytes than are available, which can happen
9100 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9101 The general VIEW_CONVERT handling can cope with that case, so there's
9102 no point complicating things here. */
9103 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9104 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9105 BITS_PER_UNIT);
9106 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9107 if (known_gt (buffer_bits, expr_bits))
9108 return NULL_TREE;
9110 /* Get enough bytes of EXPR to form the new encoding. */
9111 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9112 buffer.quick_grow (buffer_bytes);
9113 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9114 buffer_bits / expr_elt_bits)
9115 != (int) buffer_bytes)
9116 return NULL_TREE;
9118 /* Reencode the bytes as TYPE. */
9119 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9120 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9121 type_npatterns, nelts_per_pattern);
9124 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9125 TYPE at compile-time. If we're unable to perform the conversion
9126 return NULL_TREE. */
9128 static tree
9129 fold_view_convert_expr (tree type, tree expr)
9131 /* We support up to 512-bit values (for V8DFmode). */
9132 unsigned char buffer[64];
9133 int len;
9135 /* Check that the host and target are sane. */
9136 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9137 return NULL_TREE;
9139 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9140 if (tree res = fold_view_convert_vector_encoding (type, expr))
9141 return res;
9143 len = native_encode_expr (expr, buffer, sizeof (buffer));
9144 if (len == 0)
9145 return NULL_TREE;
9147 return native_interpret_expr (type, buffer, len);
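
/* Example fold (illustrative, not part of the original file): assuming
   IEEE single precision, VIEW_CONVERT_EXPR<unsigned int>(1.0f) folds
   to the INTEGER_CST 0x3f800000 on both little- and big-endian
   targets, since encode and interpret both use target byte order.
   The host-side analogue of the fold:

       #include <string.h>
       unsigned int
       view_convert_float (float f)
       {
         unsigned int u;
         memcpy (&u, &f, sizeof u);   // 0x3f800000 when f == 1.0f
         return u;
       }
*/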
9150 /* Build an expression for the address of T. Folds away INDIRECT_REF
9151 to avoid confusing the gimplify process. */
9153 tree
9154 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9156 /* The size of the object is not relevant when talking about its address. */
9157 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9158 t = TREE_OPERAND (t, 0);
9160 if (TREE_CODE (t) == INDIRECT_REF)
9162 t = TREE_OPERAND (t, 0);
9164 if (TREE_TYPE (t) != ptrtype)
9165 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9167 else if (TREE_CODE (t) == MEM_REF
9168 && integer_zerop (TREE_OPERAND (t, 1)))
9170 t = TREE_OPERAND (t, 0);
9172 if (TREE_TYPE (t) != ptrtype)
9173 t = fold_convert_loc (loc, ptrtype, t);
9175 else if (TREE_CODE (t) == MEM_REF
9176 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9177 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9178 TREE_OPERAND (t, 0),
9179 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9180 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9182 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9184 if (TREE_TYPE (t) != ptrtype)
9185 t = fold_convert_loc (loc, ptrtype, t);
9187 else
9188 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9190 return t;
9193 /* Build an expression for the address of T. */
9195 tree
9196 build_fold_addr_expr_loc (location_t loc, tree t)
9198 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9200 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
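
/* Example fold (illustrative, not part of the original file): taking
   the address of an INDIRECT_REF cancels the dereference, so building
   the address of *p yields p itself, the GENERIC analogue of the C
   identity below:

       int *
       addr_of_deref (int *p)
       {
         return &*p;   // folds to plain p; p is never dereferenced
       }
*/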
9203 /* Fold a unary expression of code CODE and type TYPE with operand
9204 OP0. Return the folded expression if folding is successful.
9205 Otherwise, return NULL_TREE. */
9207 tree
9208 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9210 tree tem;
9211 tree arg0;
9212 enum tree_code_class kind = TREE_CODE_CLASS (code);
9214 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9215 && TREE_CODE_LENGTH (code) == 1);
9217 arg0 = op0;
9218 if (arg0)
9220 if (CONVERT_EXPR_CODE_P (code)
9221 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9223 /* Don't use STRIP_NOPS, because signedness of argument type
9224 matters. */
9225 STRIP_SIGN_NOPS (arg0);
9227 else
9229 /* Strip any conversions that don't change the mode. This
9230 is safe for every expression, except for a comparison
9231 expression because its signedness is derived from its
9232 operands.
9234 Note that this is done as an internal manipulation within
9235 the constant folder, in order to find the simplest
9236 representation of the arguments so that their form can be
9237 studied. In any case, the appropriate type conversions
9238 should be put back in the tree that will get out of the
9239 constant folder. */
9240 STRIP_NOPS (arg0);
9243 if (CONSTANT_CLASS_P (arg0))
9245 tree tem = const_unop (code, type, arg0);
9246 if (tem)
9248 if (TREE_TYPE (tem) != type)
9249 tem = fold_convert_loc (loc, type, tem);
9250 return tem;
9255 tem = generic_simplify (loc, code, type, op0);
9256 if (tem)
9257 return tem;
9259 if (TREE_CODE_CLASS (code) == tcc_unary)
9261 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9262 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9263 fold_build1_loc (loc, code, type,
9264 fold_convert_loc (loc, TREE_TYPE (op0),
9265 TREE_OPERAND (arg0, 1))));
9266 else if (TREE_CODE (arg0) == COND_EXPR)
9268 tree arg01 = TREE_OPERAND (arg0, 1);
9269 tree arg02 = TREE_OPERAND (arg0, 2);
9270 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9271 arg01 = fold_build1_loc (loc, code, type,
9272 fold_convert_loc (loc,
9273 TREE_TYPE (op0), arg01));
9274 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9275 arg02 = fold_build1_loc (loc, code, type,
9276 fold_convert_loc (loc,
9277 TREE_TYPE (op0), arg02));
9278 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9279 arg01, arg02);
9281 /* If this was a conversion, and all we did was to move it
9282 inside the COND_EXPR, bring it back out. But leave it if
9283 it is a conversion from integer to integer and the
9284 result precision is no wider than a word since such a
9285 conversion is cheap and may be optimized away by combine,
9286 while it couldn't if it were outside the COND_EXPR. Then return
9287 so we don't get into an infinite recursion loop taking the
9288 conversion out and then back in. */
9290 if ((CONVERT_EXPR_CODE_P (code)
9291 || code == NON_LVALUE_EXPR)
9292 && TREE_CODE (tem) == COND_EXPR
9293 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9294 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9295 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9296 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9297 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9298 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9299 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9300 && (INTEGRAL_TYPE_P
9301 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9302 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9303 || flag_syntax_only))
9304 tem = build1_loc (loc, code, type,
9305 build3 (COND_EXPR,
9306 TREE_TYPE (TREE_OPERAND
9307 (TREE_OPERAND (tem, 1), 0)),
9308 TREE_OPERAND (tem, 0),
9309 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9310 TREE_OPERAND (TREE_OPERAND (tem, 2),
9311 0)));
9312 return tem;
9316 switch (code)
9318 case NON_LVALUE_EXPR:
9319 if (!maybe_lvalue_p (op0))
9320 return fold_convert_loc (loc, type, op0);
9321 return NULL_TREE;
9323 CASE_CONVERT:
9324 case FLOAT_EXPR:
9325 case FIX_TRUNC_EXPR:
9326 if (COMPARISON_CLASS_P (op0))
9328 /* If we have (type) (a CMP b) and type is an integral type, return
9329 new expression involving the new type. Canonicalize
9330 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9331 non-integral type.
9332 Do not fold the result, as that would not simplify further;
9333 folding again would only recurse. */
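/* E.g. a conversion of (a < b) to double becomes a < b ? 1.0 : 0.0,
while a conversion to a boolean type simply rebuilds the comparison
with that type.  */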
9334 if (TREE_CODE (type) == BOOLEAN_TYPE)
9335 return build2_loc (loc, TREE_CODE (op0), type,
9336 TREE_OPERAND (op0, 0),
9337 TREE_OPERAND (op0, 1));
9338 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9339 && TREE_CODE (type) != VECTOR_TYPE)
9340 return build3_loc (loc, COND_EXPR, type, op0,
9341 constant_boolean_node (true, type),
9342 constant_boolean_node (false, type));
9345 /* Handle (T *)&A.B.C for A being of type T and B and C
9346 living at offset zero. This occurs frequently in
9347 C++ upcasting and then accessing the base. */
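/* E.g. given struct T { struct U u; } t; the cast (struct T *)&t.u
folds to &t, since u lives at offset zero and the pointed-to type
matches the type of t.  */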
9348 if (TREE_CODE (op0) == ADDR_EXPR
9349 && POINTER_TYPE_P (type)
9350 && handled_component_p (TREE_OPERAND (op0, 0)))
9352 poly_int64 bitsize, bitpos;
9353 tree offset;
9354 machine_mode mode;
9355 int unsignedp, reversep, volatilep;
9356 tree base
9357 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9358 &offset, &mode, &unsignedp, &reversep,
9359 &volatilep);
9360 /* If the reference was to a (constant) zero offset, we can use
9361 the address of the base if it has the same base type
9362 as the result type and the pointer type is unqualified. */
9363 if (!offset
9364 && known_eq (bitpos, 0)
9365 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9366 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9367 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9368 return fold_convert_loc (loc, type,
9369 build_fold_addr_expr_loc (loc, base));
9372 if (TREE_CODE (op0) == MODIFY_EXPR
9373 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9374 /* Detect assigning a bitfield. */
9375 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9376 && DECL_BIT_FIELD
9377 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9379 /* Don't leave an assignment inside a conversion
9380 unless assigning a bitfield. */
9381 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9382 /* First do the assignment, then return converted constant. */
9383 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9384 suppress_warning (tem /* What warning? */);
9385 TREE_USED (tem) = 1;
9386 return tem;
9389 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9390 constants (if x has signed type, the sign bit cannot be set
9391 in c). This folds extension into the BIT_AND_EXPR.
9392 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9393 very likely don't have maximal range for their precision and this
9394 transformation effectively doesn't preserve non-maximal ranges. */
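/* E.g. for int x, (long)(x & 0xff) folds to (long)x & 0xff; the
extension is absorbed into the BIT_AND_EXPR because the constant
0xff does not have the sign bit of x's type set.  */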
9395 if (TREE_CODE (type) == INTEGER_TYPE
9396 && TREE_CODE (op0) == BIT_AND_EXPR
9397 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9399 tree and_expr = op0;
9400 tree and0 = TREE_OPERAND (and_expr, 0);
9401 tree and1 = TREE_OPERAND (and_expr, 1);
9402 int change = 0;
9404 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9405 || (TYPE_PRECISION (type)
9406 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9407 change = 1;
9408 else if (TYPE_PRECISION (TREE_TYPE (and1))
9409 <= HOST_BITS_PER_WIDE_INT
9410 && tree_fits_uhwi_p (and1))
9412 unsigned HOST_WIDE_INT cst;
9414 cst = tree_to_uhwi (and1);
9415 cst &= HOST_WIDE_INT_M1U
9416 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9417 change = (cst == 0);
9418 if (change
9419 && !flag_syntax_only
9420 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9421 == ZERO_EXTEND))
9423 tree uns = unsigned_type_for (TREE_TYPE (and0));
9424 and0 = fold_convert_loc (loc, uns, and0);
9425 and1 = fold_convert_loc (loc, uns, and1);
9428 if (change)
9430 tem = force_fit_type (type, wi::to_widest (and1), 0,
9431 TREE_OVERFLOW (and1));
9432 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9433 fold_convert_loc (loc, type, and0), tem);
9437 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9438 cast (T1)X will fold away. We assume that this happens when X itself
9439 is a cast. */
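/* E.g. for void *q, (char *)((int *)q p+ 4) becomes (char *)q p+ 4,
since the inner cast of q folds away.  */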
9440 if (POINTER_TYPE_P (type)
9441 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9442 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9444 tree arg00 = TREE_OPERAND (arg0, 0);
9445 tree arg01 = TREE_OPERAND (arg0, 1);
9447 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9448 when the pointed type needs higher alignment than
9449 the p+ first operand's pointed type. */
9450 if (!in_gimple_form
9451 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9452 && (min_align_of_type (TREE_TYPE (type))
9453 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9454 return NULL_TREE;
9456 arg00 = fold_convert_loc (loc, type, arg00);
9457 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9460 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9461 of the same precision, and X is an integer type not narrower than
9462 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
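/* E.g. for int x, (int)~(unsigned int)x folds to ~x: both types have
the same precision, so the inner cast is not an extension.  */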
9463 if (INTEGRAL_TYPE_P (type)
9464 && TREE_CODE (op0) == BIT_NOT_EXPR
9465 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9466 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9467 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9469 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9470 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9471 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9472 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9473 fold_convert_loc (loc, type, tem));
9476 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9477 type of X and Y (integer types only). */
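/* E.g. (signed char)(i * j) with int operands can become
(signed char)((unsigned char)i * (unsigned char)j); doing the
multiplication in the narrower unsigned type avoids introducing
new signed overflow.  */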
9478 if (INTEGRAL_TYPE_P (type)
9479 && TREE_CODE (op0) == MULT_EXPR
9480 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9481 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9483 /* Be careful not to introduce new overflows. */
9484 tree mult_type;
9485 if (TYPE_OVERFLOW_WRAPS (type))
9486 mult_type = type;
9487 else
9488 mult_type = unsigned_type_for (type);
9490 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9492 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9493 fold_convert_loc (loc, mult_type,
9494 TREE_OPERAND (op0, 0)),
9495 fold_convert_loc (loc, mult_type,
9496 TREE_OPERAND (op0, 1)));
9497 return fold_convert_loc (loc, type, tem);
9501 return NULL_TREE;
9503 case VIEW_CONVERT_EXPR:
9504 if (TREE_CODE (op0) == MEM_REF)
9506 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9507 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9508 tem = fold_build2_loc (loc, MEM_REF, type,
9509 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9510 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9511 return tem;
9514 return NULL_TREE;
9516 case NEGATE_EXPR:
9517 tem = fold_negate_expr (loc, arg0);
9518 if (tem)
9519 return fold_convert_loc (loc, type, tem);
9520 return NULL_TREE;
9522 case ABS_EXPR:
9523 /* Convert fabs((double)float) into (double)fabsf(float). */
9524 if (TREE_CODE (arg0) == NOP_EXPR
9525 && TREE_CODE (type) == REAL_TYPE)
9527 tree targ0 = strip_float_extensions (arg0);
9528 if (targ0 != arg0)
9529 return fold_convert_loc (loc, type,
9530 fold_build1_loc (loc, ABS_EXPR,
9531 TREE_TYPE (targ0),
9532 targ0));
9534 return NULL_TREE;
9536 case BIT_NOT_EXPR:
9537 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9538 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9539 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9540 fold_convert_loc (loc, type,
9541 TREE_OPERAND (arg0, 0)))))
9542 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9543 fold_convert_loc (loc, type,
9544 TREE_OPERAND (arg0, 1)));
9545 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9546 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9547 fold_convert_loc (loc, type,
9548 TREE_OPERAND (arg0, 1)))))
9549 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9550 fold_convert_loc (loc, type,
9551 TREE_OPERAND (arg0, 0)), tem);
9553 return NULL_TREE;
9555 case TRUTH_NOT_EXPR:
9556 /* Note that the operand of this must be an int
9557 and its values must be 0 or 1.
9558 ("true" is a fixed value perhaps depending on the language,
9559 but we don't handle values other than 1 correctly yet.) */
9560 tem = fold_truth_not_expr (loc, arg0);
9561 if (!tem)
9562 return NULL_TREE;
9563 return fold_convert_loc (loc, type, tem);
9565 case INDIRECT_REF:
9566 /* Fold *&X to X if X is an lvalue. */
9567 if (TREE_CODE (op0) == ADDR_EXPR)
9569 tree op00 = TREE_OPERAND (op0, 0);
9570 if ((VAR_P (op00)
9571 || TREE_CODE (op00) == PARM_DECL
9572 || TREE_CODE (op00) == RESULT_DECL)
9573 && !TREE_READONLY (op00))
9574 return op00;
9576 return NULL_TREE;
9578 default:
9579 return NULL_TREE;
9580 } /* switch (code) */
9584 /* If the operation was a conversion do _not_ mark a resulting constant
9585 with TREE_OVERFLOW if the original constant was not. These conversions
9586 have implementation defined behavior and retaining the TREE_OVERFLOW
9587 flag here would confuse later passes such as VRP. */
9588 tree
9589 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9590 tree type, tree op0)
9592 tree res = fold_unary_loc (loc, code, type, op0);
9593 if (res
9594 && TREE_CODE (res) == INTEGER_CST
9595 && TREE_CODE (op0) == INTEGER_CST
9596 && CONVERT_EXPR_CODE_P (code))
9597 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9599 return res;
9602 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9603 operands OP0 and OP1. LOC is the location of the resulting expression.
9604 ARG0 and ARG1 are the results of applying STRIP_NOPS to OP0 and OP1.
9605 Return the folded expression if folding is successful. Otherwise,
9606 return NULL_TREE. */
9607 static tree
9608 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9609 tree arg0, tree arg1, tree op0, tree op1)
9611 tree tem;
9613 /* We only do these simplifications if we are optimizing. */
9614 if (!optimize)
9615 return NULL_TREE;
9617 /* Check for things like (A || B) && (A || C). We can convert this
9618 to A || (B && C). Note that either operator can be any of the four
9619 truth and/or operations and the transformation will still be
9620 valid. Also note that we only care about order for the
9621 ANDIF and ORIF operators. If B contains side effects, this
9622 might change the truth-value of A. */
9623 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9624 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9625 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9626 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9627 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9628 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9630 tree a00 = TREE_OPERAND (arg0, 0);
9631 tree a01 = TREE_OPERAND (arg0, 1);
9632 tree a10 = TREE_OPERAND (arg1, 0);
9633 tree a11 = TREE_OPERAND (arg1, 1);
9634 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9635 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9636 && (code == TRUTH_AND_EXPR
9637 || code == TRUTH_OR_EXPR));
9639 if (operand_equal_p (a00, a10, 0))
9640 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9641 fold_build2_loc (loc, code, type, a01, a11));
9642 else if (commutative && operand_equal_p (a00, a11, 0))
9643 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9644 fold_build2_loc (loc, code, type, a01, a10));
9645 else if (commutative && operand_equal_p (a01, a10, 0))
9646 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9647 fold_build2_loc (loc, code, type, a00, a11));
9649 /* This case is tricky because we must either have commutative
9650 operators or else A10 must not have side-effects. */
9652 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9653 && operand_equal_p (a01, a11, 0))
9654 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9655 fold_build2_loc (loc, code, type, a00, a10),
9656 a01);
9659 /* See if we can build a range comparison. */
9660 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9661 return tem;
9663 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9664 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9666 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9667 if (tem)
9668 return fold_build2_loc (loc, code, type, tem, arg1);
9671 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9672 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9674 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9675 if (tem)
9676 return fold_build2_loc (loc, code, type, arg0, tem);
9679 /* Check for the possibility of merging component references. If our
9680 lhs is another similar operation, try to merge its rhs with our
9681 rhs. Then try to merge our lhs and rhs. */
9682 if (TREE_CODE (arg0) == code
9683 && (tem = fold_truth_andor_1 (loc, code, type,
9684 TREE_OPERAND (arg0, 1), arg1)) != 0)
9685 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9687 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9688 return tem;
9690 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9691 if (param_logical_op_non_short_circuit != -1)
9692 logical_op_non_short_circuit
9693 = param_logical_op_non_short_circuit;
9694 if (logical_op_non_short_circuit
9695 && !sanitize_coverage_p ()
9696 && (code == TRUTH_AND_EXPR
9697 || code == TRUTH_ANDIF_EXPR
9698 || code == TRUTH_OR_EXPR
9699 || code == TRUTH_ORIF_EXPR))
9701 enum tree_code ncode, icode;
9703 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9704 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9705 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9707 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9708 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9709 We don't want to pack more than two leaves into a non-IF AND/OR
9710 expression.
9711 If the tree code of the left-hand operand isn't an AND/OR-IF code and
9712 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
9713 If the inner right-hand side of the left-hand operand has
9714 side-effects, or isn't simple, then we can't add to it,
9715 as otherwise we might destroy the if-sequence. */
9716 if (TREE_CODE (arg0) == icode
9717 && simple_operand_p_2 (arg1)
9718 /* Needed for sequence points to handle traps and
9719 side-effects. */
9720 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9722 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9723 arg1);
9724 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9725 tem);
9727 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9728 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9729 else if (TREE_CODE (arg1) == icode
9730 && simple_operand_p_2 (arg0)
9731 /* Needed for sequence points to handle traps and
9732 side-effects. */
9733 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9735 tem = fold_build2_loc (loc, ncode, type,
9736 arg0, TREE_OPERAND (arg1, 0));
9737 return fold_build2_loc (loc, icode, type, tem,
9738 TREE_OPERAND (arg1, 1));
9740 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9741 into (A OR B).
9742 For sequence point consistency, we need to check for trapping,
9743 and side-effects. */
9744 else if (code == icode && simple_operand_p_2 (arg0)
9745 && simple_operand_p_2 (arg1))
9746 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9749 return NULL_TREE;
9752 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9753 by changing CODE to reduce the magnitude of constants involved in
9754 ARG0 of the comparison.
9755 Returns a canonicalized comparison tree if a simplification was
9756 possible, otherwise returns NULL_TREE.
9757 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9758 valid if signed overflow is undefined. */
9760 static tree
9761 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9762 tree arg0, tree arg1,
9763 bool *strict_overflow_p)
9765 enum tree_code code0 = TREE_CODE (arg0);
9766 tree t, cst0 = NULL_TREE;
9767 int sgn0;
9769 /* Match A +- CST code arg1. We can change this only if overflow
9770 is undefined. */
9771 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9772 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9773 /* In principle pointers also have undefined overflow behavior,
9774 but that causes problems elsewhere. */
9775 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9776 && (code0 == MINUS_EXPR
9777 || code0 == PLUS_EXPR)
9778 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9779 return NULL_TREE;
9781 /* Identify the constant in arg0 and its sign. */
9782 cst0 = TREE_OPERAND (arg0, 1);
9783 sgn0 = tree_int_cst_sgn (cst0);
9785 /* Overflowed constants and zero will cause problems. */
9786 if (integer_zerop (cst0)
9787 || TREE_OVERFLOW (cst0))
9788 return NULL_TREE;
9790 /* See if we can reduce the magnitude of the constant in
9791 arg0 by changing the comparison code. */
9792 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9793 if (code == LT_EXPR
9794 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9795 code = LE_EXPR;
9796 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9797 else if (code == GT_EXPR
9798 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9799 code = GE_EXPR;
9800 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9801 else if (code == LE_EXPR
9802 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9803 code = LT_EXPR;
9804 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9805 else if (code == GE_EXPR
9806 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9807 code = GT_EXPR;
9808 else
9809 return NULL_TREE;
9810 *strict_overflow_p = true;
9812 /* Now build the constant reduced in magnitude. But not if that
9813 would produce one outside of its type's range. */
9814 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9815 && ((sgn0 == 1
9816 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9817 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9818 || (sgn0 == -1
9819 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9820 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9821 return NULL_TREE;
9823 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9824 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9825 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9826 t = fold_convert (TREE_TYPE (arg1), t);
9828 return fold_build2_loc (loc, code, type, t, arg1);
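/* For example, this routine turns the signed comparison x + 3 > y
into x + 2 >= y, reducing the magnitude of the constant by one.  */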
9831 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9832 overflow further. Try to decrease the magnitude of constants involved
9833 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9834 and put sole constants at the second argument position.
9835 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9837 static tree
9838 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9839 tree arg0, tree arg1)
9841 tree t;
9842 bool strict_overflow_p;
9843 const char * const warnmsg = G_("assuming signed overflow does not occur "
9844 "when reducing constant in comparison");
9846 /* Try canonicalization by simplifying arg0. */
9847 strict_overflow_p = false;
9848 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9849 &strict_overflow_p);
9850 if (t)
9852 if (strict_overflow_p)
9853 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9854 return t;
9857 /* Try canonicalization by simplifying arg1 using the swapped
9858 comparison. */
9859 code = swap_tree_comparison (code);
9860 strict_overflow_p = false;
9861 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9862 &strict_overflow_p);
9863 if (t && strict_overflow_p)
9864 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9865 return t;
9868 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9869 space. This is used to avoid issuing overflow warnings for
9870 expressions like &p->x which cannot wrap. */
9872 static bool
9873 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9875 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9876 return true;
9878 if (maybe_lt (bitpos, 0))
9879 return true;
9881 poly_wide_int wi_offset;
9882 int precision = TYPE_PRECISION (TREE_TYPE (base));
9883 if (offset == NULL_TREE)
9884 wi_offset = wi::zero (precision);
9885 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9886 return true;
9887 else
9888 wi_offset = wi::to_poly_wide (offset);
9890 wi::overflow_type overflow;
9891 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9892 precision);
9893 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9894 if (overflow)
9895 return true;
9897 poly_uint64 total_hwi, size;
9898 if (!total.to_uhwi (&total_hwi)
9899 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9900 &size)
9901 || known_eq (size, 0U))
9902 return true;
9904 if (known_le (total_hwi, size))
9905 return false;
9907 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9908 array. */
9909 if (TREE_CODE (base) == ADDR_EXPR
9910 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9911 &size)
9912 && maybe_ne (size, 0U)
9913 && known_le (total_hwi, size))
9914 return false;
9916 return true;
9919 /* Return a positive integer when the symbol DECL is known to have
9920 a nonzero address, zero when it's known not to (e.g., it's a weak
9921 symbol), and a negative integer when the symbol is not yet in the
9922 symbol table and so whether or not its address is zero is unknown.
9923 For function-local objects, always return a positive integer. */
9924 static int
9925 maybe_nonzero_address (tree decl)
9927 if (DECL_P (decl) && decl_in_symtab_p (decl))
9928 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9929 return symbol->nonzero_address ();
9931 /* Function local objects are never NULL. */
9932 if (DECL_P (decl)
9933 && (DECL_CONTEXT (decl)
9934 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9935 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9936 return 1;
9938 return -1;
9941 /* Subroutine of fold_binary. This routine performs all of the
9942 transformations that are common to the equality/inequality
9943 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9944 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9945 fold_binary itself should go through fold_binary. Fold a comparison with
9946 tree code CODE and type TYPE with operands OP0 and OP1. Return
9947 the folded comparison or NULL_TREE. */
9949 static tree
9950 fold_comparison (location_t loc, enum tree_code code, tree type,
9951 tree op0, tree op1)
9953 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9954 tree arg0, arg1, tem;
9956 arg0 = op0;
9957 arg1 = op1;
9959 STRIP_SIGN_NOPS (arg0);
9960 STRIP_SIGN_NOPS (arg1);
9962 /* For comparisons of pointers we can decompose it to a compile time
9963 comparison of the base objects and the offsets into the object.
9964 This requires at least one operand being an ADDR_EXPR or a
9965 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
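/* E.g. &s.a < &s.b reduces to a compile-time comparison of the two
member offsets within s.  */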
9966 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9967 && (TREE_CODE (arg0) == ADDR_EXPR
9968 || TREE_CODE (arg1) == ADDR_EXPR
9969 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9970 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9972 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9973 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9974 machine_mode mode;
9975 int volatilep, reversep, unsignedp;
9976 bool indirect_base0 = false, indirect_base1 = false;
9978 /* Get base and offset for the access. Strip ADDR_EXPR for
9979 get_inner_reference, but put it back by stripping INDIRECT_REF
9980 off the base object if possible. indirect_baseN will be true
9981 if baseN is not an address but refers to the object itself. */
9982 base0 = arg0;
9983 if (TREE_CODE (arg0) == ADDR_EXPR)
9985 base0
9986 = get_inner_reference (TREE_OPERAND (arg0, 0),
9987 &bitsize, &bitpos0, &offset0, &mode,
9988 &unsignedp, &reversep, &volatilep);
9989 if (TREE_CODE (base0) == INDIRECT_REF)
9990 base0 = TREE_OPERAND (base0, 0);
9991 else
9992 indirect_base0 = true;
9994 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9996 base0 = TREE_OPERAND (arg0, 0);
9997 STRIP_SIGN_NOPS (base0);
9998 if (TREE_CODE (base0) == ADDR_EXPR)
10000 base0
10001 = get_inner_reference (TREE_OPERAND (base0, 0),
10002 &bitsize, &bitpos0, &offset0, &mode,
10003 &unsignedp, &reversep, &volatilep);
10004 if (TREE_CODE (base0) == INDIRECT_REF)
10005 base0 = TREE_OPERAND (base0, 0);
10006 else
10007 indirect_base0 = true;
10009 if (offset0 == NULL_TREE || integer_zerop (offset0))
10010 offset0 = TREE_OPERAND (arg0, 1);
10011 else
10012 offset0 = size_binop (PLUS_EXPR, offset0,
10013 TREE_OPERAND (arg0, 1));
10014 if (poly_int_tree_p (offset0))
10016 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10017 TYPE_PRECISION (sizetype));
10018 tem <<= LOG2_BITS_PER_UNIT;
10019 tem += bitpos0;
10020 if (tem.to_shwi (&bitpos0))
10021 offset0 = NULL_TREE;
10025 base1 = arg1;
10026 if (TREE_CODE (arg1) == ADDR_EXPR)
10028 base1
10029 = get_inner_reference (TREE_OPERAND (arg1, 0),
10030 &bitsize, &bitpos1, &offset1, &mode,
10031 &unsignedp, &reversep, &volatilep);
10032 if (TREE_CODE (base1) == INDIRECT_REF)
10033 base1 = TREE_OPERAND (base1, 0);
10034 else
10035 indirect_base1 = true;
10037 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10039 base1 = TREE_OPERAND (arg1, 0);
10040 STRIP_SIGN_NOPS (base1);
10041 if (TREE_CODE (base1) == ADDR_EXPR)
10043 base1
10044 = get_inner_reference (TREE_OPERAND (base1, 0),
10045 &bitsize, &bitpos1, &offset1, &mode,
10046 &unsignedp, &reversep, &volatilep);
10047 if (TREE_CODE (base1) == INDIRECT_REF)
10048 base1 = TREE_OPERAND (base1, 0);
10049 else
10050 indirect_base1 = true;
10052 if (offset1 == NULL_TREE || integer_zerop (offset1))
10053 offset1 = TREE_OPERAND (arg1, 1);
10054 else
10055 offset1 = size_binop (PLUS_EXPR, offset1,
10056 TREE_OPERAND (arg1, 1));
10057 if (poly_int_tree_p (offset1))
10059 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10060 TYPE_PRECISION (sizetype));
10061 tem <<= LOG2_BITS_PER_UNIT;
10062 tem += bitpos1;
10063 if (tem.to_shwi (&bitpos1))
10064 offset1 = NULL_TREE;
10068 /* If we have equivalent bases we might be able to simplify. */
10069 if (indirect_base0 == indirect_base1
10070 && operand_equal_p (base0, base1,
10071 indirect_base0 ? OEP_ADDRESS_OF : 0))
10073 /* We can fold this expression to a constant if the non-constant
10074 offset parts are equal. */
10075 if ((offset0 == offset1
10076 || (offset0 && offset1
10077 && operand_equal_p (offset0, offset1, 0)))
10078 && (equality_code
10079 || (indirect_base0
10080 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10081 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10083 if (!equality_code
10084 && maybe_ne (bitpos0, bitpos1)
10085 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10086 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10087 fold_overflow_warning (("assuming pointer wraparound does not "
10088 "occur when comparing P +- C1 with "
10089 "P +- C2"),
10090 WARN_STRICT_OVERFLOW_CONDITIONAL);
10092 switch (code)
10094 case EQ_EXPR:
10095 if (known_eq (bitpos0, bitpos1))
10096 return constant_boolean_node (true, type);
10097 if (known_ne (bitpos0, bitpos1))
10098 return constant_boolean_node (false, type);
10099 break;
10100 case NE_EXPR:
10101 if (known_ne (bitpos0, bitpos1))
10102 return constant_boolean_node (true, type);
10103 if (known_eq (bitpos0, bitpos1))
10104 return constant_boolean_node (false, type);
10105 break;
10106 case LT_EXPR:
10107 if (known_lt (bitpos0, bitpos1))
10108 return constant_boolean_node (true, type);
10109 if (known_ge (bitpos0, bitpos1))
10110 return constant_boolean_node (false, type);
10111 break;
10112 case LE_EXPR:
10113 if (known_le (bitpos0, bitpos1))
10114 return constant_boolean_node (true, type);
10115 if (known_gt (bitpos0, bitpos1))
10116 return constant_boolean_node (false, type);
10117 break;
10118 case GE_EXPR:
10119 if (known_ge (bitpos0, bitpos1))
10120 return constant_boolean_node (true, type);
10121 if (known_lt (bitpos0, bitpos1))
10122 return constant_boolean_node (false, type);
10123 break;
10124 case GT_EXPR:
10125 if (known_gt (bitpos0, bitpos1))
10126 return constant_boolean_node (true, type);
10127 if (known_le (bitpos0, bitpos1))
10128 return constant_boolean_node (false, type);
10129 break;
10130 default:;
10133 /* We can simplify the comparison to a comparison of the variable
10134 offset parts if the constant offset parts are equal.
10135 Be careful to use signed sizetype here because otherwise we
10136 mess with array offsets in the wrong way. This is possible
10137 because pointer arithmetic is restricted to remain within an
10138 object and overflow on pointer differences is undefined as of
10139 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10140 else if (known_eq (bitpos0, bitpos1)
10141 && (equality_code
10142 || (indirect_base0
10143 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10144 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10146 /* By converting to signed sizetype we cover middle-end pointer
10147 arithmetic, which operates on unsigned pointer types of sizetype's
10148 size, and ARRAY_REF offsets, which are properly sign- or
10149 zero-extended from their type in case it is narrower than
10150 sizetype. */
10151 if (offset0 == NULL_TREE)
10152 offset0 = build_int_cst (ssizetype, 0);
10153 else
10154 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10155 if (offset1 == NULL_TREE)
10156 offset1 = build_int_cst (ssizetype, 0);
10157 else
10158 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10160 if (!equality_code
10161 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10162 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10163 fold_overflow_warning (("assuming pointer wraparound does not "
10164 "occur when comparing P +- C1 with "
10165 "P +- C2"),
10166 WARN_STRICT_OVERFLOW_COMPARISON);
10168 return fold_build2_loc (loc, code, type, offset0, offset1);
10171 /* For equal offsets we can simplify to a comparison of the
10172 base addresses. */
10173 else if (known_eq (bitpos0, bitpos1)
10174 && (indirect_base0
10175 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10176 && (indirect_base1
10177 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10178 && ((offset0 == offset1)
10179 || (offset0 && offset1
10180 && operand_equal_p (offset0, offset1, 0))))
10182 if (indirect_base0)
10183 base0 = build_fold_addr_expr_loc (loc, base0);
10184 if (indirect_base1)
10185 base1 = build_fold_addr_expr_loc (loc, base1);
10186 return fold_build2_loc (loc, code, type, base0, base1);
10188 /* Comparison between an ordinary (non-weak) symbol and a null
10189 pointer can be eliminated since such symbols must have a non
10190 null address. In C, relational expressions between pointers
10191 to objects and null pointers are undefined. The results
10192 below follow the C++ rules with the additional property that
10193 every object pointer compares greater than a null pointer.
10195 else if (((DECL_P (base0)
10196 && maybe_nonzero_address (base0) > 0
10197 /* Avoid folding references to struct members at offset 0 to
10198 prevent tests like '&ptr->firstmember == 0' from getting
10199 eliminated. When ptr is null, although the -> expression
10200 is strictly speaking invalid, GCC retains it as a matter
10201 of QoI. See PR c/44555. */
10202 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10203 || CONSTANT_CLASS_P (base0))
10204 && indirect_base0
10205 /* The caller guarantees that when one of the arguments is
10206 constant (i.e., null in this case) it is second. */
10207 && integer_zerop (arg1))
10209 switch (code)
10211 case EQ_EXPR:
10212 case LE_EXPR:
10213 case LT_EXPR:
10214 return constant_boolean_node (false, type);
10215 case GE_EXPR:
10216 case GT_EXPR:
10217 case NE_EXPR:
10218 return constant_boolean_node (true, type);
10219 default:
10220 gcc_unreachable ();
10225 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10226 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10227 the resulting offset is smaller in absolute value than the
10228 original one and has the same sign. */
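/* E.g. x + 10 < y + 2 becomes x + 8 < y: the combined constant 8 is
smaller in magnitude than 10 and has the same sign.  */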
10229 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10230 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10231 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10232 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10233 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10234 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10235 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10236 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10238 tree const1 = TREE_OPERAND (arg0, 1);
10239 tree const2 = TREE_OPERAND (arg1, 1);
10240 tree variable1 = TREE_OPERAND (arg0, 0);
10241 tree variable2 = TREE_OPERAND (arg1, 0);
10242 tree cst;
10243 const char * const warnmsg = G_("assuming signed overflow does not "
10244 "occur when combining constants around "
10245 "a comparison");
10247 /* Put the constant on the side where it doesn't overflow and is
10248 of lower absolute value and of the same sign as before. */
10249 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10250 ? MINUS_EXPR : PLUS_EXPR,
10251 const2, const1);
10252 if (!TREE_OVERFLOW (cst)
10253 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10254 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10256 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10257 return fold_build2_loc (loc, code, type,
10258 variable1,
10259 fold_build2_loc (loc, TREE_CODE (arg1),
10260 TREE_TYPE (arg1),
10261 variable2, cst));
10264 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10265 ? MINUS_EXPR : PLUS_EXPR,
10266 const1, const2);
10267 if (!TREE_OVERFLOW (cst)
10268 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10269 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10271 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10272 return fold_build2_loc (loc, code, type,
10273 fold_build2_loc (loc, TREE_CODE (arg0),
10274 TREE_TYPE (arg0),
10275 variable1, cst),
10276 variable2);
10280 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10281 if (tem)
10282 return tem;
10284 /* If we are comparing an expression that just has comparisons
10285 of two integer values, arithmetic expressions of those comparisons,
10286 and constants, we can simplify it. There are only three cases
10287 to check: the two values can either be equal, the first can be
10288 greater, or the second can be greater. Fold the expression for
10289 those three values. Since each value must be 0 or 1, we have
10290 eight possibilities, each of which corresponds to the constant 0
10291 or 1 or one of the six possible comparisons.
10293 This handles common cases like (a > b) == 0 but also handles
10294 expressions like ((x > y) - (y > x)) > 0, which supposedly
10295 occur in macroized code. */
10297 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10299 tree cval1 = 0, cval2 = 0;
10301 if (twoval_comparison_p (arg0, &cval1, &cval2)
10302 /* Don't handle degenerate cases here; they should already
10303 have been handled anyway. */
10304 && cval1 != 0 && cval2 != 0
10305 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10306 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10307 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10308 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10309 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10310 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10311 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10313 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10314 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10316 /* We can't just pass T to eval_subst in case cval1 or cval2
10317 was the same as ARG1. */
10319 tree high_result
10320 = fold_build2_loc (loc, code, type,
10321 eval_subst (loc, arg0, cval1, maxval,
10322 cval2, minval),
10323 arg1);
10324 tree equal_result
10325 = fold_build2_loc (loc, code, type,
10326 eval_subst (loc, arg0, cval1, maxval,
10327 cval2, maxval),
10328 arg1);
10329 tree low_result
10330 = fold_build2_loc (loc, code, type,
10331 eval_subst (loc, arg0, cval1, minval,
10332 cval2, maxval),
10333 arg1);
10335 /* All three of these results should be 0 or 1. Confirm they are.
10336 Then use those values to select the proper code to use. */
10338 if (TREE_CODE (high_result) == INTEGER_CST
10339 && TREE_CODE (equal_result) == INTEGER_CST
10340 && TREE_CODE (low_result) == INTEGER_CST)
10342 /* Make a 3-bit mask with the high-order bit being the
10343 value for `>', the next for '=', and the low for '<'. */
10344 switch ((integer_onep (high_result) * 4)
10345 + (integer_onep (equal_result) * 2)
10346 + integer_onep (low_result))
10348 case 0:
10349 /* Always false. */
10350 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10351 case 1:
10352 code = LT_EXPR;
10353 break;
10354 case 2:
10355 code = EQ_EXPR;
10356 break;
10357 case 3:
10358 code = LE_EXPR;
10359 break;
10360 case 4:
10361 code = GT_EXPR;
10362 break;
10363 case 5:
10364 code = NE_EXPR;
10365 break;
10366 case 6:
10367 code = GE_EXPR;
10368 break;
10369 case 7:
10370 /* Always true. */
10371 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10374 return fold_build2_loc (loc, code, type, cval1, cval2);
10379 return NULL_TREE;
10383 /* Subroutine of fold_binary. Optimize complex multiplications of the
10384 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10385 argument EXPR represents the expression "z" of type TYPE. */
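/* The algebra: for z = a + b*i, conj(z) = a - b*i, so
z * conj(z) = a*a + b*b with a zero imaginary part; the code below
builds exactly that COMPLEX_EXPR.  */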
10387 static tree
10388 fold_mult_zconjz (location_t loc, tree type, tree expr)
10390 tree itype = TREE_TYPE (type);
10391 tree rpart, ipart, tem;
10393 if (TREE_CODE (expr) == COMPLEX_EXPR)
10395 rpart = TREE_OPERAND (expr, 0);
10396 ipart = TREE_OPERAND (expr, 1);
10398 else if (TREE_CODE (expr) == COMPLEX_CST)
10400 rpart = TREE_REALPART (expr);
10401 ipart = TREE_IMAGPART (expr);
10403 else
10405 expr = save_expr (expr);
10406 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10407 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10410 rpart = save_expr (rpart);
10411 ipart = save_expr (ipart);
10412 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10413 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10414 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10415 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10416 build_zero_cst (itype));
10420 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10421 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10422 true if successful. */
10424 static bool
10425 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10427 unsigned HOST_WIDE_INT i, nunits;
10429 if (TREE_CODE (arg) == VECTOR_CST
10430 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10432 for (i = 0; i < nunits; ++i)
10433 elts[i] = VECTOR_CST_ELT (arg, i);
10435 else if (TREE_CODE (arg) == CONSTRUCTOR)
10437 constructor_elt *elt;
10439 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10440 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10441 return false;
10442 else
10443 elts[i] = elt->value;
10445 else
10446 return false;
10447 for (; i < nelts; i++)
10448 elts[i]
10449 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10450 return true;
10453 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10454 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10455 NULL_TREE otherwise. */
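/* E.g. selecting from {1,2,3,4} and {5,6,7,8} with selector {0,4,1,5}
(indices 0..3 pick from ARG0, 4..7 from ARG1) yields {1,5,2,6}.  */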
10457 tree
10458 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10460 unsigned int i;
10461 unsigned HOST_WIDE_INT nelts;
10462 bool need_ctor = false;
10464 if (!sel.length ().is_constant (&nelts))
10465 return NULL_TREE;
10466 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10467 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10468 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10469 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10470 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10471 return NULL_TREE;
10473 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10474 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10475 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10476 return NULL_TREE;
10478 tree_vector_builder out_elts (type, nelts, 1);
10479 for (i = 0; i < nelts; i++)
10481 HOST_WIDE_INT index;
10482 if (!sel[i].is_constant (&index))
10483 return NULL_TREE;
10484 if (!CONSTANT_CLASS_P (in_elts[index]))
10485 need_ctor = true;
10486 out_elts.quick_push (unshare_expr (in_elts[index]));
10489 if (need_ctor)
10491 vec<constructor_elt, va_gc> *v;
10492 vec_alloc (v, nelts);
10493 for (i = 0; i < nelts; i++)
10494 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10495 return build_constructor (type, v);
10497 else
10498 return out_elts.build ();
10501 /* Try to fold a pointer difference of type TYPE between two address
10502 expressions of array references AREF0 and AREF1 using location LOC. Return a
10503 simplified expression for the difference or NULL_TREE. */
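/* E.g. &a[i] - &a[j] simplifies to (i - j) * sizeof (a[0]) once the
equal bases cancel.  */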
10505 static tree
10506 fold_addr_of_array_ref_difference (location_t loc, tree type,
10507 tree aref0, tree aref1,
10508 bool use_pointer_diff)
10510 tree base0 = TREE_OPERAND (aref0, 0);
10511 tree base1 = TREE_OPERAND (aref1, 0);
10512 tree base_offset = build_int_cst (type, 0);
10514 /* If the bases are array references as well, recurse. If the bases
10515 are pointer indirections compute the difference of the pointers.
10516 If the bases are equal, we are set. */
10517 if ((TREE_CODE (base0) == ARRAY_REF
10518 && TREE_CODE (base1) == ARRAY_REF
10519 && (base_offset
10520 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10521 use_pointer_diff)))
10522 || (INDIRECT_REF_P (base0)
10523 && INDIRECT_REF_P (base1)
10524 && (base_offset
10525 = use_pointer_diff
10526 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10527 TREE_OPERAND (base0, 0),
10528 TREE_OPERAND (base1, 0))
10529 : fold_binary_loc (loc, MINUS_EXPR, type,
10530 fold_convert (type,
10531 TREE_OPERAND (base0, 0)),
10532 fold_convert (type,
10533 TREE_OPERAND (base1, 0)))))
10534 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10536 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10537 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10538 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10539 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10540 return fold_build2_loc (loc, PLUS_EXPR, type,
10541 base_offset,
10542 fold_build2_loc (loc, MULT_EXPR, type,
10543 diff, esz));
10545 return NULL_TREE;
10548 /* If the real or vector real constant CST of type TYPE has an exact
10549 inverse, return it, else return NULL. */
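/* E.g. 0.25 yields 4.0, whereas 3.0 yields NULL because 1/3 is not
exactly representable in binary floating point.  */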
10551 tree
10552 exact_inverse (tree type, tree cst)
10554 REAL_VALUE_TYPE r;
10555 tree unit_type;
10556 machine_mode mode;
10558 switch (TREE_CODE (cst))
10560 case REAL_CST:
10561 r = TREE_REAL_CST (cst);
10563 if (exact_real_inverse (TYPE_MODE (type), &r))
10564 return build_real (type, r);
10566 return NULL_TREE;
10568 case VECTOR_CST:
10570 unit_type = TREE_TYPE (type);
10571 mode = TYPE_MODE (unit_type);
10573 tree_vector_builder elts;
10574 if (!elts.new_unary_operation (type, cst, false))
10575 return NULL_TREE;
10576 unsigned int count = elts.encoded_nelts ();
10577 for (unsigned int i = 0; i < count; ++i)
10579 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10580 if (!exact_real_inverse (mode, &r))
10581 return NULL_TREE;
10582 elts.quick_push (build_real (unit_type, r));
10585 return elts.build ();
10588 default:
10589 return NULL_TREE;
10593 /* Mask out the tz least significant bits of X of type TYPE where
10594 tz is the number of trailing zeroes in Y. */
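/* E.g. for Y = 8, which has three trailing zeroes, the result is
X & ~7, i.e. X with its low three bits cleared.  */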
10595 static wide_int
10596 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10598 int tz = wi::ctz (y);
10599 if (tz > 0)
10600 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10601 return x;
10604 /* Return true when T is an address and is known to be nonzero.
10605 For floating point we further ensure that T is not denormal.
10606 Similar logic is present in nonzero_address in rtlanal.h.
10608 If the return value is based on the assumption that signed overflow
10609 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10610 change *STRICT_OVERFLOW_P. */
10612 static bool
10613 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10615 tree type = TREE_TYPE (t);
10616 enum tree_code code;
10618 /* Doing something useful for floating point would need more work. */
10619 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10620 return false;
10622 code = TREE_CODE (t);
10623 switch (TREE_CODE_CLASS (code))
10625 case tcc_unary:
10626 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10627 strict_overflow_p);
10628 case tcc_binary:
10629 case tcc_comparison:
10630 return tree_binary_nonzero_warnv_p (code, type,
10631 TREE_OPERAND (t, 0),
10632 TREE_OPERAND (t, 1),
10633 strict_overflow_p);
10634 case tcc_constant:
10635 case tcc_declaration:
10636 case tcc_reference:
10637 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10639 default:
10640 break;
10643 switch (code)
10645 case TRUTH_NOT_EXPR:
10646 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10647 strict_overflow_p);
10649 case TRUTH_AND_EXPR:
10650 case TRUTH_OR_EXPR:
10651 case TRUTH_XOR_EXPR:
10652 return tree_binary_nonzero_warnv_p (code, type,
10653 TREE_OPERAND (t, 0),
10654 TREE_OPERAND (t, 1),
10655 strict_overflow_p);
10657 case COND_EXPR:
10658 case CONSTRUCTOR:
10659 case OBJ_TYPE_REF:
10660 case ASSERT_EXPR:
10661 case ADDR_EXPR:
10662 case WITH_SIZE_EXPR:
10663 case SSA_NAME:
10664 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10666 case COMPOUND_EXPR:
10667 case MODIFY_EXPR:
10668 case BIND_EXPR:
10669 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10670 strict_overflow_p);
10672 case SAVE_EXPR:
10673 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10674 strict_overflow_p);
10676 case CALL_EXPR:
10678 tree fndecl = get_callee_fndecl (t);
10679 if (!fndecl) return false;
10680 if (flag_delete_null_pointer_checks && !flag_check_new
10681 && DECL_IS_OPERATOR_NEW_P (fndecl)
10682 && !TREE_NOTHROW (fndecl))
10683 return true;
10684 if (flag_delete_null_pointer_checks
10685 && lookup_attribute ("returns_nonnull",
10686 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10687 return true;
10688 return alloca_call_p (t);
10691 default:
10692 break;
10694 return false;
10697 /* Return true when T is an address and is known to be nonzero.
10698 Handle warnings about undefined signed overflow. */
10700 bool
10701 tree_expr_nonzero_p (tree t)
10703 bool ret, strict_overflow_p;
10705 strict_overflow_p = false;
10706 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10707 if (strict_overflow_p)
10708 fold_overflow_warning (("assuming signed overflow does not occur when "
10709 "determining that expression is always "
10710 "non-zero"),
10711 WARN_STRICT_OVERFLOW_MISC);
10712 return ret;
10715 /* Return true if T is known not to be equal to an integer W. */
10717 bool
10718 expr_not_equal_to (tree t, const wide_int &w)
10720 value_range vr;
10721 switch (TREE_CODE (t))
10723 case INTEGER_CST:
10724 return wi::to_wide (t) != w;
10726 case SSA_NAME:
10727 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10728 return false;
10730 if (cfun)
10731 get_range_query (cfun)->range_of_expr (vr, t);
10732 else
10733 get_global_range_query ()->range_of_expr (vr, t);
10735 if (!vr.undefined_p ()
10736 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10737 return true;
10738 /* If T has some known zero bits and W has any of those bits set,
10739 then T is known not to be equal to W. */
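/* E.g. if only the low four bits of T can be nonzero, T cannot
equal 16.  */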
10740 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10741 TYPE_PRECISION (TREE_TYPE (t))), 0))
10742 return true;
10743 return false;
10745 default:
10746 return false;
10750 /* Fold a binary expression of code CODE and type TYPE with operands
10751 OP0 and OP1. LOC is the location of the resulting expression.
10752 Return the folded expression if folding is successful. Otherwise,
10753 return NULL_TREE. */
10755 tree
10756 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10757 tree op0, tree op1)
10759 enum tree_code_class kind = TREE_CODE_CLASS (code);
10760 tree arg0, arg1, tem;
10761 tree t1 = NULL_TREE;
10762 bool strict_overflow_p;
10763 unsigned int prec;
10765 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10766 && TREE_CODE_LENGTH (code) == 2
10767 && op0 != NULL_TREE
10768 && op1 != NULL_TREE);
10770 arg0 = op0;
10771 arg1 = op1;
10773 /* Strip any conversions that don't change the mode. This is
10774 safe for every expression, except for a comparison expression
10775 because its signedness is derived from its operands. So, in
10776 the latter case, only strip conversions that don't change the
10777 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10778 preserved.
10780 Note that this is done as an internal manipulation within the
10781 constant folder, in order to find the simplest representation
10782 of the arguments so that their form can be studied. In any
10783 case, the appropriate type conversions should be put back in
10784 the tree that will get out of the constant folder. */
10786 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10788 STRIP_SIGN_NOPS (arg0);
10789 STRIP_SIGN_NOPS (arg1);
10791 else
10793 STRIP_NOPS (arg0);
10794 STRIP_NOPS (arg1);
10797 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10798 constant but we can't do arithmetic on them. */
10799 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10801 tem = const_binop (code, type, arg0, arg1);
10802 if (tem != NULL_TREE)
10804 if (TREE_TYPE (tem) != type)
10805 tem = fold_convert_loc (loc, type, tem);
10806 return tem;
10810 /* If this is a commutative operation, and ARG0 is a constant, move it
10811 to ARG1 to reduce the number of tests below. */
10812 if (commutative_tree_code (code)
10813 && tree_swap_operands_p (arg0, arg1))
10814 return fold_build2_loc (loc, code, type, op1, op0);
10816 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10817 to ARG1 to reduce the number of tests below. */
10818 if (kind == tcc_comparison
10819 && tree_swap_operands_p (arg0, arg1))
10820 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10822 tem = generic_simplify (loc, code, type, op0, op1);
10823 if (tem)
10824 return tem;
10826 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10828 First check for cases where an arithmetic operation is applied to a
10829 compound, conditional, or comparison operation. Push the arithmetic
10830 operation inside the compound or conditional to see if any folding
10831 can then be done. Convert comparison to conditional for this purpose.
10832 This also optimizes non-constant cases that used to be done in
10833 expand_expr.
10835 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10836 where one of the operands is a comparison and the other is a comparison, a
10837 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10838 code below would make the expression more complex. Change it to a
10839 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10840 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10842 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10843 || code == EQ_EXPR || code == NE_EXPR)
10844 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10845 && ((truth_value_p (TREE_CODE (arg0))
10846 && (truth_value_p (TREE_CODE (arg1))
10847 || (TREE_CODE (arg1) == BIT_AND_EXPR
10848 && integer_onep (TREE_OPERAND (arg1, 1)))))
10849 || (truth_value_p (TREE_CODE (arg1))
10850 && (truth_value_p (TREE_CODE (arg0))
10851 || (TREE_CODE (arg0) == BIT_AND_EXPR
10852 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10854 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10855 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10856 : TRUTH_XOR_EXPR,
10857 boolean_type_node,
10858 fold_convert_loc (loc, boolean_type_node, arg0),
10859 fold_convert_loc (loc, boolean_type_node, arg1));
10861 if (code == EQ_EXPR)
10862 tem = invert_truthvalue_loc (loc, tem);
10864 return fold_convert_loc (loc, type, tem);
10867 if (TREE_CODE_CLASS (code) == tcc_binary
10868 || TREE_CODE_CLASS (code) == tcc_comparison)
10870 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10872 tem = fold_build2_loc (loc, code, type,
10873 fold_convert_loc (loc, TREE_TYPE (op0),
10874 TREE_OPERAND (arg0, 1)), op1);
10875 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10876 tem);
10878 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10880 tem = fold_build2_loc (loc, code, type, op0,
10881 fold_convert_loc (loc, TREE_TYPE (op1),
10882 TREE_OPERAND (arg1, 1)));
10883 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10884 tem);
10887 if (TREE_CODE (arg0) == COND_EXPR
10888 || TREE_CODE (arg0) == VEC_COND_EXPR
10889 || COMPARISON_CLASS_P (arg0))
10891 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10892 arg0, arg1,
10893 /*cond_first_p=*/1);
10894 if (tem != NULL_TREE)
10895 return tem;
10898 if (TREE_CODE (arg1) == COND_EXPR
10899 || TREE_CODE (arg1) == VEC_COND_EXPR
10900 || COMPARISON_CLASS_P (arg1))
10902 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10903 arg1, arg0,
10904 /*cond_first_p=*/0);
10905 if (tem != NULL_TREE)
10906 return tem;
10910 switch (code)
10912 case MEM_REF:
10913 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10914 if (TREE_CODE (arg0) == ADDR_EXPR
10915 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10917 tree iref = TREE_OPERAND (arg0, 0);
10918 return fold_build2 (MEM_REF, type,
10919 TREE_OPERAND (iref, 0),
10920 int_const_binop (PLUS_EXPR, arg1,
10921 TREE_OPERAND (iref, 1)));
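/* E.g. MEM[&MEM[p, 4], 8] folds to MEM[p, 12] (hypothetical byte
   offsets): the inner and outer constant offsets are simply summed
   by int_const_binop.  */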
10924 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10925 if (TREE_CODE (arg0) == ADDR_EXPR
10926 && handled_component_p (TREE_OPERAND (arg0, 0)))
10928 tree base;
10929 poly_int64 coffset;
10930 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10931 &coffset);
10932 if (!base)
10933 return NULL_TREE;
10934 return fold_build2 (MEM_REF, type,
10935 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10936 int_const_binop (PLUS_EXPR, arg1,
10937 size_int (coffset)));
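/* For instance, given a hypothetical "struct { int a; int b; } s"
   with 4-byte int, MEM[&s.b, 4] folds to MEM[&s, 8]:
   get_addr_base_and_unit_offset supplies the constant byte offset of
   the component, which is added to the original offset.  */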
10940 return NULL_TREE;
10942 case POINTER_PLUS_EXPR:
10943 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10944 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10945 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10946 return fold_convert_loc (loc, type,
10947 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10948 fold_convert_loc (loc, sizetype,
10949 arg1),
10950 fold_convert_loc (loc, sizetype,
10951 arg0)));
10953 return NULL_TREE;
10955 case PLUS_EXPR:
10956 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10958 /* X + (X / CST) * -CST is X % CST. */
10959 if (TREE_CODE (arg1) == MULT_EXPR
10960 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10961 && operand_equal_p (arg0,
10962 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10964 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10965 tree cst1 = TREE_OPERAND (arg1, 1);
10966 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10967 cst1, cst0);
10968 if (sum && integer_zerop (sum))
10969 return fold_convert_loc (loc, type,
10970 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10971 TREE_TYPE (arg0), arg0,
10972 cst0));
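/* Worked example with hypothetical CST == 4: x + (x / 4) * -4 folds
   to x % 4, because cst1 + cst0 == -4 + 4 == 0; e.g. for x == 11,
   11 + (11 / 4) * -4 == 3 == 11 % 4 under truncating division.  */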
10976 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10977 equal to one. Make sure the type is not saturating and has the signedness of
10978 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10979 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10980 if ((TREE_CODE (arg0) == MULT_EXPR
10981 || TREE_CODE (arg1) == MULT_EXPR)
10982 && !TYPE_SATURATING (type)
10983 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10984 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10985 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10987 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10988 if (tem)
10989 return tem;
10992 if (! FLOAT_TYPE_P (type))
10994 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10995 (plus (plus (mult) (mult)) (foo)) so that we can
10996 take advantage of the factoring cases below. */
10997 if (ANY_INTEGRAL_TYPE_P (type)
10998 && TYPE_OVERFLOW_WRAPS (type)
10999 && (((TREE_CODE (arg0) == PLUS_EXPR
11000 || TREE_CODE (arg0) == MINUS_EXPR)
11001 && TREE_CODE (arg1) == MULT_EXPR)
11002 || ((TREE_CODE (arg1) == PLUS_EXPR
11003 || TREE_CODE (arg1) == MINUS_EXPR)
11004 && TREE_CODE (arg0) == MULT_EXPR)))
11006 tree parg0, parg1, parg, marg;
11007 enum tree_code pcode;
11009 if (TREE_CODE (arg1) == MULT_EXPR)
11010 parg = arg0, marg = arg1;
11011 else
11012 parg = arg1, marg = arg0;
11013 pcode = TREE_CODE (parg);
11014 parg0 = TREE_OPERAND (parg, 0);
11015 parg1 = TREE_OPERAND (parg, 1);
11016 STRIP_NOPS (parg0);
11017 STRIP_NOPS (parg1);
11019 if (TREE_CODE (parg0) == MULT_EXPR
11020 && TREE_CODE (parg1) != MULT_EXPR)
11021 return fold_build2_loc (loc, pcode, type,
11022 fold_build2_loc (loc, PLUS_EXPR, type,
11023 fold_convert_loc (loc, type,
11024 parg0),
11025 fold_convert_loc (loc, type,
11026 marg)),
11027 fold_convert_loc (loc, type, parg1));
11028 if (TREE_CODE (parg0) != MULT_EXPR
11029 && TREE_CODE (parg1) == MULT_EXPR)
11030 return
11031 fold_build2_loc (loc, PLUS_EXPR, type,
11032 fold_convert_loc (loc, type, parg0),
11033 fold_build2_loc (loc, pcode, type,
11034 fold_convert_loc (loc, type, marg),
11035 fold_convert_loc (loc, type,
11036 parg1)));
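/* E.g. (a*b + c) + d*e is rebuilt here as (a*b + d*e) + c (wrapping
   overflow is assumed by the guard above), which brings both
   MULT_EXPRs together for the factoring done by
   fold_plusminus_mult_expr.  */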
11039 else
11041 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11042 to __complex__ ( x, y ). This is not the same for SNaNs or
11043 if signed zeros are involved. */
11044 if (!HONOR_SNANS (arg0)
11045 && !HONOR_SIGNED_ZEROS (arg0)
11046 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11048 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11049 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11050 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11051 bool arg0rz = false, arg0iz = false;
11052 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11053 || (arg0i && (arg0iz = real_zerop (arg0i))))
11055 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11056 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11057 if (arg0rz && arg1i && real_zerop (arg1i))
11059 tree rp = arg1r ? arg1r
11060 : build1 (REALPART_EXPR, rtype, arg1);
11061 tree ip = arg0i ? arg0i
11062 : build1 (IMAGPART_EXPR, rtype, arg0);
11063 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11065 else if (arg0iz && arg1r && real_zerop (arg1r))
11067 tree rp = arg0r ? arg0r
11068 : build1 (REALPART_EXPR, rtype, arg0);
11069 tree ip = arg1i ? arg1i
11070 : build1 (IMAGPART_EXPR, rtype, arg1);
11071 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11076 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11077 We associate floats only if the user has specified
11078 -fassociative-math. */
11079 if (flag_associative_math
11080 && TREE_CODE (arg1) == PLUS_EXPR
11081 && TREE_CODE (arg0) != MULT_EXPR)
11083 tree tree10 = TREE_OPERAND (arg1, 0);
11084 tree tree11 = TREE_OPERAND (arg1, 1);
11085 if (TREE_CODE (tree11) == MULT_EXPR
11086 && TREE_CODE (tree10) == MULT_EXPR)
11088 tree tree0;
11089 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11090 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11093 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11094 We associate floats only if the user has specified
11095 -fassociative-math. */
11096 if (flag_associative_math
11097 && TREE_CODE (arg0) == PLUS_EXPR
11098 && TREE_CODE (arg1) != MULT_EXPR)
11100 tree tree00 = TREE_OPERAND (arg0, 0);
11101 tree tree01 = TREE_OPERAND (arg0, 1);
11102 if (TREE_CODE (tree01) == MULT_EXPR
11103 && TREE_CODE (tree00) == MULT_EXPR)
11105 tree tree0;
11106 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11107 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11112 bit_rotate:
11113 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11114 is a rotate of A by C1 bits. */
11115 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11116 is a rotate of A by B bits.
11117 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11118 though in this case CODE must be | and not + or ^, otherwise
11119 it doesn't return A when B is 0. */
11121 enum tree_code code0, code1;
11122 tree rtype;
11123 code0 = TREE_CODE (arg0);
11124 code1 = TREE_CODE (arg1);
11125 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11126 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11127 && operand_equal_p (TREE_OPERAND (arg0, 0),
11128 TREE_OPERAND (arg1, 0), 0)
11129 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11130 TYPE_UNSIGNED (rtype))
11131 /* Only create rotates in complete modes. Other cases are not
11132 expanded properly. */
11133 && (element_precision (rtype)
11134 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11136 tree tree01, tree11;
11137 tree orig_tree01, orig_tree11;
11138 enum tree_code code01, code11;
11140 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11141 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11142 STRIP_NOPS (tree01);
11143 STRIP_NOPS (tree11);
11144 code01 = TREE_CODE (tree01);
11145 code11 = TREE_CODE (tree11);
11146 if (code11 != MINUS_EXPR
11147 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11149 std::swap (code0, code1);
11150 std::swap (code01, code11);
11151 std::swap (tree01, tree11);
11152 std::swap (orig_tree01, orig_tree11);
11154 if (code01 == INTEGER_CST
11155 && code11 == INTEGER_CST
11156 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11157 == element_precision (rtype)))
11159 tem = build2_loc (loc, LROTATE_EXPR,
11160 rtype, TREE_OPERAND (arg0, 0),
11161 code0 == LSHIFT_EXPR
11162 ? orig_tree01 : orig_tree11);
11163 return fold_convert_loc (loc, type, tem);
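/* Worked example, assuming 32-bit unsigned x: (x << 3) + (x >> 29)
   has constant counts summing to the precision (3 + 29 == 32), so it
   becomes x lrotate 3.  */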
11165 else if (code11 == MINUS_EXPR)
11167 tree tree110, tree111;
11168 tree110 = TREE_OPERAND (tree11, 0);
11169 tree111 = TREE_OPERAND (tree11, 1);
11170 STRIP_NOPS (tree110);
11171 STRIP_NOPS (tree111);
11172 if (TREE_CODE (tree110) == INTEGER_CST
11173 && compare_tree_int (tree110,
11174 element_precision (rtype)) == 0
11175 && operand_equal_p (tree01, tree111, 0))
11177 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11178 ? LROTATE_EXPR : RROTATE_EXPR),
11179 rtype, TREE_OPERAND (arg0, 0),
11180 orig_tree01);
11181 return fold_convert_loc (loc, type, tem);
11184 else if (code == BIT_IOR_EXPR
11185 && code11 == BIT_AND_EXPR
11186 && pow2p_hwi (element_precision (rtype)))
11188 tree tree110, tree111;
11189 tree110 = TREE_OPERAND (tree11, 0);
11190 tree111 = TREE_OPERAND (tree11, 1);
11191 STRIP_NOPS (tree110);
11192 STRIP_NOPS (tree111);
11193 if (TREE_CODE (tree110) == NEGATE_EXPR
11194 && TREE_CODE (tree111) == INTEGER_CST
11195 && compare_tree_int (tree111,
11196 element_precision (rtype) - 1) == 0
11197 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11199 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11200 ? LROTATE_EXPR : RROTATE_EXPR),
11201 rtype, TREE_OPERAND (arg0, 0),
11202 orig_tree01);
11203 return fold_convert_loc (loc, type, tem);
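/* Variable-count example, assuming 32-bit unsigned x:
   (x << b) | (x >> (-b & 31)) matches this NEGATE_EXPR/BIT_AND_EXPR
   form (31 == precision - 1, and 32 is a power of two) and folds to
   x lrotate b; as noted above, this shape is only valid for
   BIT_IOR_EXPR.  */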
11209 associate:
11210 /* In most languages, we can't associate operations on floats through
11211 parentheses. Rather than remember where the parentheses were, we
11212 don't associate floats at all, unless the user has specified
11213 -fassociative-math.
11214 And, we need to make sure type is not saturating. */
11216 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11217 && !TYPE_SATURATING (type))
11219 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11220 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11221 tree atype = type;
11222 bool ok = true;
11224 /* Split both trees into variables, constants, and literals. Then
11225 associate each group together, the constants with literals,
11226 then the result with variables. This increases the chances of
11227 literals being recombined later and of generating relocatable
11228 expressions for the sum of a constant and literal. */
11229 var0 = split_tree (arg0, type, code,
11230 &minus_var0, &con0, &minus_con0,
11231 &lit0, &minus_lit0, 0);
11232 var1 = split_tree (arg1, type, code,
11233 &minus_var1, &con1, &minus_con1,
11234 &lit1, &minus_lit1, code == MINUS_EXPR);
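/* Hypothetical illustration of the grouping: folding (x + 4) + (y - 8)
   yields var0 == x, lit0 == 4 from the first operand and var1 == y,
   minus_lit1 == 8 from the second; the literals are combined first,
   and the result is then associated with the variables.  */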
11236 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11237 if (code == MINUS_EXPR)
11238 code = PLUS_EXPR;
11240 /* With undefined overflow prefer doing association in a type
11241 which wraps on overflow, if that is one of the operand types. */
11242 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11243 && !TYPE_OVERFLOW_WRAPS (type))
11245 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11246 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11247 atype = TREE_TYPE (arg0);
11248 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11249 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11250 atype = TREE_TYPE (arg1);
11251 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11254 /* With undefined overflow we can only associate constants with one
11255 variable, and constants whose association doesn't overflow. */
11256 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11257 && !TYPE_OVERFLOW_WRAPS (atype))
11259 if ((var0 && var1) || (minus_var0 && minus_var1))
11261 /* ??? If split_tree would handle NEGATE_EXPR we could
11262 simply reject these cases and the allowed cases would
11263 be the var0/minus_var1 ones. */
11264 tree tmp0 = var0 ? var0 : minus_var0;
11265 tree tmp1 = var1 ? var1 : minus_var1;
11266 bool one_neg = false;
11268 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11270 tmp0 = TREE_OPERAND (tmp0, 0);
11271 one_neg = !one_neg;
11273 if (CONVERT_EXPR_P (tmp0)
11274 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11275 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11276 <= TYPE_PRECISION (atype)))
11277 tmp0 = TREE_OPERAND (tmp0, 0);
11278 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11280 tmp1 = TREE_OPERAND (tmp1, 0);
11281 one_neg = !one_neg;
11283 if (CONVERT_EXPR_P (tmp1)
11284 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11285 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11286 <= TYPE_PRECISION (atype)))
11287 tmp1 = TREE_OPERAND (tmp1, 0);
11288 /* The only case we can still associate with two variables
11289 is if they cancel out. */
11290 if (!one_neg
11291 || !operand_equal_p (tmp0, tmp1, 0))
11292 ok = false;
11294 else if ((var0 && minus_var1
11295 && ! operand_equal_p (var0, minus_var1, 0))
11296 || (minus_var0 && var1
11297 && ! operand_equal_p (minus_var0, var1, 0)))
11298 ok = false;
11301 /* Only do something if we found more than two objects. Otherwise,
11302 nothing has changed and we risk infinite recursion. */
11303 if (ok
11304 && ((var0 != 0) + (var1 != 0)
11305 + (minus_var0 != 0) + (minus_var1 != 0)
11306 + (con0 != 0) + (con1 != 0)
11307 + (minus_con0 != 0) + (minus_con1 != 0)
11308 + (lit0 != 0) + (lit1 != 0)
11309 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11311 var0 = associate_trees (loc, var0, var1, code, atype);
11312 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11313 code, atype);
11314 con0 = associate_trees (loc, con0, con1, code, atype);
11315 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11316 code, atype);
11317 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11318 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11319 code, atype);
11321 if (minus_var0 && var0)
11323 var0 = associate_trees (loc, var0, minus_var0,
11324 MINUS_EXPR, atype);
11325 minus_var0 = 0;
11327 if (minus_con0 && con0)
11329 con0 = associate_trees (loc, con0, minus_con0,
11330 MINUS_EXPR, atype);
11331 minus_con0 = 0;
11334 /* Preserve the MINUS_EXPR if the negative part of the literal is
11335 greater than the positive part. Otherwise, the multiplicative
11336 folding code (i.e. extract_muldiv) may be fooled in case
11337 unsigned constants are subtracted, like in the following
11338 example: ((X*2 + 4) - 8U)/2. */
11339 if (minus_lit0 && lit0)
11341 if (TREE_CODE (lit0) == INTEGER_CST
11342 && TREE_CODE (minus_lit0) == INTEGER_CST
11343 && tree_int_cst_lt (lit0, minus_lit0)
11344 /* But avoid ending up with only negated parts. */
11345 && (var0 || con0))
11347 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11348 MINUS_EXPR, atype);
11349 lit0 = 0;
11351 else
11353 lit0 = associate_trees (loc, lit0, minus_lit0,
11354 MINUS_EXPR, atype);
11355 minus_lit0 = 0;
11359 /* Don't introduce overflows through reassociation. */
11360 if ((lit0 && TREE_OVERFLOW_P (lit0))
11361 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11362 return NULL_TREE;
11364 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11365 con0 = associate_trees (loc, con0, lit0, code, atype);
11366 lit0 = 0;
11367 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11368 code, atype);
11369 minus_lit0 = 0;
11371 /* Eliminate minus_con0. */
11372 if (minus_con0)
11374 if (con0)
11375 con0 = associate_trees (loc, con0, minus_con0,
11376 MINUS_EXPR, atype);
11377 else if (var0)
11378 var0 = associate_trees (loc, var0, minus_con0,
11379 MINUS_EXPR, atype);
11380 else
11381 gcc_unreachable ();
11382 minus_con0 = 0;
11385 /* Eliminate minus_var0. */
11386 if (minus_var0)
11388 if (con0)
11389 con0 = associate_trees (loc, con0, minus_var0,
11390 MINUS_EXPR, atype);
11391 else
11392 gcc_unreachable ();
11393 minus_var0 = 0;
11396 return
11397 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11398 code, atype));
11402 return NULL_TREE;
11404 case POINTER_DIFF_EXPR:
11405 case MINUS_EXPR:
11406 /* Fold &a[i] - &a[j] to i-j. */
11407 if (TREE_CODE (arg0) == ADDR_EXPR
11408 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11409 && TREE_CODE (arg1) == ADDR_EXPR
11410 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11412 tree tem = fold_addr_of_array_ref_difference (loc, type,
11413 TREE_OPERAND (arg0, 0),
11414 TREE_OPERAND (arg1, 0),
11415 code
11416 == POINTER_DIFF_EXPR);
11417 if (tem)
11418 return tem;
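/* E.g. &a[i] - &a[j] with the same base array is rewritten in terms
   of i - j (scaled by the element size) rather than computing and
   subtracting two addresses.  */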
11421 /* Further transformations are not for pointers. */
11422 if (code == POINTER_DIFF_EXPR)
11423 return NULL_TREE;
11425 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11426 if (TREE_CODE (arg0) == NEGATE_EXPR
11427 && negate_expr_p (op1)
11428 /* If arg0 is e.g. unsigned int and type is int, then this could
11429 introduce UB, because if A is INT_MIN at runtime, the original
11430 expression can be well defined while the latter is not.
11431 See PR83269. */
11432 && !(ANY_INTEGRAL_TYPE_P (type)
11433 && TYPE_OVERFLOW_UNDEFINED (type)
11434 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11435 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11436 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11437 fold_convert_loc (loc, type,
11438 TREE_OPERAND (arg0, 0)));
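/* E.g. (-a) - b becomes (-b) - a when b is easily negated; the guard
   above (PR83269) skips the rewrite when it would move the negation
   from a wrapping unsigned type into a signed type where the
   overflow would be undefined.  */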
11440 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11441 __complex__ ( x, -y ). This is not the same for SNaNs or if
11442 signed zeros are involved. */
11443 if (!HONOR_SNANS (arg0)
11444 && !HONOR_SIGNED_ZEROS (arg0)
11445 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11447 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11448 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11449 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11450 bool arg0rz = false, arg0iz = false;
11451 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11452 || (arg0i && (arg0iz = real_zerop (arg0i))))
11454 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11455 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11456 if (arg0rz && arg1i && real_zerop (arg1i))
11458 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11459 arg1r ? arg1r
11460 : build1 (REALPART_EXPR, rtype, arg1));
11461 tree ip = arg0i ? arg0i
11462 : build1 (IMAGPART_EXPR, rtype, arg0);
11463 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11465 else if (arg0iz && arg1r && real_zerop (arg1r))
11467 tree rp = arg0r ? arg0r
11468 : build1 (REALPART_EXPR, rtype, arg0);
11469 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11470 arg1i ? arg1i
11471 : build1 (IMAGPART_EXPR, rtype, arg1));
11472 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11477 /* A - B -> A + (-B) if B is easily negatable. */
11478 if (negate_expr_p (op1)
11479 && ! TYPE_OVERFLOW_SANITIZED (type)
11480 && ((FLOAT_TYPE_P (type)
11481 /* Avoid this transformation if B is a positive REAL_CST. */
11482 && (TREE_CODE (op1) != REAL_CST
11483 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11484 || INTEGRAL_TYPE_P (type)))
11485 return fold_build2_loc (loc, PLUS_EXPR, type,
11486 fold_convert_loc (loc, type, arg0),
11487 negate_expr (op1));
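/* E.g. a - 5 becomes a + (-5), and a - (-b) becomes a + b; for
   floats, a positive REAL_CST operand is left alone so that a - 1.0
   is not turned into a + -1.0.  */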
11489 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11490 equal to one. Make sure the type is not saturating and has the signedness of
11491 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11492 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11493 if ((TREE_CODE (arg0) == MULT_EXPR
11494 || TREE_CODE (arg1) == MULT_EXPR)
11495 && !TYPE_SATURATING (type)
11496 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11497 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11498 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11500 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11501 if (tem)
11502 return tem;
11505 goto associate;
11507 case MULT_EXPR:
11508 if (! FLOAT_TYPE_P (type))
11510 /* Transform x * -C into -x * C if x is easily negatable. */
11511 if (TREE_CODE (op1) == INTEGER_CST
11512 && tree_int_cst_sgn (op1) == -1
11513 && negate_expr_p (op0)
11514 && negate_expr_p (op1)
11515 && (tem = negate_expr (op1)) != op1
11516 && ! TREE_OVERFLOW (tem))
11517 return fold_build2_loc (loc, MULT_EXPR, type,
11518 fold_convert_loc (loc, type,
11519 negate_expr (op0)), tem);
11521 strict_overflow_p = false;
11522 if (TREE_CODE (arg1) == INTEGER_CST
11523 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11524 &strict_overflow_p)) != 0)
11526 if (strict_overflow_p)
11527 fold_overflow_warning (("assuming signed overflow does not "
11528 "occur when simplifying "
11529 "multiplication"),
11530 WARN_STRICT_OVERFLOW_MISC);
11531 return fold_convert_loc (loc, type, tem);
11534 /* Optimize z * conj(z) for integer complex numbers. */
11535 if (TREE_CODE (arg0) == CONJ_EXPR
11536 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11537 return fold_mult_zconjz (loc, type, arg1);
11538 if (TREE_CODE (arg1) == CONJ_EXPR
11539 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11540 return fold_mult_zconjz (loc, type, arg0);
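/* Worked identity: for z == a + b*i, z * conj(z) == (a + b*i)
   * (a - b*i) == a*a + b*b with a zero imaginary part, which is the
   form fold_mult_zconjz constructs.  */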
11542 else
11544 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11545 This is not the same for NaNs or if signed zeros are
11546 involved. */
11547 if (!HONOR_NANS (arg0)
11548 && !HONOR_SIGNED_ZEROS (arg0)
11549 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11550 && TREE_CODE (arg1) == COMPLEX_CST
11551 && real_zerop (TREE_REALPART (arg1)))
11553 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11554 if (real_onep (TREE_IMAGPART (arg1)))
11555 return
11556 fold_build2_loc (loc, COMPLEX_EXPR, type,
11557 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11558 rtype, arg0)),
11559 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11560 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11561 return
11562 fold_build2_loc (loc, COMPLEX_EXPR, type,
11563 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11564 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11565 rtype, arg0)));
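/* Worked identity: for z == a + b*i, z * I == -b + a*i and
   z * -I == b - a*i, so the multiplication reduces to swapping the
   real and imaginary parts and negating one of them.  */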
11568 /* Optimize z * conj(z) for floating point complex numbers.
11569 Guarded by flag_unsafe_math_optimizations as non-finite
11570 imaginary components don't produce scalar results. */
11571 if (flag_unsafe_math_optimizations
11572 && TREE_CODE (arg0) == CONJ_EXPR
11573 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11574 return fold_mult_zconjz (loc, type, arg1);
11575 if (flag_unsafe_math_optimizations
11576 && TREE_CODE (arg1) == CONJ_EXPR
11577 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11578 return fold_mult_zconjz (loc, type, arg0);
11580 goto associate;
11582 case BIT_IOR_EXPR:
11583 /* Canonicalize (X & C1) | C2. */
11584 if (TREE_CODE (arg0) == BIT_AND_EXPR
11585 && TREE_CODE (arg1) == INTEGER_CST
11586 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11588 int width = TYPE_PRECISION (type), w;
11589 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11590 wide_int c2 = wi::to_wide (arg1);
11592 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11593 if ((c1 & c2) == c1)
11594 return omit_one_operand_loc (loc, type, arg1,
11595 TREE_OPERAND (arg0, 0));
11597 wide_int msk = wi::mask (width, false,
11598 TYPE_PRECISION (TREE_TYPE (arg1)));
11600 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11601 if (wi::bit_and_not (msk, c1 | c2) == 0)
11603 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11604 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11607 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11608 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11609 mode which allows further optimizations. */
11610 c1 &= msk;
11611 c2 &= msk;
11612 wide_int c3 = wi::bit_and_not (c1, c2);
11613 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11615 wide_int mask = wi::mask (w, false,
11616 TYPE_PRECISION (type));
11617 if (((c1 | c2) & mask) == mask
11618 && wi::bit_and_not (c1, mask) == 0)
11620 c3 = mask;
11621 break;
11625 if (c3 != c1)
11627 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11628 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11629 wide_int_to_tree (type, c3));
11630 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
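/* Worked example on a hypothetical 8-bit type: (x & 0xF0) | 0x3C has
   C1 & C2 != C1 and C1 | C2 != ~0, so C1 shrinks to C1 & ~C2 == 0xC0
   and the result is (x & 0xC0) | 0x3C; the loop above only retains
   extra bits of C1 when C1 | C2 covers a whole power-of-two-wide
   mask.  */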
11634 /* See if this can be simplified into a rotate first. If that
11635 is unsuccessful continue in the association code. */
11636 goto bit_rotate;
11638 case BIT_XOR_EXPR:
11639 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11640 if (TREE_CODE (arg0) == BIT_AND_EXPR
11641 && INTEGRAL_TYPE_P (type)
11642 && integer_onep (TREE_OPERAND (arg0, 1))
11643 && integer_onep (arg1))
11644 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11645 build_zero_cst (TREE_TYPE (arg0)));
11647 /* See if this can be simplified into a rotate first. If that
11648 is unsuccessful continue in the association code. */
11649 goto bit_rotate;
11651 case BIT_AND_EXPR:
11652 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11653 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11654 && INTEGRAL_TYPE_P (type)
11655 && integer_onep (TREE_OPERAND (arg0, 1))
11656 && integer_onep (arg1))
11658 tree tem2;
11659 tem = TREE_OPERAND (arg0, 0);
11660 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11661 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11662 tem, tem2);
11663 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11664 build_zero_cst (TREE_TYPE (tem)));
11666 /* Fold ~X & 1 as (X & 1) == 0. */
11667 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11668 && INTEGRAL_TYPE_P (type)
11669 && integer_onep (arg1))
11671 tree tem2;
11672 tem = TREE_OPERAND (arg0, 0);
11673 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11674 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11675 tem, tem2);
11676 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11677 build_zero_cst (TREE_TYPE (tem)));
11679 /* Fold !X & 1 as X == 0. */
11680 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11681 && integer_onep (arg1))
11683 tem = TREE_OPERAND (arg0, 0);
11684 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11685 build_zero_cst (TREE_TYPE (tem)));
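/* E.g. (x ^ 1) & 1 and ~x & 1 both test the inverted low bit and
   become (x & 1) == 0, while !x & 1 tests the whole value and
   becomes x == 0.  */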
11688 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11689 multiple of 1 << CST. */
11690 if (TREE_CODE (arg1) == INTEGER_CST)
11692 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11693 wide_int ncst1 = -cst1;
11694 if ((cst1 & ncst1) == ncst1
11695 && multiple_of_p (type, arg0,
11696 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11697 return fold_convert_loc (loc, type, arg0);
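/* E.g. (x * 4) & -4 folds to x * 4: the mask -4 == -(1 << 2) only
   clears the two low bits, and x * 4 is provably a multiple of 4, so
   nothing is masked off.  */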
11700 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11701 bits from CST2. */
11702 if (TREE_CODE (arg1) == INTEGER_CST
11703 && TREE_CODE (arg0) == MULT_EXPR
11704 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11706 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11707 wide_int masked
11708 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11710 if (masked == 0)
11711 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11712 arg0, arg1);
11713 else if (masked != warg1)
11715 /* Avoid the transform if arg1 is a mask of some
11716 mode which allows further optimizations. */
11717 int pop = wi::popcount (warg1);
11718 if (!(pop >= BITS_PER_UNIT
11719 && pow2p_hwi (pop)
11720 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11721 return fold_build2_loc (loc, code, type, op0,
11722 wide_int_to_tree (type, masked));
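/* E.g. (x * 8) & 5 folds to 0 (the product has three trailing zero
   bits and 5 & ~7 == 0), while (x * 8) & 12 drops the known-zero
   bits and becomes (x * 8) & 8.  */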
11726 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11727 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11728 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11730 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11732 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11733 if (mask == -1)
11734 return
11735 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11738 goto associate;
11740 case RDIV_EXPR:
11741 /* Don't touch a floating-point divide by zero unless the mode
11742 of the constant can represent infinity. */
11743 if (TREE_CODE (arg1) == REAL_CST
11744 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11745 && real_zerop (arg1))
11746 return NULL_TREE;
11748 /* (-A) / (-B) -> A / B */
11749 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11750 return fold_build2_loc (loc, RDIV_EXPR, type,
11751 TREE_OPERAND (arg0, 0),
11752 negate_expr (arg1));
11753 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11754 return fold_build2_loc (loc, RDIV_EXPR, type,
11755 negate_expr (arg0),
11756 TREE_OPERAND (arg1, 0));
11757 return NULL_TREE;
11759 case TRUNC_DIV_EXPR:
11760 /* Fall through */
11762 case FLOOR_DIV_EXPR:
11763 /* Simplify A / (B << N) where A and B are positive and B is
11764 a power of 2, to A >> (N + log2(B)). */
11765 strict_overflow_p = false;
11766 if (TREE_CODE (arg1) == LSHIFT_EXPR
11767 && (TYPE_UNSIGNED (type)
11768 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11770 tree sval = TREE_OPERAND (arg1, 0);
11771 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11773 tree sh_cnt = TREE_OPERAND (arg1, 1);
11774 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11775 wi::exact_log2 (wi::to_wide (sval)));
11777 if (strict_overflow_p)
11778 fold_overflow_warning (("assuming signed overflow does not "
11779 "occur when simplifying A / (B << N)"),
11780 WARN_STRICT_OVERFLOW_MISC);
11782 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11783 sh_cnt, pow2);
11784 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11785 fold_convert_loc (loc, type, arg0), sh_cnt);
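/* Worked example for unsigned x: x / (4 << n) becomes x >> (n + 2),
   since log2 (4) == 2 and the shift count is just re-biased.  */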
11789 /* Fall through */
11791 case ROUND_DIV_EXPR:
11792 case CEIL_DIV_EXPR:
11793 case EXACT_DIV_EXPR:
11794 if (integer_zerop (arg1))
11795 return NULL_TREE;
11797 /* Convert -A / -B to A / B when the type is signed and overflow is
11798 undefined. */
11799 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11800 && TREE_CODE (op0) == NEGATE_EXPR
11801 && negate_expr_p (op1))
11803 if (ANY_INTEGRAL_TYPE_P (type))
11804 fold_overflow_warning (("assuming signed overflow does not occur "
11805 "when distributing negation across "
11806 "division"),
11807 WARN_STRICT_OVERFLOW_MISC);
11808 return fold_build2_loc (loc, code, type,
11809 fold_convert_loc (loc, type,
11810 TREE_OPERAND (arg0, 0)),
11811 negate_expr (op1));
11813 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11814 && TREE_CODE (arg1) == NEGATE_EXPR
11815 && negate_expr_p (op0))
11817 if (ANY_INTEGRAL_TYPE_P (type))
11818 fold_overflow_warning (("assuming signed overflow does not occur "
11819 "when distributing negation across "
11820 "division"),
11821 WARN_STRICT_OVERFLOW_MISC);
11822 return fold_build2_loc (loc, code, type,
11823 negate_expr (op0),
11824 fold_convert_loc (loc, type,
11825 TREE_OPERAND (arg1, 0)));
11828 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11829 operation, EXACT_DIV_EXPR.
11831 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11832 At one time others generated faster code; it's not clear whether they do
11833 after the last round of changes to the DIV code in expmed.c. */
11834 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11835 && multiple_of_p (type, arg0, arg1))
11836 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11837 fold_convert (type, arg0),
11838 fold_convert (type, arg1));
11840 strict_overflow_p = false;
11841 if (TREE_CODE (arg1) == INTEGER_CST
11842 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11843 &strict_overflow_p)) != 0)
11845 if (strict_overflow_p)
11846 fold_overflow_warning (("assuming signed overflow does not occur "
11847 "when simplifying division"),
11848 WARN_STRICT_OVERFLOW_MISC);
11849 return fold_convert_loc (loc, type, tem);
11852 return NULL_TREE;
11854 case CEIL_MOD_EXPR:
11855 case FLOOR_MOD_EXPR:
11856 case ROUND_MOD_EXPR:
11857 case TRUNC_MOD_EXPR:
11858 strict_overflow_p = false;
11859 if (TREE_CODE (arg1) == INTEGER_CST
11860 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11861 &strict_overflow_p)) != 0)
11863 if (strict_overflow_p)
11864 fold_overflow_warning (("assuming signed overflow does not occur "
11865 "when simplifying modulus"),
11866 WARN_STRICT_OVERFLOW_MISC);
11867 return fold_convert_loc (loc, type, tem);
11870 return NULL_TREE;
11872 case LROTATE_EXPR:
11873 case RROTATE_EXPR:
11874 case RSHIFT_EXPR:
11875 case LSHIFT_EXPR:
11876 /* Since a negative shift count is not well-defined,
11877 don't try to compute it in the compiler. */
11878 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11879 return NULL_TREE;
11881 prec = element_precision (type);
11883 /* If we have a rotate of a bit operation with the rotate count and
11884 the second operand of the bit operation both constant,
11885 permute the two operations. */
11886 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11887 && (TREE_CODE (arg0) == BIT_AND_EXPR
11888 || TREE_CODE (arg0) == BIT_IOR_EXPR
11889 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11890 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11892 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11893 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11894 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11895 fold_build2_loc (loc, code, type,
11896 arg00, arg1),
11897 fold_build2_loc (loc, code, type,
11898 arg01, arg1));
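/* E.g. (x & 0xFF00FF) rrotate 8 becomes
   (x rrotate 8) & (0xFF00FF rrotate 8): a rotate by a constant
   commutes with a bitwise operation whose other operand is also
   constant, and the rotated constant folds at compile time.  */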
11901 /* Two consecutive rotates adding up to some integer
11902 multiple of the precision of the type can be ignored. */
11903 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11904 && TREE_CODE (arg0) == RROTATE_EXPR
11905 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11906 && wi::umod_trunc (wi::to_wide (arg1)
11907 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11908 prec) == 0)
11909 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
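/* E.g. on a 32-bit type, (x rrotate 5) rrotate 27 folds back to
   plain x, because (5 + 27) % 32 == 0.  */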
11911 return NULL_TREE;
11913 case MIN_EXPR:
11914 case MAX_EXPR:
11915 goto associate;
11917 case TRUTH_ANDIF_EXPR:
11918 /* Note that the operands of this must be ints
11919 and their values must be 0 or 1.
11920 ("true" is a fixed value perhaps depending on the language.) */
11921 /* If first arg is constant zero, return it. */
11922 if (integer_zerop (arg0))
11923 return fold_convert_loc (loc, type, arg0);
11924 /* FALLTHRU */
11925 case TRUTH_AND_EXPR:
11926 /* If either arg is constant true, drop it. */
11927 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11929 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11930 /* Preserve sequence points. */
11931 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11933 /* If second arg is constant zero, result is zero, but first arg
11934 must be evaluated. */
11935 if (integer_zerop (arg1))
11936 return omit_one_operand_loc (loc, type, arg1, arg0);
11937 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11938 case will be handled here. */
11939 if (integer_zerop (arg0))
11940 return omit_one_operand_loc (loc, type, arg0, arg1);
11942 /* !X && X is always false. */
11943 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11944 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11945 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11946 /* X && !X is always false. */
11947 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11949 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11951 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11952 means A >= Y && A != MAX, but in this case we know that
11953 A < X <= MAX. */
11955 if (!TREE_SIDE_EFFECTS (arg0)
11956 && !TREE_SIDE_EFFECTS (arg1))
11958 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11959 if (tem && !operand_equal_p (tem, arg0, 0))
11960 return fold_build2_loc (loc, code, type, tem, arg1);
11962 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11963 if (tem && !operand_equal_p (tem, arg1, 0))
11964 return fold_build2_loc (loc, code, type, arg0, tem);
11967 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11968 != NULL_TREE)
11969 return tem;
11971 return NULL_TREE;
11973 case TRUTH_ORIF_EXPR:
11974 /* Note that the operands of this must be ints
11975 and their values must be 0 or 1.
11976 ("true" is a fixed value perhaps depending on the language.) */
11977 /* If first arg is constant true, return it. */
11978 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11979 return fold_convert_loc (loc, type, arg0);
11980 /* FALLTHRU */
11981 case TRUTH_OR_EXPR:
11982 /* If either arg is constant zero, drop it. */
11983 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11984 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11985 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11986 /* Preserve sequence points. */
11987 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11988 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11989 /* If second arg is constant true, result is true, but we must
11990 evaluate first arg. */
11991 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11992 return omit_one_operand_loc (loc, type, arg1, arg0);
11993 /* Likewise for first arg, but note this only occurs here for
11994 TRUTH_OR_EXPR. */
11995 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11996 return omit_one_operand_loc (loc, type, arg0, arg1);
11998 /* !X || X is always true. */
11999 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12000 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12001 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12002 /* X || !X is always true. */
12003 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12004 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12005 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12007 /* (X && !Y) || (!X && Y) is X ^ Y */
12008 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12009 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12011 tree a0, a1, l0, l1, n0, n1;
12013 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12014 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12016 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12017 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12019 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12020 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12022 if ((operand_equal_p (n0, a0, 0)
12023 && operand_equal_p (n1, a1, 0))
12024 || (operand_equal_p (n0, a1, 0)
12025 && operand_equal_p (n1, a0, 0)))
12026 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12029 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12030 != NULL_TREE)
12031 return tem;
12033 return NULL_TREE;
12035 case TRUTH_XOR_EXPR:
12036 /* If the second arg is constant zero, drop it. */
12037 if (integer_zerop (arg1))
12038 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12039 /* If the second arg is constant true, this is a logical inversion. */
12040 if (integer_onep (arg1))
12042 tem = invert_truthvalue_loc (loc, arg0);
12043 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12045 /* Identical arguments cancel to zero. */
12046 if (operand_equal_p (arg0, arg1, 0))
12047 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12049 /* !X ^ X is always true. */
12050 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12051 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12052 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12054 /* X ^ !X is always true. */
12055 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12056 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12057 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12059 return NULL_TREE;
12061 case EQ_EXPR:
12062 case NE_EXPR:
12063 STRIP_NOPS (arg0);
12064 STRIP_NOPS (arg1);
12066 tem = fold_comparison (loc, code, type, op0, op1);
12067 if (tem != NULL_TREE)
12068 return tem;
12070 /* bool_var != 1 becomes !bool_var. */
12071 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12072 && code == NE_EXPR)
12073 return fold_convert_loc (loc, type,
12074 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12075 TREE_TYPE (arg0), arg0));
12077 /* bool_var == 0 becomes !bool_var. */
12078 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12079 && code == EQ_EXPR)
12080 return fold_convert_loc (loc, type,
12081 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12082 TREE_TYPE (arg0), arg0));
12084 /* !exp != 0 becomes !exp */
12085 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12086 && code == NE_EXPR)
12087 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12089 /* If this is an EQ or NE comparison with zero and ARG0 is
12090 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12091 two operations, but the latter can be done in one less insn
12092 on machines that have only two-operand insns or on which a
12093 constant cannot be the first operand. */
12094 if (TREE_CODE (arg0) == BIT_AND_EXPR
12095 && integer_zerop (arg1))
12097 tree arg00 = TREE_OPERAND (arg0, 0);
12098 tree arg01 = TREE_OPERAND (arg0, 1);
12099 if (TREE_CODE (arg00) == LSHIFT_EXPR
12100 && integer_onep (TREE_OPERAND (arg00, 0)))
12102 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12103 arg01, TREE_OPERAND (arg00, 1));
12104 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12105 build_one_cst (TREE_TYPE (arg0)));
12106 return fold_build2_loc (loc, code, type,
12107 fold_convert_loc (loc, TREE_TYPE (arg1),
12108 tem), arg1);
12110 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12111 && integer_onep (TREE_OPERAND (arg01, 0)))
12113 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12114 arg00, TREE_OPERAND (arg01, 1));
12115 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12116 build_one_cst (TREE_TYPE (arg0)));
12117 return fold_build2_loc (loc, code, type,
12118 fold_convert_loc (loc, TREE_TYPE (arg1),
12119 tem), arg1);
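/* E.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0:
   still two operations, but as the comment above notes, often one
   less instruction on two-operand machines.  */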
12123 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12124 C1 is a valid shift constant, and C2 is a power of two, i.e.
12125 a single bit. */
12126 if (TREE_CODE (arg0) == BIT_AND_EXPR
12127 && integer_pow2p (TREE_OPERAND (arg0, 1))
12128 && integer_zerop (arg1))
12130 tree arg00 = TREE_OPERAND (arg0, 0);
12131 STRIP_NOPS (arg00);
12132 if (TREE_CODE (arg00) == RSHIFT_EXPR
12133 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12135 tree itype = TREE_TYPE (arg00);
12136 tree arg001 = TREE_OPERAND (arg00, 1);
12137 prec = TYPE_PRECISION (itype);
12139 /* Check for a valid shift count. */
12140 if (wi::ltu_p (wi::to_wide (arg001), prec))
12142 tree arg01 = TREE_OPERAND (arg0, 1);
12143 tree arg000 = TREE_OPERAND (arg00, 0);
12144 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12145 /* If (C2 << C1) doesn't overflow, then
12146 ((X >> C1) & C2) != 0 can be rewritten as
12147 (X & (C2 << C1)) != 0. */
12148 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12150 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12151 arg01, arg001);
12152 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12153 arg000, tem);
12154 return fold_build2_loc (loc, code, type, tem,
12155 fold_convert_loc (loc, itype, arg1));
12157 /* Otherwise, for signed (arithmetic) shifts,
12158 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12159 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12160 else if (!TYPE_UNSIGNED (itype))
12161 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12162 : LT_EXPR,
12163 type, arg000,
12164 build_int_cst (itype, 0));
12165 /* Otherwise, for unsigned (logical) shifts,
12166 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12167 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12168 else
12169 return omit_one_operand_loc (loc, type,
12170 code == EQ_EXPR ? integer_one_node
12171 : integer_zero_node,
12172 arg000);
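/* Worked example on a 32-bit type: ((x >> 2) & 4) != 0 becomes
   (x & 16) != 0 since 4 << 2 does not overflow; when C2 << C1 would
   reach past the precision, a signed x instead folds to x < 0 or
   x >= 0 as described above.  */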
12177 /* If this is a comparison of a field, we may be able to simplify it. */
12178 if ((TREE_CODE (arg0) == COMPONENT_REF
12179 || TREE_CODE (arg0) == BIT_FIELD_REF)
12180 /* Handle the constant case even without -O
12181 to make sure the warnings are given. */
12182 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12184 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12185 if (t1)
12186 return t1;
12189 /* Optimize comparisons of strlen vs zero to a compare of the
12190 first character of the string vs zero. To wit,
12191 strlen(ptr) == 0 => *ptr == 0
12192 strlen(ptr) != 0 => *ptr != 0
12193 Other cases should reduce to one of these two (or a constant)
12194 due to the return value of strlen being unsigned. */
12195 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12197 tree fndecl = get_callee_fndecl (arg0);
12199 if (fndecl
12200 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12201 && call_expr_nargs (arg0) == 1
12202 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12203 == POINTER_TYPE))
12205 tree ptrtype
12206 = build_pointer_type (build_qualified_type (char_type_node,
12207 TYPE_QUAL_CONST));
12208 tree ptr = fold_convert_loc (loc, ptrtype,
12209 CALL_EXPR_ARG (arg0, 0));
12210 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12211 return fold_build2_loc (loc, code, type, iref,
12212 build_int_cst (TREE_TYPE (iref), 0));
12216 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12217 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12218 if (TREE_CODE (arg0) == RSHIFT_EXPR
12219 && integer_zerop (arg1)
12220 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12222 tree arg00 = TREE_OPERAND (arg0, 0);
12223 tree arg01 = TREE_OPERAND (arg0, 1);
12224 tree itype = TREE_TYPE (arg00);
12225 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12227 if (TYPE_UNSIGNED (itype))
12229 itype = signed_type_for (itype);
12230 arg00 = fold_convert_loc (loc, itype, arg00);
12232 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12233 type, arg00, build_zero_cst (itype));
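/* E.g. for 32-bit int x, (x >> 31) != 0 folds to x < 0 and
   (x >> 31) == 0 to x >= 0; an unsigned x is converted to the
   corresponding signed type first so that the sign test applies.  */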
12237 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12238 (X & C) == 0 when C is a single bit. */
12239 if (TREE_CODE (arg0) == BIT_AND_EXPR
12240 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12241 && integer_zerop (arg1)
12242 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12244 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12245 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12246 TREE_OPERAND (arg0, 1));
12247 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12248 type, tem,
12249 fold_convert_loc (loc, TREE_TYPE (arg0),
12250 arg1));
12253 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12254 constant C is a power of two, i.e. a single bit. */
12255 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12256 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12257 && integer_zerop (arg1)
12258 && integer_pow2p (TREE_OPERAND (arg0, 1))
12259 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12260 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12262 tree arg00 = TREE_OPERAND (arg0, 0);
12263 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12264 arg00, build_int_cst (TREE_TYPE (arg00), 0));
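/* E.g. ((x & 8) ^ 8) == 0 becomes (x & 8) != 0: the expression is
   zero exactly when the single bit is set, so the comparison code is
   inverted rather than computing the XOR.  */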
12267 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12268 when C is a power of two, i.e. a single bit. */
12269 if (TREE_CODE (arg0) == BIT_AND_EXPR
12270 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12271 && integer_zerop (arg1)
12272 && integer_pow2p (TREE_OPERAND (arg0, 1))
12273 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12274 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12276 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12277 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12278 arg000, TREE_OPERAND (arg0, 1));
12279 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12280 tem, build_int_cst (TREE_TYPE (tem), 0));
12283 if (integer_zerop (arg1)
12284 && tree_expr_nonzero_p (arg0))
12286 tree res = constant_boolean_node (code==NE_EXPR, type);
12287 return omit_one_operand_loc (loc, type, res, arg0);
12290 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12291 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12293 tree arg00 = TREE_OPERAND (arg0, 0);
12294 tree arg01 = TREE_OPERAND (arg0, 1);
12295 tree arg10 = TREE_OPERAND (arg1, 0);
12296 tree arg11 = TREE_OPERAND (arg1, 1);
12297 tree itype = TREE_TYPE (arg0);
12299 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12300 operand_equal_p guarantees no side-effects so we don't need
12301 to use omit_one_operand on Z. */
12302 if (operand_equal_p (arg01, arg11, 0))
12303 return fold_build2_loc (loc, code, type, arg00,
12304 fold_convert_loc (loc, TREE_TYPE (arg00),
12305 arg10));
12306 if (operand_equal_p (arg01, arg10, 0))
12307 return fold_build2_loc (loc, code, type, arg00,
12308 fold_convert_loc (loc, TREE_TYPE (arg00),
12309 arg11));
12310 if (operand_equal_p (arg00, arg11, 0))
12311 return fold_build2_loc (loc, code, type, arg01,
12312 fold_convert_loc (loc, TREE_TYPE (arg01),
12313 arg10));
12314 if (operand_equal_p (arg00, arg10, 0))
12315 return fold_build2_loc (loc, code, type, arg01,
12316 fold_convert_loc (loc, TREE_TYPE (arg01),
12317 arg11));
12319 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12320 if (TREE_CODE (arg01) == INTEGER_CST
12321 && TREE_CODE (arg11) == INTEGER_CST)
12323 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12324 fold_convert_loc (loc, itype, arg11));
12325 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12326 return fold_build2_loc (loc, code, type, tem,
12327 fold_convert_loc (loc, itype, arg10));
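/* E.g. (x ^ 5) == (y ^ 3) becomes (x ^ (5 ^ 3)) == y, i.e.
   (x ^ 6) == y, folding both constants to one side.  */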
12331 /* Attempt to simplify equality/inequality comparisons of complex
12332 values. Only lower the comparison if the result is known or
12333 can be simplified to a single scalar comparison. */
12334 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12335 || TREE_CODE (arg0) == COMPLEX_CST)
12336 && (TREE_CODE (arg1) == COMPLEX_EXPR
12337 || TREE_CODE (arg1) == COMPLEX_CST))
12339 tree real0, imag0, real1, imag1;
12340 tree rcond, icond;
12342 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12344 real0 = TREE_OPERAND (arg0, 0);
12345 imag0 = TREE_OPERAND (arg0, 1);
12347 else
12349 real0 = TREE_REALPART (arg0);
12350 imag0 = TREE_IMAGPART (arg0);
12353 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12355 real1 = TREE_OPERAND (arg1, 0);
12356 imag1 = TREE_OPERAND (arg1, 1);
12358 else
12360 real1 = TREE_REALPART (arg1);
12361 imag1 = TREE_IMAGPART (arg1);
12364 rcond = fold_binary_loc (loc, code, type, real0, real1);
12365 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12367 if (integer_zerop (rcond))
12369 if (code == EQ_EXPR)
12370 return omit_two_operands_loc (loc, type, boolean_false_node,
12371 imag0, imag1);
12372 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12374 else
12376 if (code == NE_EXPR)
12377 return omit_two_operands_loc (loc, type, boolean_true_node,
12378 imag0, imag1);
12379 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12383 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12384 if (icond && TREE_CODE (icond) == INTEGER_CST)
12386 if (integer_zerop (icond))
12388 if (code == EQ_EXPR)
12389 return omit_two_operands_loc (loc, type, boolean_false_node,
12390 real0, real1);
12391 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12393 else
12395 if (code == NE_EXPR)
12396 return omit_two_operands_loc (loc, type, boolean_true_node,
12397 real0, real1);
12398 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
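/* E.g. __complex__ (x, 0.0) == __complex__ (y, 1.0): the imaginary
   halves compare unequal at compile time, so the EQ_EXPR folds to
   false while omit_two_operands preserves any side effects of the
   real halves.  */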
12403 return NULL_TREE;
12405 case LT_EXPR:
12406 case GT_EXPR:
12407 case LE_EXPR:
12408 case GE_EXPR:
12409 tem = fold_comparison (loc, code, type, op0, op1);
12410 if (tem != NULL_TREE)
12411 return tem;
12413 /* Transform comparisons of the form X +- C CMP X. */
12414 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12415 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12417 && !HONOR_SNANS (arg0))
12419 tree arg01 = TREE_OPERAND (arg0, 1);
12420 enum tree_code code0 = TREE_CODE (arg0);
12421 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12423 /* (X - c) > X becomes false. */
12424 if (code == GT_EXPR
12425 && ((code0 == MINUS_EXPR && is_positive >= 0)
12426 || (code0 == PLUS_EXPR && is_positive <= 0)))
12427 return constant_boolean_node (0, type);
12429 /* Likewise (X + c) < X becomes false. */
12430 if (code == LT_EXPR
12431 && ((code0 == PLUS_EXPR && is_positive >= 0)
12432 || (code0 == MINUS_EXPR && is_positive <= 0)))
12433 return constant_boolean_node (0, type);
12435 /* Convert (X - c) <= X to true. */
12436 if (!HONOR_NANS (arg1)
12437 && code == LE_EXPR
12438 && ((code0 == MINUS_EXPR && is_positive >= 0)
12439 || (code0 == PLUS_EXPR && is_positive <= 0)))
12440 return constant_boolean_node (1, type);
12442 /* Convert (X + c) >= X to true. */
12443 if (!HONOR_NANS (arg1)
12444 && code == GE_EXPR
12445 && ((code0 == PLUS_EXPR && is_positive >= 0)
12446 || (code0 == MINUS_EXPR && is_positive <= 0)))
12447 return constant_boolean_node (1, type);
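/* E.g. for double x with no signaling NaNs to honor, (x - 1.0) > x
   folds to false unconditionally (a NaN x makes it false anyway),
   while (x - 1.0) <= x additionally requires !HONOR_NANS before it
   can fold to true.  */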
12450 /* If we are comparing an ABS_EXPR with a constant, we can
12451 convert all the cases into explicit comparisons, but they may
12452 well not be faster than doing the ABS and one comparison.
12453 But ABS (X) <= C is a range comparison, which becomes a subtraction
12454 and a comparison, and is probably faster. */
12455 if (code == LE_EXPR
12456 && TREE_CODE (arg1) == INTEGER_CST
12457 && TREE_CODE (arg0) == ABS_EXPR
12458 && ! TREE_SIDE_EFFECTS (arg0)
12459 && (tem = negate_expr (arg1)) != 0
12460 && TREE_CODE (tem) == INTEGER_CST
12461 && !TREE_OVERFLOW (tem))
12462 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12463 build2 (GE_EXPR, type,
12464 TREE_OPERAND (arg0, 0), tem),
12465 build2 (LE_EXPR, type,
12466 TREE_OPERAND (arg0, 0), arg1));
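/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5, built as a
   TRUTH_ANDIF_EXPR range check that avoids computing the absolute
   value.  */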
12468 /* Convert ABS_EXPR<x> >= 0 to true. */
12469 strict_overflow_p = false;
12470 if (code == GE_EXPR
12471 && (integer_zerop (arg1)
12472 || (! HONOR_NANS (arg0)
12473 && real_zerop (arg1)))
12474 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12476 if (strict_overflow_p)
12477 fold_overflow_warning (("assuming signed overflow does not occur "
12478 "when simplifying comparison of "
12479 "absolute value and zero"),
12480 WARN_STRICT_OVERFLOW_CONDITIONAL);
12481 return omit_one_operand_loc (loc, type,
12482 constant_boolean_node (true, type),
12483 arg0);
12486 /* Convert ABS_EXPR<x> < 0 to false. */
12487 strict_overflow_p = false;
12488 if (code == LT_EXPR
12489 && (integer_zerop (arg1) || real_zerop (arg1))
12490 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12492 if (strict_overflow_p)
12493 fold_overflow_warning (("assuming signed overflow does not occur "
12494 "when simplifying comparison of "
12495 "absolute value and zero"),
12496 WARN_STRICT_OVERFLOW_CONDITIONAL);
12497 return omit_one_operand_loc (loc, type,
12498 constant_boolean_node (false, type),
12499 arg0);
12502 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12503 and similarly for >= into !=. */
12504 if ((code == LT_EXPR || code == GE_EXPR)
12505 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12506 && TREE_CODE (arg1) == LSHIFT_EXPR
12507 && integer_onep (TREE_OPERAND (arg1, 0)))
12508 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12509 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12510 TREE_OPERAND (arg1, 1)),
12511 build_zero_cst (TREE_TYPE (arg0)));
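/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0: x reaches bit position y or
   above exactly when the shifted value is nonzero.  */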
12513 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12514 otherwise Y might be >= # of bits in X's type and thus e.g.
12515 (unsigned char) (1 << Y) for Y == 15 might be 0.
12516 If the cast is widening, then 1 << Y should have unsigned type,
12517 otherwise if Y is number of bits in the signed shift type minus 1,
12518 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
12519 Y == 31 might be 0xffffffff80000000. */
12520 if ((code == LT_EXPR || code == GE_EXPR)
12521 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12522 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12523 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12524 && CONVERT_EXPR_P (arg1)
12525 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12526 && (element_precision (TREE_TYPE (arg1))
12527 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12528 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12529 || (element_precision (TREE_TYPE (arg1))
12530 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12531 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12533 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12534 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12535 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12536 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12537 build_zero_cst (TREE_TYPE (arg0)));
12540 return NULL_TREE;
12542 case UNORDERED_EXPR:
12543 case ORDERED_EXPR:
12544 case UNLT_EXPR:
12545 case UNLE_EXPR:
12546 case UNGT_EXPR:
12547 case UNGE_EXPR:
12548 case UNEQ_EXPR:
12549 case LTGT_EXPR:
12550 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12552 tree targ0 = strip_float_extensions (arg0);
12553 tree targ1 = strip_float_extensions (arg1);
12554 tree newtype = TREE_TYPE (targ0);
12556 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12557 newtype = TREE_TYPE (targ1);
12559 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12560 return fold_build2_loc (loc, code, type,
12561 fold_convert_loc (loc, newtype, targ0),
12562 fold_convert_loc (loc, newtype, targ1));
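/* E.g. (double) f1 UNLT (double) f2 with float f1 and f2 is performed
directly in float: widening a float to double is exact, so the
ordered/unordered outcome is unchanged and a NaN operand stays a NaN
after the extension is stripped. */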
12565 return NULL_TREE;
12567 case COMPOUND_EXPR:
12568 /* When pedantic, a compound expression can be neither an lvalue
12569 nor an integer constant expression. */
12570 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12571 return NULL_TREE;
12572 /* Don't let (0, 0) be a null pointer constant. */
12573 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12574 : fold_convert_loc (loc, type, arg1);
12575 return tem;
12577 case ASSERT_EXPR:
12578 /* An ASSERT_EXPR should never be passed to fold_binary. */
12579 gcc_unreachable ();
12581 default:
12582 return NULL_TREE;
12583 } /* switch (code) */
12586 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12587 ((A & N) + B) & M -> (A + B) & M
12588 Similarly if (N & M) == 0,
12589 ((A | N) + B) & M -> (A + B) & M
12590 and for - instead of + (or unary - instead of +)
12591 and/or ^ instead of |.
12592 If B is constant and (B & M) == 0, fold into A & M.
12594 This function is a helper for match.pd patterns. Return non-NULL
12595 only if some simplification is possible; the returned type is the
12596 type in which the simplified operation should be performed.
12598 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12599 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12600 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12601 +/-. */
12602 tree
12603 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12604 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12605 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12606 tree *pmop)
12608 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12609 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12610 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12611 if (~cst1 == 0
12612 || (cst1 & (cst1 + 1)) != 0
12613 || !INTEGRAL_TYPE_P (type)
12614 || (!TYPE_OVERFLOW_WRAPS (type)
12615 && TREE_CODE (type) != INTEGER_TYPE)
12616 || (wi::max_value (type) & cst1) != cst1)
12617 return NULL_TREE;
12619 enum tree_code codes[2] = { code00, code01 };
12620 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12621 int which = 0;
12622 wide_int cst0;
12624 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12625 arg1 (M) is == (1LL << cst) - 1.
12626 Store C into PMOP[0] and D into PMOP[1]. */
12627 pmop[0] = arg00;
12628 pmop[1] = arg01;
12629 which = code != NEGATE_EXPR;
12631 for (; which >= 0; which--)
12632 switch (codes[which])
12634 case BIT_AND_EXPR:
12635 case BIT_IOR_EXPR:
12636 case BIT_XOR_EXPR:
12637 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12638 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12639 if (codes[which] == BIT_AND_EXPR)
12641 if (cst0 != cst1)
12642 break;
12644 else if (cst0 != 0)
12645 break;
12646 /* If C or D is of the form (A & N) where
12647 (N & M) == M, or of the form (A | N) or
12648 (A ^ N) where (N & M) == 0, replace it with A. */
12649 pmop[which] = arg0xx[2 * which];
12650 break;
12651 case ERROR_MARK:
12652 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12653 break;
12654 /* If C or D is a N where (N & M) == 0, it can be
12655 omitted (replaced with 0). */
12656 if ((code == PLUS_EXPR
12657 || (code == MINUS_EXPR && which == 0))
12658 && (cst1 & wi::to_wide (pmop[which])) == 0)
12659 pmop[which] = build_int_cst (type, 0);
12660 /* Similarly, with C - N where (-N & M) == 0. */
12661 if (code == MINUS_EXPR
12662 && which == 1
12663 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12664 pmop[which] = build_int_cst (type, 0);
12665 break;
12666 default:
12667 gcc_unreachable ();
12670 /* Only build anything new if we optimized one or both arguments above. */
12671 if (pmop[0] == arg00 && pmop[1] == arg01)
12672 return NULL_TREE;
12674 if (TYPE_OVERFLOW_WRAPS (type))
12675 return type;
12676 else
12677 return unsigned_type_for (type);
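/* As a worked example, take M == 0x0f, so M + 1 is a power of two.
((A & 0xff) + B) & 0x0f simplifies to (A + B) & 0x0f because
0xff & 0x0f == 0x0f: carries in the addition propagate only upward, so
bits 0-3 of the sum depend only on bits 0-3 of the operands, which the
AND leaves intact. ((A | 0xf0) + B) & 0x0f simplifies the same way
because 0xf0 & 0x0f == 0. And with B == 0x20, (A + 0x20) & 0x0f
reduces to A & 0x0f, since 0x20 & 0x0f == 0. The switch to an
unsigned type above avoids introducing new undefined overflow in the
rewritten expression. */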
12680 /* Used by contains_label_p and contains_label_1. */
12682 struct contains_label_data
12684 hash_set<tree> *pset;
12685 bool inside_switch_p;
12688 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12689 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12690 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12692 static tree
12693 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12695 contains_label_data *d = (contains_label_data *) data;
12696 switch (TREE_CODE (*tp))
12698 case LABEL_EXPR:
12699 return *tp;
12701 case CASE_LABEL_EXPR:
12702 if (!d->inside_switch_p)
12703 return *tp;
12704 return NULL_TREE;
12706 case SWITCH_EXPR:
12707 if (!d->inside_switch_p)
12709 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12710 return *tp;
12711 d->inside_switch_p = true;
12712 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12713 return *tp;
12714 d->inside_switch_p = false;
12715 *walk_subtrees = 0;
12717 return NULL_TREE;
12719 case GOTO_EXPR:
12720 *walk_subtrees = 0;
12721 return NULL_TREE;
12723 default:
12724 return NULL_TREE;
12728 /* Return whether the sub-tree ST contains a label which is accessible from
12729 outside the sub-tree. */
12731 static bool
12732 contains_label_p (tree st)
12734 hash_set<tree> pset;
12735 contains_label_data data = { &pset, false };
12736 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
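/* E.g. the dead arm of 0 ? ({ l:; x; }) : y still defines the label l,
which a goto elsewhere may target, so fold_ternary_loc below refuses
to drop a side-effecting operand for which this predicate returns
true. */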
12739 /* Fold a ternary expression of code CODE and type TYPE with operands
12740 OP0, OP1, and OP2. Return the folded expression if folding is
12741 successful. Otherwise, return NULL_TREE. */
12743 tree
12744 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12745 tree op0, tree op1, tree op2)
12747 tree tem;
12748 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12749 enum tree_code_class kind = TREE_CODE_CLASS (code);
12751 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12752 && TREE_CODE_LENGTH (code) == 3);
12754 /* If this is a commutative operation, and OP0 is a constant, move it
12755 to OP1 to reduce the number of tests below. */
12756 if (commutative_ternary_tree_code (code)
12757 && tree_swap_operands_p (op0, op1))
12758 return fold_build3_loc (loc, code, type, op1, op0, op2);
12760 tem = generic_simplify (loc, code, type, op0, op1, op2);
12761 if (tem)
12762 return tem;
12764 /* Strip any conversions that don't change the mode. This is safe
12765 for every expression, except for a comparison expression because
12766 its signedness is derived from its operands. So, in the latter
12767 case, only strip conversions that don't change the signedness.
12769 Note that this is done as an internal manipulation within the
12770 constant folder, in order to find the simplest representation of
12771 the arguments so that their form can be studied. In any case,
12772 the appropriate type conversions should be put back in the tree
12773 that will get out of the constant folder. */
12774 if (op0)
12776 arg0 = op0;
12777 STRIP_NOPS (arg0);
12780 if (op1)
12782 arg1 = op1;
12783 STRIP_NOPS (arg1);
12786 if (op2)
12788 arg2 = op2;
12789 STRIP_NOPS (arg2);
12792 switch (code)
12794 case COMPONENT_REF:
12795 if (TREE_CODE (arg0) == CONSTRUCTOR
12796 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12798 unsigned HOST_WIDE_INT idx;
12799 tree field, value;
12800 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12801 if (field == arg1)
12802 return value;
12804 return NULL_TREE;
12806 case COND_EXPR:
12807 case VEC_COND_EXPR:
12808 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12809 so all simple results must be passed through pedantic_non_lvalue. */
12810 if (TREE_CODE (arg0) == INTEGER_CST)
12812 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12813 tem = integer_zerop (arg0) ? op2 : op1;
12814 /* Only optimize constant conditions when the selected branch
12815 has the same type as the COND_EXPR. This avoids optimizing
12816 away "c ? x : throw", where the throw has a void type.
12817 Avoid throwing away an operand that contains a label. */
12818 if ((!TREE_SIDE_EFFECTS (unused_op)
12819 || !contains_label_p (unused_op))
12820 && (! VOID_TYPE_P (TREE_TYPE (tem))
12821 || VOID_TYPE_P (type)))
12822 return protected_set_expr_location_unshare (tem, loc);
12823 return NULL_TREE;
12825 else if (TREE_CODE (arg0) == VECTOR_CST)
12827 unsigned HOST_WIDE_INT nelts;
12828 if ((TREE_CODE (arg1) == VECTOR_CST
12829 || TREE_CODE (arg1) == CONSTRUCTOR)
12830 && (TREE_CODE (arg2) == VECTOR_CST
12831 || TREE_CODE (arg2) == CONSTRUCTOR)
12832 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12834 vec_perm_builder sel (nelts, nelts, 1);
12835 for (unsigned int i = 0; i < nelts; i++)
12837 tree val = VECTOR_CST_ELT (arg0, i);
12838 if (integer_all_onesp (val))
12839 sel.quick_push (i);
12840 else if (integer_zerop (val))
12841 sel.quick_push (nelts + i);
12842 else /* Currently unreachable. */
12843 return NULL_TREE;
12845 vec_perm_indices indices (sel, 2, nelts);
12846 tree t = fold_vec_perm (type, arg1, arg2, indices);
12847 if (t != NULL_TREE)
12848 return t;
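/* E.g. a four-element mask of { -1, 0, -1, 0 } selecting between
{ a0, a1, a2, a3 } and { b0, b1, b2, b3 } becomes the permutation
{ 0, 5, 2, 7 }, i.e. { a0, b1, a2, b3 }: an all-ones lane picks
element I of ARG1 and a zero lane picks element NELTS + I of ARG2. */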
12852 /* If we have A op B ? A : C, we may be able to convert this to a
12853 simpler expression, depending on the operation and the values
12854 of B and C. Signed zeros prevent all of these transformations,
12855 for reasons given above each one.
12857 Also try swapping the arguments and inverting the conditional. */
12858 if (COMPARISON_CLASS_P (arg0)
12859 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12860 && !HONOR_SIGNED_ZEROS (op1))
12862 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12863 TREE_OPERAND (arg0, 0),
12864 TREE_OPERAND (arg0, 1),
12865 op1, op2);
12866 if (tem)
12867 return tem;
12870 if (COMPARISON_CLASS_P (arg0)
12871 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12872 && !HONOR_SIGNED_ZEROS (op2))
12874 enum tree_code comp_code = TREE_CODE (arg0);
12875 tree arg00 = TREE_OPERAND (arg0, 0);
12876 tree arg01 = TREE_OPERAND (arg0, 1);
12877 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12878 if (comp_code != ERROR_MARK)
12879 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12880 arg00,
12881 arg01,
12882 op2, op1);
12883 if (tem)
12884 return tem;
12887 /* If the second operand is simpler than the third, swap them
12888 since that produces better jump optimization results. */
12889 if (truth_value_p (TREE_CODE (arg0))
12890 && tree_swap_operands_p (op1, op2))
12892 location_t loc0 = expr_location_or (arg0, loc);
12893 /* See if this can be inverted. If it can't, possibly because
12894 it was a floating-point inequality comparison, don't do
12895 anything. */
12896 tem = fold_invert_truthvalue (loc0, arg0);
12897 if (tem)
12898 return fold_build3_loc (loc, code, type, tem, op2, op1);
12901 /* Convert A ? 1 : 0 to simply A. */
12902 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12903 : (integer_onep (op1)
12904 && !VECTOR_TYPE_P (type)))
12905 && integer_zerop (op2)
12906 /* If we try to convert OP0 to our type, the
12907 call to fold will try to move the conversion inside
12908 a COND, which will recurse. In that case, the COND_EXPR
12909 is probably the best choice, so leave it alone. */
12910 && type == TREE_TYPE (arg0))
12911 return protected_set_expr_location_unshare (arg0, loc);
12913 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12914 over COND_EXPR in cases such as floating point comparisons. */
12915 if (integer_zerop (op1)
12916 && code == COND_EXPR
12917 && integer_onep (op2)
12918 && !VECTOR_TYPE_P (type)
12919 && truth_value_p (TREE_CODE (arg0)))
12920 return fold_convert_loc (loc, type,
12921 invert_truthvalue_loc (loc, arg0));
12923 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12924 if (TREE_CODE (arg0) == LT_EXPR
12925 && integer_zerop (TREE_OPERAND (arg0, 1))
12926 && integer_zerop (op2)
12927 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12929 /* sign_bit_p looks through both zero and sign extensions,
12930 but for this optimization only sign extensions are
12931 usable. */
12932 tree tem2 = TREE_OPERAND (arg0, 0);
12933 while (tem != tem2)
12935 if (TREE_CODE (tem2) != NOP_EXPR
12936 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12938 tem = NULL_TREE;
12939 break;
12941 tem2 = TREE_OPERAND (tem2, 0);
12943 /* sign_bit_p only checks ARG1 bits within A's precision.
12944 If <sign bit of A> has wider type than A, bits outside
12945 of A's precision in <sign bit of A> need to be checked.
12946 If they are all 0, this optimization needs to be done
12947 in unsigned A's type, if they are all 1 in signed A's type,
12948 otherwise this can't be done. */
12949 if (tem
12950 && TYPE_PRECISION (TREE_TYPE (tem))
12951 < TYPE_PRECISION (TREE_TYPE (arg1))
12952 && TYPE_PRECISION (TREE_TYPE (tem))
12953 < TYPE_PRECISION (type))
12955 int inner_width, outer_width;
12956 tree tem_type;
12958 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12959 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12960 if (outer_width > TYPE_PRECISION (type))
12961 outer_width = TYPE_PRECISION (type);
12963 wide_int mask = wi::shifted_mask
12964 (inner_width, outer_width - inner_width, false,
12965 TYPE_PRECISION (TREE_TYPE (arg1)));
12967 wide_int common = mask & wi::to_wide (arg1);
12968 if (common == mask)
12970 tem_type = signed_type_for (TREE_TYPE (tem));
12971 tem = fold_convert_loc (loc, tem_type, tem);
12973 else if (common == 0)
12975 tem_type = unsigned_type_for (TREE_TYPE (tem));
12976 tem = fold_convert_loc (loc, tem_type, tem);
12978 else
12979 tem = NULL;
12982 if (tem)
12983 return
12984 fold_convert_loc (loc, type,
12985 fold_build2_loc (loc, BIT_AND_EXPR,
12986 TREE_TYPE (tem), tem,
12987 fold_convert_loc (loc,
12988 TREE_TYPE (tem),
12989 arg1)));
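/* E.g. for 32-bit int A, A < 0 ? 0x80000000 : 0 becomes
A & 0x80000000 directly. If A is a sign-extended 8-bit value and ARG1
is 0xffffff80, the bits of ARG1 above A's precision are all ones, so
the AND is performed in the signed 8-bit type and the sign extension
recreates them; for ARG1 == 0x00000080 those bits are all zeros and
the unsigned 8-bit type is used instead. */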
12992 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12993 already handled above. */
12994 if (TREE_CODE (arg0) == BIT_AND_EXPR
12995 && integer_onep (TREE_OPERAND (arg0, 1))
12996 && integer_zerop (op2)
12997 && integer_pow2p (arg1))
12999 tree tem = TREE_OPERAND (arg0, 0);
13000 STRIP_NOPS (tem);
13001 if (TREE_CODE (tem) == RSHIFT_EXPR
13002 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13003 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13004 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13005 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13006 fold_convert_loc (loc, type,
13007 TREE_OPERAND (tem, 0)),
13008 op1);
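/* E.g. (A >> 3) & 1 ? 8 : 0 becomes A & 8: the tested bit is exactly
the bit the THEN value contributes, so A = 0b1010 yields 8 either way
and A = 0b0010 yields 0 either way. */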
13011 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13012 is probably obsolete because the first operand should be a
13013 truth value (that's why we have the two cases above), but let's
13014 leave it in until we can confirm this for all front-ends. */
13015 if (integer_zerop (op2)
13016 && TREE_CODE (arg0) == NE_EXPR
13017 && integer_zerop (TREE_OPERAND (arg0, 1))
13018 && integer_pow2p (arg1)
13019 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13020 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13021 arg1, OEP_ONLY_CONST)
13022 /* operand_equal_p compares just value, not precision, so e.g.
13023 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13024 second operand 32-bit -128, which is not a power of two (or vice
13025 versa). */
13026 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13027 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13029 /* Disable the transformations below for vectors, since
13030 fold_binary_op_with_conditional_arg may undo them immediately,
13031 yielding an infinite loop. */
13032 if (code == VEC_COND_EXPR)
13033 return NULL_TREE;
13035 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13036 if (integer_zerop (op2)
13037 && truth_value_p (TREE_CODE (arg0))
13038 && truth_value_p (TREE_CODE (arg1))
13039 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13040 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13041 : TRUTH_ANDIF_EXPR,
13042 type, fold_convert_loc (loc, type, arg0), op1);
13044 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13045 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13046 && truth_value_p (TREE_CODE (arg0))
13047 && truth_value_p (TREE_CODE (arg1))
13048 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13050 location_t loc0 = expr_location_or (arg0, loc);
13051 /* Only perform transformation if ARG0 is easily inverted. */
13052 tem = fold_invert_truthvalue (loc0, arg0);
13053 if (tem)
13054 return fold_build2_loc (loc, code == VEC_COND_EXPR
13055 ? BIT_IOR_EXPR
13056 : TRUTH_ORIF_EXPR,
13057 type, fold_convert_loc (loc, type, tem),
13058 op1);
13061 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13062 if (integer_zerop (arg1)
13063 && truth_value_p (TREE_CODE (arg0))
13064 && truth_value_p (TREE_CODE (op2))
13065 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13067 location_t loc0 = expr_location_or (arg0, loc);
13068 /* Only perform transformation if ARG0 is easily inverted. */
13069 tem = fold_invert_truthvalue (loc0, arg0);
13070 if (tem)
13071 return fold_build2_loc (loc, code == VEC_COND_EXPR
13072 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13073 type, fold_convert_loc (loc, type, tem),
13074 op2);
13077 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13078 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13079 && truth_value_p (TREE_CODE (arg0))
13080 && truth_value_p (TREE_CODE (op2))
13081 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13082 return fold_build2_loc (loc, code == VEC_COND_EXPR
13083 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13084 type, fold_convert_loc (loc, type, arg0), op2);
13086 return NULL_TREE;
13088 case CALL_EXPR:
13089 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13090 of fold_ternary on them. */
13091 gcc_unreachable ();
13093 case BIT_FIELD_REF:
13094 if (TREE_CODE (arg0) == VECTOR_CST
13095 && (type == TREE_TYPE (TREE_TYPE (arg0))
13096 || (VECTOR_TYPE_P (type)
13097 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13098 && tree_fits_uhwi_p (op1)
13099 && tree_fits_uhwi_p (op2))
13101 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13102 unsigned HOST_WIDE_INT width
13103 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13104 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13105 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13106 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13108 if (n != 0
13109 && (idx % width) == 0
13110 && (n % width) == 0
13111 && known_le ((idx + n) / width,
13112 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13114 idx = idx / width;
13115 n = n / width;
13117 if (TREE_CODE (arg0) == VECTOR_CST)
13119 if (n == 1)
13121 tem = VECTOR_CST_ELT (arg0, idx);
13122 if (VECTOR_TYPE_P (type))
13123 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13124 return tem;
13127 tree_vector_builder vals (type, n, 1);
13128 for (unsigned i = 0; i < n; ++i)
13129 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13130 return vals.build ();
13135 /* On constants we can use native encode/interpret to constant
13136 fold (nearly) all BIT_FIELD_REFs. */
13137 if (CONSTANT_CLASS_P (arg0)
13138 && can_native_interpret_type_p (type)
13139 && BITS_PER_UNIT == 8
13140 && tree_fits_uhwi_p (op1)
13141 && tree_fits_uhwi_p (op2))
13143 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13144 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13145 /* Limit us to a reasonable amount of work. To relax the
13146 other limitations we need bit-shifting of the buffer
13147 and rounding up the size. */
13148 if (bitpos % BITS_PER_UNIT == 0
13149 && bitsize % BITS_PER_UNIT == 0
13150 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13152 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13153 unsigned HOST_WIDE_INT len
13154 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13155 bitpos / BITS_PER_UNIT);
13156 if (len > 0
13157 && len * BITS_PER_UNIT >= bitsize)
13159 tree v = native_interpret_expr (type, b,
13160 bitsize / BITS_PER_UNIT);
13161 if (v)
13162 return v;
13167 return NULL_TREE;
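/* E.g. on a little-endian target, BIT_FIELD_REF <0x11223344, 8, 8> of
a 32-bit constant encodes the bytes 44 33 22 11 and reinterprets the
single byte at offset 1, yielding 0x33; the byte-alignment tests above
are what make this simple encode/interpret round trip sufficient. */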
13169 case VEC_PERM_EXPR:
13170 /* Perform constant folding of VEC_PERM_EXPR. */
13171 if (TREE_CODE (arg2) == VECTOR_CST
13172 && TREE_CODE (op0) == VECTOR_CST
13173 && TREE_CODE (op1) == VECTOR_CST)
13175 /* Build a vector of integers from the tree mask. */
13176 vec_perm_builder builder;
13177 if (!tree_to_vec_perm_builder (&builder, arg2))
13178 return NULL_TREE;
13180 /* Create a vec_perm_indices for the integer vector. */
13181 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13182 bool single_arg = (op0 == op1);
13183 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13184 return fold_vec_perm (type, op0, op1, sel);
13186 return NULL_TREE;
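/* E.g. VEC_PERM_EXPR <{ a0, a1, a2, a3 }, { b0, b1, b2, b3 },
{ 2, 7, 0, 5 }> folds to { a2, b3, a0, b1 }: selector values below
NELTS index OP0 and the remaining values index OP1. */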
13188 case BIT_INSERT_EXPR:
13189 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13190 if (TREE_CODE (arg0) == INTEGER_CST
13191 && TREE_CODE (arg1) == INTEGER_CST)
13193 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13194 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13195 wide_int tem = (wi::to_wide (arg0)
13196 & wi::shifted_mask (bitpos, bitsize, true,
13197 TYPE_PRECISION (type)));
13198 wide_int tem2
13199 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13200 bitsize), bitpos);
13201 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13203 else if (TREE_CODE (arg0) == VECTOR_CST
13204 && CONSTANT_CLASS_P (arg1)
13205 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13206 TREE_TYPE (arg1)))
13208 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13209 unsigned HOST_WIDE_INT elsize
13210 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13211 if (bitpos % elsize == 0)
13213 unsigned k = bitpos / elsize;
13214 unsigned HOST_WIDE_INT nelts;
13215 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13216 return arg0;
13217 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13219 tree_vector_builder elts (type, nelts, 1);
13220 elts.quick_grow (nelts);
13221 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13222 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13223 return elts.build ();
13227 return NULL_TREE;
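/* E.g. for the integer case above, inserting the 8-bit value 0xef at
bit position 4 of the 16-bit constant 0xabcd keeps bits 0-3 and 12-15
(0xa00d), shifts the new field into place (0x0ef0) and ORs the two
together, giving 0xaefd. */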
13229 default:
13230 return NULL_TREE;
13231 } /* switch (code) */
13234 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13235 of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
13236 constructor element index of the value returned. If the element is
13237 not found, NULL_TREE is returned and *CTOR_IDX is updated to
13238 the index of the element after the ACCESS_INDEX position (which
13239 may be outside of the CTOR array). */
13241 tree
13242 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13243 unsigned *ctor_idx)
13245 tree index_type = NULL_TREE;
13246 signop index_sgn = UNSIGNED;
13247 offset_int low_bound = 0;
13249 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13251 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13252 if (domain_type && TYPE_MIN_VALUE (domain_type))
13254 /* Static constructors for variably sized objects make no sense. */
13255 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13256 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13257 /* ??? When it is obvious that the range is signed, treat it so. */
13258 if (TYPE_UNSIGNED (index_type)
13259 && TYPE_MAX_VALUE (domain_type)
13260 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13261 TYPE_MIN_VALUE (domain_type)))
13263 index_sgn = SIGNED;
13264 low_bound
13265 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13266 SIGNED);
13268 else
13270 index_sgn = TYPE_SIGN (index_type);
13271 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13276 if (index_type)
13277 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13278 index_sgn);
13280 offset_int index = low_bound;
13281 if (index_type)
13282 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13284 offset_int max_index = index;
13285 unsigned cnt;
13286 tree cfield, cval;
13287 bool first_p = true;
13289 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13291 /* An array constructor may explicitly set the index, specify a range,
13292 or leave the index NULL, meaning it is the next index after the
13293 previous one. */
13294 if (cfield)
13296 if (TREE_CODE (cfield) == INTEGER_CST)
13297 max_index = index
13298 = offset_int::from (wi::to_wide (cfield), index_sgn);
13299 else
13301 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13302 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13303 index_sgn);
13304 max_index
13305 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13306 index_sgn);
13307 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13310 else if (!first_p)
13312 index = max_index + 1;
13313 if (index_type)
13314 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13315 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13316 max_index = index;
13318 else
13319 first_p = false;
13321 /* Do we have a match? */
13322 if (wi::cmp (access_index, index, index_sgn) >= 0)
13324 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13326 if (ctor_idx)
13327 *ctor_idx = cnt;
13328 return cval;
13331 else if (in_gimple_form)
13332 /* We're past the element we're searching for. Note that during
13333 parsing the elements might not be sorted.
13334 ??? We should use a binary search and a flag on the
13335 CONSTRUCTOR as to whether elements are sorted in declaration
13336 order. */
13337 break;
13339 if (ctor_idx)
13340 *ctor_idx = cnt;
13341 return NULL_TREE;
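/* E.g. for the constructor { [0 ... 3] = 7, [5] = 9 }, ACCESS_INDEX 2
falls inside the RANGE_EXPR and returns 7 with *CTOR_IDX == 0, while
ACCESS_INDEX 4 matches no element and returns NULL_TREE. */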
13344 /* Perform constant folding and related simplification of EXPR.
13345 The related simplifications include x*1 => x, x*0 => 0, etc.,
13346 and application of the associative law.
13347 NOP_EXPR conversions may be removed freely (as long as we
13348 are careful not to change the type of the overall expression).
13349 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13350 but we can constant-fold them if they have constant operands. */
13352 #ifdef ENABLE_FOLD_CHECKING
13353 # define fold(x) fold_1 (x)
13354 static tree fold_1 (tree);
13355 static
13356 #endif
13357 tree
13358 fold (tree expr)
13360 const tree t = expr;
13361 enum tree_code code = TREE_CODE (t);
13362 enum tree_code_class kind = TREE_CODE_CLASS (code);
13363 tree tem;
13364 location_t loc = EXPR_LOCATION (expr);
13366 /* Return right away if a constant. */
13367 if (kind == tcc_constant)
13368 return t;
13370 /* CALL_EXPR-like objects with variable numbers of operands are
13371 treated specially. */
13372 if (kind == tcc_vl_exp)
13374 if (code == CALL_EXPR)
13376 tem = fold_call_expr (loc, expr, false);
13377 return tem ? tem : expr;
13379 return expr;
13382 if (IS_EXPR_CODE_CLASS (kind))
13384 tree type = TREE_TYPE (t);
13385 tree op0, op1, op2;
13387 switch (TREE_CODE_LENGTH (code))
13389 case 1:
13390 op0 = TREE_OPERAND (t, 0);
13391 tem = fold_unary_loc (loc, code, type, op0);
13392 return tem ? tem : expr;
13393 case 2:
13394 op0 = TREE_OPERAND (t, 0);
13395 op1 = TREE_OPERAND (t, 1);
13396 tem = fold_binary_loc (loc, code, type, op0, op1);
13397 return tem ? tem : expr;
13398 case 3:
13399 op0 = TREE_OPERAND (t, 0);
13400 op1 = TREE_OPERAND (t, 1);
13401 op2 = TREE_OPERAND (t, 2);
13402 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13403 return tem ? tem : expr;
13404 default:
13405 break;
13409 switch (code)
13411 case ARRAY_REF:
13413 tree op0 = TREE_OPERAND (t, 0);
13414 tree op1 = TREE_OPERAND (t, 1);
13416 if (TREE_CODE (op1) == INTEGER_CST
13417 && TREE_CODE (op0) == CONSTRUCTOR
13418 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13420 tree val = get_array_ctor_element_at_index (op0,
13421 wi::to_offset (op1));
13422 if (val)
13423 return val;
13426 return t;
13429 /* Return a VECTOR_CST if possible. */
13430 case CONSTRUCTOR:
13432 tree type = TREE_TYPE (t);
13433 if (TREE_CODE (type) != VECTOR_TYPE)
13434 return t;
13436 unsigned i;
13437 tree val;
13438 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13439 if (! CONSTANT_CLASS_P (val))
13440 return t;
13442 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13445 case CONST_DECL:
13446 return fold (DECL_INITIAL (t));
13448 default:
13449 return t;
13450 } /* switch (code) */
13453 #ifdef ENABLE_FOLD_CHECKING
13454 #undef fold
13456 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13457 hash_table<nofree_ptr_hash<const tree_node> > *);
13458 static void fold_check_failed (const_tree, const_tree);
13459 void print_fold_checksum (const_tree);
13461 /* When --enable-checking=fold, compute a digest of expr before
13462 and after the actual fold call to verify that fold did not
13463 accidentally change the original expr. */
13465 tree
13466 fold (tree expr)
13468 tree ret;
13469 struct md5_ctx ctx;
13470 unsigned char checksum_before[16], checksum_after[16];
13471 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13473 md5_init_ctx (&ctx);
13474 fold_checksum_tree (expr, &ctx, &ht);
13475 md5_finish_ctx (&ctx, checksum_before);
13476 ht.empty ();
13478 ret = fold_1 (expr);
13480 md5_init_ctx (&ctx);
13481 fold_checksum_tree (expr, &ctx, &ht);
13482 md5_finish_ctx (&ctx, checksum_after);
13484 if (memcmp (checksum_before, checksum_after, 16))
13485 fold_check_failed (expr, ret);
13487 return ret;
13490 void
13491 print_fold_checksum (const_tree expr)
13493 struct md5_ctx ctx;
13494 unsigned char checksum[16], cnt;
13495 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13497 md5_init_ctx (&ctx);
13498 fold_checksum_tree (expr, &ctx, &ht);
13499 md5_finish_ctx (&ctx, checksum);
13500 for (cnt = 0; cnt < 16; ++cnt)
13501 fprintf (stderr, "%02x", checksum[cnt]);
13502 putc ('\n', stderr);
13505 static void
13506 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13508 internal_error ("fold check: original tree changed by fold");
13511 static void
13512 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13513 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13515 const tree_node **slot;
13516 enum tree_code code;
13517 union tree_node *buf;
13518 int i, len;
13520 recursive_label:
13521 if (expr == NULL)
13522 return;
13523 slot = ht->find_slot (expr, INSERT);
13524 if (*slot != NULL)
13525 return;
13526 *slot = expr;
13527 code = TREE_CODE (expr);
13528 if (TREE_CODE_CLASS (code) == tcc_declaration
13529 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13531 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13532 size_t sz = tree_size (expr);
13533 buf = XALLOCAVAR (union tree_node, sz);
13534 memcpy ((char *) buf, expr, sz);
13535 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13536 buf->decl_with_vis.symtab_node = NULL;
13537 buf->base.nowarning_flag = 0;
13538 expr = (tree) buf;
13540 else if (TREE_CODE_CLASS (code) == tcc_type
13541 && (TYPE_POINTER_TO (expr)
13542 || TYPE_REFERENCE_TO (expr)
13543 || TYPE_CACHED_VALUES_P (expr)
13544 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13545 || TYPE_NEXT_VARIANT (expr)
13546 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13548 /* Allow these fields to be modified. */
13549 tree tmp;
13550 size_t sz = tree_size (expr);
13551 buf = XALLOCAVAR (union tree_node, sz);
13552 memcpy ((char *) buf, expr, sz);
13553 expr = tmp = (tree) buf;
13554 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13555 TYPE_POINTER_TO (tmp) = NULL;
13556 TYPE_REFERENCE_TO (tmp) = NULL;
13557 TYPE_NEXT_VARIANT (tmp) = NULL;
13558 TYPE_ALIAS_SET (tmp) = -1;
13559 if (TYPE_CACHED_VALUES_P (tmp))
13561 TYPE_CACHED_VALUES_P (tmp) = 0;
13562 TYPE_CACHED_VALUES (tmp) = NULL;
13565 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13567 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13568 that and change builtins.c etc. instead - see PR89543. */
13569 size_t sz = tree_size (expr);
13570 buf = XALLOCAVAR (union tree_node, sz);
13571 memcpy ((char *) buf, expr, sz);
13572 buf->base.nowarning_flag = 0;
13573 expr = (tree) buf;
13575 md5_process_bytes (expr, tree_size (expr), ctx);
13576 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13577 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13578 if (TREE_CODE_CLASS (code) != tcc_type
13579 && TREE_CODE_CLASS (code) != tcc_declaration
13580 && code != TREE_LIST
13581 && code != SSA_NAME
13582 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13583 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13584 switch (TREE_CODE_CLASS (code))
13586 case tcc_constant:
13587 switch (code)
13589 case STRING_CST:
13590 md5_process_bytes (TREE_STRING_POINTER (expr),
13591 TREE_STRING_LENGTH (expr), ctx);
13592 break;
13593 case COMPLEX_CST:
13594 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13595 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13596 break;
13597 case VECTOR_CST:
13598 len = vector_cst_encoded_nelts (expr);
13599 for (i = 0; i < len; ++i)
13600 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13601 break;
13602 default:
13603 break;
13605 break;
13606 case tcc_exceptional:
13607 switch (code)
13609 case TREE_LIST:
13610 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13611 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13612 expr = TREE_CHAIN (expr);
13613 goto recursive_label;
13614 break;
13615 case TREE_VEC:
13616 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13617 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13618 break;
13619 default:
13620 break;
13622 break;
13623 case tcc_expression:
13624 case tcc_reference:
13625 case tcc_comparison:
13626 case tcc_unary:
13627 case tcc_binary:
13628 case tcc_statement:
13629 case tcc_vl_exp:
13630 len = TREE_OPERAND_LENGTH (expr);
13631 for (i = 0; i < len; ++i)
13632 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13633 break;
13634 case tcc_declaration:
13635 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13636 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13637 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13639 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13640 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13641 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13642 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13643 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13646 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13648 if (TREE_CODE (expr) == FUNCTION_DECL)
13650 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13651 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13653 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13655 break;
13656 case tcc_type:
13657 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13658 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13659 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13660 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13661 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13662 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13663 if (INTEGRAL_TYPE_P (expr)
13664 || SCALAR_FLOAT_TYPE_P (expr))
13666 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13667 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13669 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13670 if (TREE_CODE (expr) == RECORD_TYPE
13671 || TREE_CODE (expr) == UNION_TYPE
13672 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13673 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13674 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13675 break;
13676 default:
13677 break;
13681 /* Helper function for outputting the checksum of a tree T. When
13682 debugging with gdb, you can "define mynext" to be "next" followed
13683 by "call debug_fold_checksum (op0)", then just trace down till the
13684 outputs differ. */
13686 DEBUG_FUNCTION void
13687 debug_fold_checksum (const_tree t)
13689 int i;
13690 unsigned char checksum[16];
13691 struct md5_ctx ctx;
13692 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13694 md5_init_ctx (&ctx);
13695 fold_checksum_tree (t, &ctx, &ht);
13696 md5_finish_ctx (&ctx, checksum);
13697 ht.empty ();
13699 for (i = 0; i < 16; i++)
13700 fprintf (stderr, "%d ", checksum[i]);
13702 fprintf (stderr, "\n");
13705 #endif
13707 /* Fold a unary tree expression with code CODE of type TYPE with an
13708 operand OP0. LOC is the location of the resulting expression.
13709 Return a folded expression if successful. Otherwise, return a tree
13710 expression with code CODE of type TYPE with an operand OP0. */
13712 tree
13713 fold_build1_loc (location_t loc,
13714 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13716 tree tem;
13717 #ifdef ENABLE_FOLD_CHECKING
13718 unsigned char checksum_before[16], checksum_after[16];
13719 struct md5_ctx ctx;
13720 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13722 md5_init_ctx (&ctx);
13723 fold_checksum_tree (op0, &ctx, &ht);
13724 md5_finish_ctx (&ctx, checksum_before);
13725 ht.empty ();
13726 #endif
13728 tem = fold_unary_loc (loc, code, type, op0);
13729 if (!tem)
13730 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13732 #ifdef ENABLE_FOLD_CHECKING
13733 md5_init_ctx (&ctx);
13734 fold_checksum_tree (op0, &ctx, &ht);
13735 md5_finish_ctx (&ctx, checksum_after);
13737 if (memcmp (checksum_before, checksum_after, 16))
13738 fold_check_failed (op0, tem);
13739 #endif
13740 return tem;
13743 /* Fold a binary tree expression with code CODE of type TYPE with
13744 operands OP0 and OP1. LOC is the location of the resulting
13745 expression. Return a folded expression if successful. Otherwise,
13746 return a tree expression with code CODE of type TYPE with operands
13747 OP0 and OP1. */
13749 tree
13750 fold_build2_loc (location_t loc,
13751 enum tree_code code, tree type, tree op0, tree op1
13752 MEM_STAT_DECL)
13754 tree tem;
13755 #ifdef ENABLE_FOLD_CHECKING
13756 unsigned char checksum_before_op0[16],
13757 checksum_before_op1[16],
13758 checksum_after_op0[16],
13759 checksum_after_op1[16];
13760 struct md5_ctx ctx;
13761 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13763 md5_init_ctx (&ctx);
13764 fold_checksum_tree (op0, &ctx, &ht);
13765 md5_finish_ctx (&ctx, checksum_before_op0);
13766 ht.empty ();
13768 md5_init_ctx (&ctx);
13769 fold_checksum_tree (op1, &ctx, &ht);
13770 md5_finish_ctx (&ctx, checksum_before_op1);
13771 ht.empty ();
13772 #endif
13774 tem = fold_binary_loc (loc, code, type, op0, op1);
13775 if (!tem)
13776 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13778 #ifdef ENABLE_FOLD_CHECKING
13779 md5_init_ctx (&ctx);
13780 fold_checksum_tree (op0, &ctx, &ht);
13781 md5_finish_ctx (&ctx, checksum_after_op0);
13782 ht.empty ();
13784 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13785 fold_check_failed (op0, tem);
13787 md5_init_ctx (&ctx);
13788 fold_checksum_tree (op1, &ctx, &ht);
13789 md5_finish_ctx (&ctx, checksum_after_op1);
13791 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13792 fold_check_failed (op1, tem);
13793 #endif
13794 return tem;
13797 /* Fold a ternary tree expression with code CODE of type TYPE with
13798 operands OP0, OP1, and OP2. Return a folded expression if
13799 successful. Otherwise, return a tree expression with code CODE of
13800 type TYPE with operands OP0, OP1, and OP2. */
13802 tree
13803 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13804 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13806 tree tem;
13807 #ifdef ENABLE_FOLD_CHECKING
13808 unsigned char checksum_before_op0[16],
13809 checksum_before_op1[16],
13810 checksum_before_op2[16],
13811 checksum_after_op0[16],
13812 checksum_after_op1[16],
13813 checksum_after_op2[16];
13814 struct md5_ctx ctx;
13815 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13817 md5_init_ctx (&ctx);
13818 fold_checksum_tree (op0, &ctx, &ht);
13819 md5_finish_ctx (&ctx, checksum_before_op0);
13820 ht.empty ();
13822 md5_init_ctx (&ctx);
13823 fold_checksum_tree (op1, &ctx, &ht);
13824 md5_finish_ctx (&ctx, checksum_before_op1);
13825 ht.empty ();
13827 md5_init_ctx (&ctx);
13828 fold_checksum_tree (op2, &ctx, &ht);
13829 md5_finish_ctx (&ctx, checksum_before_op2);
13830 ht.empty ();
13831 #endif
13833 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13834 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13835 if (!tem)
13836 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13838 #ifdef ENABLE_FOLD_CHECKING
13839 md5_init_ctx (&ctx);
13840 fold_checksum_tree (op0, &ctx, &ht);
13841 md5_finish_ctx (&ctx, checksum_after_op0);
13842 ht.empty ();
13844 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13845 fold_check_failed (op0, tem);
13847 md5_init_ctx (&ctx);
13848 fold_checksum_tree (op1, &ctx, &ht);
13849 md5_finish_ctx (&ctx, checksum_after_op1);
13850 ht.empty ();
13852 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13853 fold_check_failed (op1, tem);
13855 md5_init_ctx (&ctx);
13856 fold_checksum_tree (op2, &ctx, &ht);
13857 md5_finish_ctx (&ctx, checksum_after_op2);
13859 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13860 fold_check_failed (op2, tem);
13861 #endif
13862 return tem;
13865 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
13866 arguments in ARGARRAY, and a null static chain.
13867 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13868 of type TYPE from the given operands as constructed by build_call_array. */
13870 tree
13871 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13872 int nargs, tree *argarray)
13874 tree tem;
13875 #ifdef ENABLE_FOLD_CHECKING
13876 unsigned char checksum_before_fn[16],
13877 checksum_before_arglist[16],
13878 checksum_after_fn[16],
13879 checksum_after_arglist[16];
13880 struct md5_ctx ctx;
13881 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13882 int i;
13884 md5_init_ctx (&ctx);
13885 fold_checksum_tree (fn, &ctx, &ht);
13886 md5_finish_ctx (&ctx, checksum_before_fn);
13887 ht.empty ();
13889 md5_init_ctx (&ctx);
13890 for (i = 0; i < nargs; i++)
13891 fold_checksum_tree (argarray[i], &ctx, &ht);
13892 md5_finish_ctx (&ctx, checksum_before_arglist);
13893 ht.empty ();
13894 #endif
13896 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13897 if (!tem)
13898 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13900 #ifdef ENABLE_FOLD_CHECKING
13901 md5_init_ctx (&ctx);
13902 fold_checksum_tree (fn, &ctx, &ht);
13903 md5_finish_ctx (&ctx, checksum_after_fn);
13904 ht.empty ();
13906 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13907 fold_check_failed (fn, tem);
13909 md5_init_ctx (&ctx);
13910 for (i = 0; i < nargs; i++)
13911 fold_checksum_tree (argarray[i], &ctx, &ht);
13912 md5_finish_ctx (&ctx, checksum_after_arglist);
13914 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13915 fold_check_failed (NULL_TREE, tem);
13916 #endif
13917 return tem;
13920 /* Perform constant folding and related simplification of initializer
13921 expression EXPR. These behave identically to "fold_buildN" but ignore
13922 potential run-time traps and exceptions that fold must preserve. */
13924 #define START_FOLD_INIT \
13925 int saved_signaling_nans = flag_signaling_nans;\
13926 int saved_trapping_math = flag_trapping_math;\
13927 int saved_rounding_math = flag_rounding_math;\
13928 int saved_trapv = flag_trapv;\
13929 int saved_folding_initializer = folding_initializer;\
13930 flag_signaling_nans = 0;\
13931 flag_trapping_math = 0;\
13932 flag_rounding_math = 0;\
13933 flag_trapv = 0;\
13934 folding_initializer = 1;
13936 #define END_FOLD_INIT \
13937 flag_signaling_nans = saved_signaling_nans;\
13938 flag_trapping_math = saved_trapping_math;\
13939 flag_rounding_math = saved_rounding_math;\
13940 flag_trapv = saved_trapv;\
13941 folding_initializer = saved_folding_initializer;
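/* E.g. the static initializer 1.0 / 3.0 must still fold when
-ftrapping-math or -frounding-math is in effect, because it is
evaluated at translation time rather than at run time; the macros
above save and clear those flags around the fold and restore them
afterwards. */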
13943 tree
13944 fold_init (tree expr)
13946 tree result;
13947 START_FOLD_INIT;
13949 result = fold (expr);
13951 END_FOLD_INIT;
13952 return result;
13955 tree
13956 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13957 tree type, tree op)
13959 tree result;
13960 START_FOLD_INIT;
13962 result = fold_build1_loc (loc, code, type, op);
13964 END_FOLD_INIT;
13965 return result;
13968 tree
13969 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13970 tree type, tree op0, tree op1)
13972 tree result;
13973 START_FOLD_INIT;
13975 result = fold_build2_loc (loc, code, type, op0, op1);
13977 END_FOLD_INIT;
13978 return result;
13981 tree
13982 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13983 int nargs, tree *argarray)
13985 tree result;
13986 START_FOLD_INIT;
13988 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13990 END_FOLD_INIT;
13991 return result;
13994 #undef START_FOLD_INIT
13995 #undef END_FOLD_INIT
13997 /* Determine if first argument is a multiple of second argument. Return 0 if
13998 it is not, or if we cannot easily determine it to be.
14000 An example of the sort of thing we care about (at this point; this routine
14001 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14002 fold cases do now) is discovering that
14004 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14006 is a multiple of
14008 SAVE_EXPR (J * 8)
14010 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14012 This code also handles discovering that
14014 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14016 is a multiple of 8 so we don't have to worry about dealing with a
14017 possible remainder.
14019 Note that we *look* inside a SAVE_EXPR only to determine how it was
14020 calculated; it is not safe for fold to do much of anything else with the
14021 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14022 at run time. For example, the latter example above *cannot* be implemented
14023 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14024 evaluation time of the original SAVE_EXPR is not necessarily the same at
14025 the time the new expression is evaluated. The only optimization of this
14026 sort that would be valid is changing
14028 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14030 divided by 8 to
14032 SAVE_EXPR (I) * SAVE_EXPR (J)
14034 (where the same SAVE_EXPR (J) is used in the original and the
14035 transformed version). */
14037 int
14038 multiple_of_p (tree type, const_tree top, const_tree bottom)
14040 gimple *stmt;
14041 tree t1, op1, op2;
14043 if (operand_equal_p (top, bottom, 0))
14044 return 1;
14046 if (TREE_CODE (type) != INTEGER_TYPE)
14047 return 0;
14049 switch (TREE_CODE (top))
14051 case BIT_AND_EXPR:
14052 /* Bitwise and provides a power of two multiple. If the mask is
14053 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14054 if (!integer_pow2p (bottom))
14055 return 0;
14056 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14057 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14059 case MULT_EXPR:
14060 if (TREE_CODE (bottom) == INTEGER_CST)
14062 op1 = TREE_OPERAND (top, 0);
14063 op2 = TREE_OPERAND (top, 1);
14064 if (TREE_CODE (op1) == INTEGER_CST)
14065 std::swap (op1, op2);
14066 if (TREE_CODE (op2) == INTEGER_CST)
14068 if (multiple_of_p (type, op2, bottom))
14069 return 1;
14070 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14071 if (multiple_of_p (type, bottom, op2))
14073 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14074 wi::to_widest (op2));
14075 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14077 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14078 return multiple_of_p (type, op1, op2);
14081 return multiple_of_p (type, op1, bottom);
14084 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14085 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14087 case MINUS_EXPR:
14088 /* It is impossible to prove if op0 - op1 is multiple of bottom
14089 precisely, so be conservative here checking if both op0 and op1
14090 are multiple of bottom. Note we check the second operand first
14091 since it's usually simpler. */
14092 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14093 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14095 case PLUS_EXPR:
14096 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
14097 as op0 - 3 if the expression has unsigned type. For example,
14098 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
14099 op1 = TREE_OPERAND (top, 1);
14100 if (TYPE_UNSIGNED (type)
14101 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14102 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14103 return (multiple_of_p (type, op1, bottom)
14104 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14106 case LSHIFT_EXPR:
14107 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14109 op1 = TREE_OPERAND (top, 1);
14110 /* const_binop may not detect overflow correctly,
14111 so check for it explicitly here. */
14112 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
14113 wi::to_wide (op1))
14114 && (t1 = fold_convert (type,
14115 const_binop (LSHIFT_EXPR, size_one_node,
14116 op1))) != 0
14117 && !TREE_OVERFLOW (t1))
14118 return multiple_of_p (type, t1, bottom);
14120 return 0;
14122 case NOP_EXPR:
14123 /* Can't handle conversions from non-integral or wider integral type. */
14124 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14125 || (TYPE_PRECISION (type)
14126 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14127 return 0;
14129 /* fall through */
14131 case SAVE_EXPR:
14132 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14134 case COND_EXPR:
14135 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14136 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14138 case INTEGER_CST:
14139 if (TREE_CODE (bottom) != INTEGER_CST
14140 || integer_zerop (bottom)
14141 || (TYPE_UNSIGNED (type)
14142 && (tree_int_cst_sgn (top) < 0
14143 || tree_int_cst_sgn (bottom) < 0)))
14144 return 0;
14145 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14146 SIGNED);
14148 case SSA_NAME:
14149 if (TREE_CODE (bottom) == INTEGER_CST
14150 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14151 && gimple_code (stmt) == GIMPLE_ASSIGN)
14153 enum tree_code code = gimple_assign_rhs_code (stmt);
14155 /* Check for special cases to see if top is defined as multiple
14156 of bottom:
14158 top = X & ~(bottom - 1); bottom is a power of 2
14160 or
14162 Y = X % bottom
14163 top = X - Y. */
14164 if (code == BIT_AND_EXPR
14165 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14166 && TREE_CODE (op2) == INTEGER_CST
14167 && integer_pow2p (bottom)
14168 && wi::multiple_of_p (wi::to_widest (op2),
14169 wi::to_widest (bottom), UNSIGNED))
14170 return 1;
14172 op1 = gimple_assign_rhs1 (stmt);
14173 if (code == MINUS_EXPR
14174 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14175 && TREE_CODE (op2) == SSA_NAME
14176 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14177 && gimple_code (stmt) == GIMPLE_ASSIGN
14178 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14179 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14180 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14181 return 1;
14184 /* fall through */
14186 default:
14187 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14188 return multiple_p (wi::to_poly_widest (top),
14189 wi::to_poly_widest (bottom));
14191 return 0;
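/* Tracing the MULT_EXPR case on multiple_of_p (type, (X * 2 + 2) * 4, 8):
the constant factor 4 is not itself a multiple of 8, but 8 is a
multiple of 4, so BOTTOM is divided through to 2 and the question
becomes whether X * 2 + 2 is a multiple of 2, which the PLUS_EXPR and
MULT_EXPR cases confirm; the result is 1. */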
14195 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14196 This function returns true for integer expressions, and returns
14197 false if uncertain. */
14199 bool
14200 tree_expr_finite_p (const_tree x)
14202 machine_mode mode = element_mode (x);
14203 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14204 return true;
14205 switch (TREE_CODE (x))
14207 case REAL_CST:
14208 return real_isfinite (TREE_REAL_CST_PTR (x));
14209 case COMPLEX_CST:
14210 return tree_expr_finite_p (TREE_REALPART (x))
14211 && tree_expr_finite_p (TREE_IMAGPART (x));
14212 case FLOAT_EXPR:
14213 return true;
14214 case ABS_EXPR:
14215 case CONVERT_EXPR:
14216 case NON_LVALUE_EXPR:
14217 case NEGATE_EXPR:
14218 case SAVE_EXPR:
14219 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14220 case MIN_EXPR:
14221 case MAX_EXPR:
14222 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14223 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14224 case COND_EXPR:
14225 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14226 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14227 case CALL_EXPR:
14228 switch (get_call_combined_fn (x))
14230 CASE_CFN_FABS:
14231 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14232 CASE_CFN_FMAX:
14233 CASE_CFN_FMIN:
14234 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14235 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14236 default:
14237 return false;
14240 default:
14241 return false;
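/* E.g. tree_expr_finite_p is true for fmin (x, y) whenever it is true
for both arguments, but there is deliberately no PLUS_EXPR case: the
sum of two finite values can overflow to infinity, so x + y is known
finite only when the mode honors neither NaNs nor infinities. */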
14245 /* Return true if expression X evaluates to an infinity.
14246 This function returns false for integer expressions. */
14248 bool
14249 tree_expr_infinite_p (const_tree x)
14251 if (!HONOR_INFINITIES (x))
14252 return false;
14253 switch (TREE_CODE (x))
14255 case REAL_CST:
14256 return real_isinf (TREE_REAL_CST_PTR (x));
14257 case ABS_EXPR:
14258 case NEGATE_EXPR:
14259 case NON_LVALUE_EXPR:
14260 case SAVE_EXPR:
14261 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14262 case COND_EXPR:
14263 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14264 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14265 default:
14266 return false;
14270 /* Return true if expression X could evaluate to an infinity.
14271 This function returns false for integer expressions, and returns
14272 true if uncertain. */
14274 bool
14275 tree_expr_maybe_infinite_p (const_tree x)
14277 if (!HONOR_INFINITIES (x))
14278 return false;
14279 switch (TREE_CODE (x))
14281 case REAL_CST:
14282 return real_isinf (TREE_REAL_CST_PTR (x));
14283 case FLOAT_EXPR:
14284 return false;
14285 case ABS_EXPR:
14286 case NEGATE_EXPR:
14287 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14288 case COND_EXPR:
14289 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14290 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14291 default:
14292 return true;
14296 /* Return true if expression X evaluates to a signaling NaN.
14297 This function returns false for integer expressions. */
14299 bool
14300 tree_expr_signaling_nan_p (const_tree x)
14302 if (!HONOR_SNANS (x))
14303 return false;
14304 switch (TREE_CODE (x))
14306 case REAL_CST:
14307 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14308 case NON_LVALUE_EXPR:
14309 case SAVE_EXPR:
14310 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14311 case COND_EXPR:
14312 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14313 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14314 default:
14315 return false;
14319 /* Return true if expression X could evaluate to a signaling NaN.
14320 This function returns false for integer expressions, and returns
14321 true if uncertain. */
14323 bool
14324 tree_expr_maybe_signaling_nan_p (const_tree x)
14326 if (!HONOR_SNANS (x))
14327 return false;
14328 switch (TREE_CODE (x))
14330 case REAL_CST:
14331 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14332 case FLOAT_EXPR:
14333 return false;
14334 case ABS_EXPR:
14335 case CONVERT_EXPR:
14336 case NEGATE_EXPR:
14337 case NON_LVALUE_EXPR:
14338 case SAVE_EXPR:
14339 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14340 case MIN_EXPR:
14341 case MAX_EXPR:
14342 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14343 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14344 case COND_EXPR:
14345 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14346 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14347 case CALL_EXPR:
14348 switch (get_call_combined_fn (x))
14350 CASE_CFN_FABS:
14351 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14352 CASE_CFN_FMAX:
14353 CASE_CFN_FMIN:
14354 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14355 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14356 default:
14357 return true;
14359 default:
14360 return true;
14364 /* Return true if expression X evaluates to a NaN.
14365 This function returns false for integer expressions. */
14367 bool
14368 tree_expr_nan_p (const_tree x)
14370 if (!HONOR_NANS (x))
14371 return false;
14372 switch (TREE_CODE (x))
14374 case REAL_CST:
14375 return real_isnan (TREE_REAL_CST_PTR (x));
14376 case NON_LVALUE_EXPR:
14377 case SAVE_EXPR:
14378 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14379 case COND_EXPR:
14380 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14381 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14382 default:
14383 return false;
14387 /* Return true if expression X could evaluate to a NaN.
14388 This function returns false for integer expressions, and returns
14389 true if uncertain. */
14391 bool
14392 tree_expr_maybe_nan_p (const_tree x)
14394 if (!HONOR_NANS (x))
14395 return false;
14396 switch (TREE_CODE (x))
14398 case REAL_CST:
14399 return real_isnan (TREE_REAL_CST_PTR (x));
14400 case FLOAT_EXPR:
14401 return false;
14402 case PLUS_EXPR:
14403 case MINUS_EXPR:
14404 case MULT_EXPR:
14405 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14406 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14407 case ABS_EXPR:
14408 case CONVERT_EXPR:
14409 case NEGATE_EXPR:
14410 case NON_LVALUE_EXPR:
14411 case SAVE_EXPR:
14412 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14413 case MIN_EXPR:
14414 case MAX_EXPR:
14415 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14416 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14417 case COND_EXPR:
14418 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14419 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14420 case CALL_EXPR:
14421 switch (get_call_combined_fn (x))
14423 CASE_CFN_FABS:
14424 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14425 CASE_CFN_FMAX:
14426 CASE_CFN_FMIN:
14427 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14428 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14429 default:
14430 return true;
14432 default:
14433 return true;
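/* A worked example, assuming IEEE semantics: for PLUS_EXPR,
   MINUS_EXPR and MULT_EXPR above, finite operands can at worst
   overflow to an infinity, never produce a NaN, whereas
   Inf - Inf and 0.0 * Inf both yield NaN; a NaN result is
   therefore excluded only when both operands are known finite.  */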
14437 /* Return true if expression X could evaluate to -0.0.
14438 This function returns true if uncertain. */
14440 bool
14441 tree_expr_maybe_real_minus_zero_p (const_tree x)
14443 if (!HONOR_SIGNED_ZEROS (x))
14444 return false;
14445 switch (TREE_CODE (x))
14447 case REAL_CST:
14448 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14449 case INTEGER_CST:
14450 case FLOAT_EXPR:
14451 case ABS_EXPR:
14452 return false;
14453 case NON_LVALUE_EXPR:
14454 case SAVE_EXPR:
14455 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14456 case COND_EXPR:
14457 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14458 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14459 case CALL_EXPR:
14460 switch (get_call_combined_fn (x))
14462 CASE_CFN_FABS:
14463 return false;
14464 default:
14465 break;
14467 default:
14468 break;
14470 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14471 * but currently those predicates require tree and not const_tree. */
14472 return true;
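/* Illustrative note: the conservative fallthrough matters for
   expressions such as x * 0.0, which is -0.0 whenever x is
   negative; by contrast fabs results and int-to-float conversions
   can never be -0.0, hence the explicit false cases above.  */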
14475 #define tree_expr_nonnegative_warnv_p(X, Y) \
14476 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14478 #define RECURSE(X) \
14479 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
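/* Explanatory note: the _Pragma #define above turns any direct
   recursive call into a compile-time error, forcing recursion to go
   through RECURSE, which threads STRICT_OVERFLOW_P and increments
   DEPTH; the parenthesized (tree_expr_nonnegative_warnv_p) name
   suppresses macro expansion so the real function is called.  */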
14481 /* Return true if CODE or TYPE is known to be non-negative. */
14483 static bool
14484 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14486 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14487 && truth_value_p (code))
14488 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14489 have a signed:1 type (where the values are -1 and 0). */
14490 return true;
14491 return false;
14494 /* Return true if (CODE OP0) is known to be non-negative. If the return
14495 value is based on the assumption that signed overflow is undefined,
14496 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14497 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14499 bool
14500 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14501 bool *strict_overflow_p, int depth)
14503 if (TYPE_UNSIGNED (type))
14504 return true;
14506 switch (code)
14508 case ABS_EXPR:
14509 /* We can't return 1 if flag_wrapv is set because
14510 ABS_EXPR<INT_MIN> = INT_MIN. */
14511 if (!ANY_INTEGRAL_TYPE_P (type))
14512 return true;
14513 if (TYPE_OVERFLOW_UNDEFINED (type))
14515 *strict_overflow_p = true;
14516 return true;
14518 break;
14520 case NON_LVALUE_EXPR:
14521 case FLOAT_EXPR:
14522 case FIX_TRUNC_EXPR:
14523 return RECURSE (op0);
14525 CASE_CONVERT:
14527 tree inner_type = TREE_TYPE (op0);
14528 tree outer_type = type;
14530 if (TREE_CODE (outer_type) == REAL_TYPE)
14532 if (TREE_CODE (inner_type) == REAL_TYPE)
14533 return RECURSE (op0);
14534 if (INTEGRAL_TYPE_P (inner_type))
14536 if (TYPE_UNSIGNED (inner_type))
14537 return true;
14538 return RECURSE (op0);
14541 else if (INTEGRAL_TYPE_P (outer_type))
14543 if (TREE_CODE (inner_type) == REAL_TYPE)
14544 return RECURSE (op0);
14545 if (INTEGRAL_TYPE_P (inner_type))
14546 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14547 && TYPE_UNSIGNED (inner_type);
14550 break;
14552 default:
14553 return tree_simple_nonnegative_warnv_p (code, type);
14556 /* We don't know the sign of the result, so be conservative and return false. */
14557 return false;
14560 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14561 value is based on the assumption that signed overflow is undefined,
14562 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14563 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14565 bool
14566 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14567 tree op1, bool *strict_overflow_p,
14568 int depth)
14570 if (TYPE_UNSIGNED (type))
14571 return true;
14573 switch (code)
14575 case POINTER_PLUS_EXPR:
14576 case PLUS_EXPR:
14577 if (FLOAT_TYPE_P (type))
14578 return RECURSE (op0) && RECURSE (op1);
14580 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14581 both unsigned and at least 2 bits shorter than the result. */
14582 if (TREE_CODE (type) == INTEGER_TYPE
14583 && TREE_CODE (op0) == NOP_EXPR
14584 && TREE_CODE (op1) == NOP_EXPR)
14586 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14587 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14588 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14589 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14591 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14592 TYPE_PRECISION (inner2)) + 1;
14593 return prec < TYPE_PRECISION (type);
14596 break;
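/* A worked example: adding two zero-extended 8-bit values in a
   32-bit int gives prec = MAX (8, 8) + 1 = 9, and the sum is at
   most 255 + 255 = 510 < 2^9, so 9 < 32 proves the result
   non-negative.  */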
14598 case MULT_EXPR:
14599 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14601 /* x * x is always non-negative for floating point x,
14602 or for integers when signed overflow is undefined. */
14603 if (operand_equal_p (op0, op1, 0)
14604 || (RECURSE (op0) && RECURSE (op1)))
14606 if (ANY_INTEGRAL_TYPE_P (type)
14607 && TYPE_OVERFLOW_UNDEFINED (type))
14608 *strict_overflow_p = true;
14609 return true;
14613 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14614 both unsigned and their combined precision is less than the result's. */
14615 if (TREE_CODE (type) == INTEGER_TYPE
14616 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14617 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14619 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14620 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14621 : TREE_TYPE (op0);
14622 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14623 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14624 : TREE_TYPE (op1);
14626 bool unsigned0 = TYPE_UNSIGNED (inner0);
14627 bool unsigned1 = TYPE_UNSIGNED (inner1);
14629 if (TREE_CODE (op0) == INTEGER_CST)
14630 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14632 if (TREE_CODE (op1) == INTEGER_CST)
14633 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14635 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14636 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14638 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14639 ? tree_int_cst_min_precision (op0, UNSIGNED)
14640 : TYPE_PRECISION (inner0);
14642 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14643 ? tree_int_cst_min_precision (op1, UNSIGNED)
14644 : TYPE_PRECISION (inner1);
14646 return precision0 + precision1 < TYPE_PRECISION (type);
14649 return false;
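/* A worked example: multiplying two zero-extended 8-bit values in
   a 32-bit int gives a product of at most 255 * 255 < 2^16, and
   8 + 8 = 16 < 32, so the sign bit of the result can never be
   set.  */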
14651 case BIT_AND_EXPR:
14652 return RECURSE (op0) || RECURSE (op1);
14654 case MAX_EXPR:
14655 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14656 things. */
14657 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14658 return RECURSE (op0) && RECURSE (op1);
14659 return RECURSE (op0) || RECURSE (op1);
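/* Illustrative note: when a NaN is possible, MAX_EXPR may return
   that NaN, so a single non-negative operand proves nothing;
   e.g. MAX (NaN, 1.0) need not be non-negative, hence the
   stricter && above.  */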
14661 case BIT_IOR_EXPR:
14662 case BIT_XOR_EXPR:
14663 case MIN_EXPR:
14664 case RDIV_EXPR:
14665 case TRUNC_DIV_EXPR:
14666 case CEIL_DIV_EXPR:
14667 case FLOOR_DIV_EXPR:
14668 case ROUND_DIV_EXPR:
14669 return RECURSE (op0) && RECURSE (op1);
14671 case TRUNC_MOD_EXPR:
14672 return RECURSE (op0);
14674 case FLOOR_MOD_EXPR:
14675 return RECURSE (op1);
14677 case CEIL_MOD_EXPR:
14678 case ROUND_MOD_EXPR:
14679 default:
14680 return tree_simple_nonnegative_warnv_p (code, type);
14683 /* We don't know the sign of the result, so be conservative and return false. */
14684 return false;
14687 /* Return true if T is known to be non-negative. If the return
14688 value is based on the assumption that signed overflow is undefined,
14689 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14690 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14692 bool
14693 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14695 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14696 return true;
14698 switch (TREE_CODE (t))
14700 case INTEGER_CST:
14701 return tree_int_cst_sgn (t) >= 0;
14703 case REAL_CST:
14704 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14706 case FIXED_CST:
14707 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14709 case COND_EXPR:
14710 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14712 case SSA_NAME:
14713 /* Limit the depth of recursion to avoid quadratic behavior.
14714 This is expected to catch almost all occurrences in practice.
14715 If this code misses important cases that unbounded recursion
14716 would not, passes that need this information could be revised
14717 to provide it through dataflow propagation. */
14718 return (!name_registered_for_update_p (t)
14719 && depth < param_max_ssa_name_query_depth
14720 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14721 strict_overflow_p, depth));
14723 default:
14724 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14728 /* Return true if T is known to be non-negative. If the return
14729 value is based on the assumption that signed overflow is undefined,
14730 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14731 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14733 bool
14734 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14735 bool *strict_overflow_p, int depth)
14737 switch (fn)
14739 CASE_CFN_ACOS:
14740 CASE_CFN_ACOSH:
14741 CASE_CFN_CABS:
14742 CASE_CFN_COSH:
14743 CASE_CFN_ERFC:
14744 CASE_CFN_EXP:
14745 CASE_CFN_EXP10:
14746 CASE_CFN_EXP2:
14747 CASE_CFN_FABS:
14748 CASE_CFN_FDIM:
14749 CASE_CFN_HYPOT:
14750 CASE_CFN_POW10:
14751 CASE_CFN_FFS:
14752 CASE_CFN_PARITY:
14753 CASE_CFN_POPCOUNT:
14754 CASE_CFN_CLZ:
14755 CASE_CFN_CLRSB:
14756 case CFN_BUILT_IN_BSWAP16:
14757 case CFN_BUILT_IN_BSWAP32:
14758 case CFN_BUILT_IN_BSWAP64:
14759 case CFN_BUILT_IN_BSWAP128:
14760 /* Always true. */
14761 return true;
14763 CASE_CFN_SQRT:
14764 CASE_CFN_SQRT_FN:
14765 /* sqrt(-0.0) is -0.0. */
14766 if (!HONOR_SIGNED_ZEROS (type))
14767 return true;
14768 return RECURSE (arg0);
14770 CASE_CFN_ASINH:
14771 CASE_CFN_ATAN:
14772 CASE_CFN_ATANH:
14773 CASE_CFN_CBRT:
14774 CASE_CFN_CEIL:
14775 CASE_CFN_CEIL_FN:
14776 CASE_CFN_ERF:
14777 CASE_CFN_EXPM1:
14778 CASE_CFN_FLOOR:
14779 CASE_CFN_FLOOR_FN:
14780 CASE_CFN_FMOD:
14781 CASE_CFN_FREXP:
14782 CASE_CFN_ICEIL:
14783 CASE_CFN_IFLOOR:
14784 CASE_CFN_IRINT:
14785 CASE_CFN_IROUND:
14786 CASE_CFN_LCEIL:
14787 CASE_CFN_LDEXP:
14788 CASE_CFN_LFLOOR:
14789 CASE_CFN_LLCEIL:
14790 CASE_CFN_LLFLOOR:
14791 CASE_CFN_LLRINT:
14792 CASE_CFN_LLROUND:
14793 CASE_CFN_LRINT:
14794 CASE_CFN_LROUND:
14795 CASE_CFN_MODF:
14796 CASE_CFN_NEARBYINT:
14797 CASE_CFN_NEARBYINT_FN:
14798 CASE_CFN_RINT:
14799 CASE_CFN_RINT_FN:
14800 CASE_CFN_ROUND:
14801 CASE_CFN_ROUND_FN:
14802 CASE_CFN_ROUNDEVEN:
14803 CASE_CFN_ROUNDEVEN_FN:
14804 CASE_CFN_SCALB:
14805 CASE_CFN_SCALBLN:
14806 CASE_CFN_SCALBN:
14807 CASE_CFN_SIGNBIT:
14808 CASE_CFN_SIGNIFICAND:
14809 CASE_CFN_SINH:
14810 CASE_CFN_TANH:
14811 CASE_CFN_TRUNC:
14812 CASE_CFN_TRUNC_FN:
14813 /* True if the 1st argument is nonnegative. */
14814 return RECURSE (arg0);
14816 CASE_CFN_FMAX:
14817 CASE_CFN_FMAX_FN:
14818 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14819 things. In the presence of sNaNs, we're only guaranteed to be
14820 non-negative if both operands are non-negative. In the presence
14821 of qNaNs, we're non-negative if either operand is non-negative
14822 and can't be a qNaN, or if both operands are non-negative. */
14823 if (tree_expr_maybe_signaling_nan_p (arg0)
14824 || tree_expr_maybe_signaling_nan_p (arg1))
14825 return RECURSE (arg0) && RECURSE (arg1);
14826 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14827 || RECURSE (arg1))
14828 : (RECURSE (arg1)
14829 && !tree_expr_maybe_nan_p (arg1));
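/* A worked example, assuming IEEE fmax semantics: with quiet NaNs,
   fmax returns the other operand when one argument is a NaN, so
   fmax (qNaN, -1.0) is -1.0; a non-negative arg0 therefore
   suffices only if arg0 cannot be a qNaN, or arg1 is also known
   non-negative.  */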
14831 CASE_CFN_FMIN:
14832 CASE_CFN_FMIN_FN:
14833 /* True if the 1st AND 2nd arguments are nonnegative. */
14834 return RECURSE (arg0) && RECURSE (arg1);
14836 CASE_CFN_COPYSIGN:
14837 CASE_CFN_COPYSIGN_FN:
14838 /* True if the 2nd argument is nonnegative. */
14839 return RECURSE (arg1);
14841 CASE_CFN_POWI:
14842 /* True if the 1st argument is nonnegative or the second
14843 argument is an even integer. */
14844 if (TREE_CODE (arg1) == INTEGER_CST
14845 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14846 return true;
14847 return RECURSE (arg0);
14849 CASE_CFN_POW:
14850 /* True if the 1st argument is nonnegative or the second
14851 argument is an even integer valued real. */
14852 if (TREE_CODE (arg1) == REAL_CST)
14854 REAL_VALUE_TYPE c;
14855 HOST_WIDE_INT n;
14857 c = TREE_REAL_CST (arg1);
14858 n = real_to_integer (&c);
14859 if ((n & 1) == 0)
14861 REAL_VALUE_TYPE cint;
14862 real_from_integer (&cint, VOIDmode, n, SIGNED);
14863 if (real_identical (&c, &cint))
14864 return true;
14867 return RECURSE (arg0);
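/* A worked example: pow with an even integer-valued constant
   exponent is non-negative for any base, e.g. pow (-3.0, 2.0)
   == 9.0; the real_from_integer round-trip above checks that the
   exponent is exactly an integer, not merely a real such as 2.5
   whose truncation happens to be even.  */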
14869 default:
14870 break;
14872 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14875 /* Return true if T is known to be non-negative. If the return
14876 value is based on the assumption that signed overflow is undefined,
14877 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14878 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14880 static bool
14881 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14883 enum tree_code code = TREE_CODE (t);
14884 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14885 return true;
14887 switch (code)
14889 case TARGET_EXPR:
14891 tree temp = TARGET_EXPR_SLOT (t);
14892 t = TARGET_EXPR_INITIAL (t);
14894 /* If the initializer is non-void, then it's a normal expression
14895 that will be assigned to the slot. */
14896 if (!VOID_TYPE_P (t))
14897 return RECURSE (t);
14899 /* Otherwise, the initializer sets the slot in some way. One common
14900 way is an assignment statement at the end of the initializer. */
14901 while (1)
14903 if (TREE_CODE (t) == BIND_EXPR)
14904 t = expr_last (BIND_EXPR_BODY (t));
14905 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14906 || TREE_CODE (t) == TRY_CATCH_EXPR)
14907 t = expr_last (TREE_OPERAND (t, 0));
14908 else if (TREE_CODE (t) == STATEMENT_LIST)
14909 t = expr_last (t);
14910 else
14911 break;
14913 if (TREE_CODE (t) == MODIFY_EXPR
14914 && TREE_OPERAND (t, 0) == temp)
14915 return RECURSE (TREE_OPERAND (t, 1));
14917 return false;
14920 case CALL_EXPR:
14922 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14923 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14925 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14926 get_call_combined_fn (t),
14927 arg0,
14928 arg1,
14929 strict_overflow_p, depth);
14931 case COMPOUND_EXPR:
14932 case MODIFY_EXPR:
14933 return RECURSE (TREE_OPERAND (t, 1));
14935 case BIND_EXPR:
14936 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14938 case SAVE_EXPR:
14939 return RECURSE (TREE_OPERAND (t, 0));
14941 default:
14942 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14946 #undef RECURSE
14947 #undef tree_expr_nonnegative_warnv_p
14949 /* Return true if T is known to be non-negative. If the return
14950 value is based on the assumption that signed overflow is undefined,
14951 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14952 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14954 bool
14955 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14957 enum tree_code code;
14958 if (t == error_mark_node)
14959 return false;
14961 code = TREE_CODE (t);
14962 switch (TREE_CODE_CLASS (code))
14964 case tcc_binary:
14965 case tcc_comparison:
14966 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14967 TREE_TYPE (t),
14968 TREE_OPERAND (t, 0),
14969 TREE_OPERAND (t, 1),
14970 strict_overflow_p, depth);
14972 case tcc_unary:
14973 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14974 TREE_TYPE (t),
14975 TREE_OPERAND (t, 0),
14976 strict_overflow_p, depth);
14978 case tcc_constant:
14979 case tcc_declaration:
14980 case tcc_reference:
14981 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14983 default:
14984 break;
14987 switch (code)
14989 case TRUTH_AND_EXPR:
14990 case TRUTH_OR_EXPR:
14991 case TRUTH_XOR_EXPR:
14992 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14993 TREE_TYPE (t),
14994 TREE_OPERAND (t, 0),
14995 TREE_OPERAND (t, 1),
14996 strict_overflow_p, depth);
14997 case TRUTH_NOT_EXPR:
14998 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14999 TREE_TYPE (t),
15000 TREE_OPERAND (t, 0),
15001 strict_overflow_p, depth);
15003 case COND_EXPR:
15004 case CONSTRUCTOR:
15005 case OBJ_TYPE_REF:
15006 case ASSERT_EXPR:
15007 case ADDR_EXPR:
15008 case WITH_SIZE_EXPR:
15009 case SSA_NAME:
15010 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15012 default:
15013 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15017 /* Return true if `t' is known to be non-negative. Handle warnings
15018 about undefined signed overflow. */
15020 bool
15021 tree_expr_nonnegative_p (tree t)
15023 bool ret, strict_overflow_p;
15025 strict_overflow_p = false;
15026 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15027 if (strict_overflow_p)
15028 fold_overflow_warning (("assuming signed overflow does not occur when "
15029 "determining that expression is always "
15030 "non-negative"),
15031 WARN_STRICT_OVERFLOW_MISC);
15032 return ret;
15036 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15037 For floating point we further ensure that T is not denormal.
15038 Similar logic is present in nonzero_address_p in rtlanal.c.
15040 If the return value is based on the assumption that signed overflow
15041 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15042 change *STRICT_OVERFLOW_P. */
15044 bool
15045 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15046 bool *strict_overflow_p)
15048 switch (code)
15050 case ABS_EXPR:
15051 return tree_expr_nonzero_warnv_p (op0,
15052 strict_overflow_p);
15054 case NOP_EXPR:
15056 tree inner_type = TREE_TYPE (op0);
15057 tree outer_type = type;
15059 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15060 && tree_expr_nonzero_warnv_p (op0,
15061 strict_overflow_p));
15063 break;
15065 case NON_LVALUE_EXPR:
15066 return tree_expr_nonzero_warnv_p (op0,
15067 strict_overflow_p);
15069 default:
15070 break;
15073 return false;
15076 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15077 For floating point we further ensure that T is not denormal.
15078 Similar logic is present in nonzero_address_p in rtlanal.c.
15080 If the return value is based on the assumption that signed overflow
15081 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15082 change *STRICT_OVERFLOW_P. */
15084 bool
15085 tree_binary_nonzero_warnv_p (enum tree_code code,
15086 tree type,
15087 tree op0,
15088 tree op1, bool *strict_overflow_p)
15090 bool sub_strict_overflow_p;
15091 switch (code)
15093 case POINTER_PLUS_EXPR:
15094 case PLUS_EXPR:
15095 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15097 /* In the presence of negative values it is hard
15098 to say anything definite. */
15099 sub_strict_overflow_p = false;
15100 if (!tree_expr_nonnegative_warnv_p (op0,
15101 &sub_strict_overflow_p)
15102 || !tree_expr_nonnegative_warnv_p (op1,
15103 &sub_strict_overflow_p))
15104 return false;
15105 /* One of the operands must be positive and the other non-negative. */
15106 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15107 overflows, on a twos-complement machine the sum of two
15108 nonnegative numbers can never be zero. */
15109 return (tree_expr_nonzero_warnv_p (op0,
15110 strict_overflow_p)
15111 || tree_expr_nonzero_warnv_p (op1,
15112 strict_overflow_p));
15114 break;
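/* A worked example: if x >= 0, y >= 0 and x != 0, then
   mathematically x + y >= x > 0; and even if the addition wraps,
   the two's-complement sum of two non-negative 32-bit values is at
   most 2^32 - 2 and so never exactly zero, which is why
   *STRICT_OVERFLOW_P is left untouched here.  */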
15116 case MULT_EXPR:
15117 if (TYPE_OVERFLOW_UNDEFINED (type))
15119 if (tree_expr_nonzero_warnv_p (op0,
15120 strict_overflow_p)
15121 && tree_expr_nonzero_warnv_p (op1,
15122 strict_overflow_p))
15124 *strict_overflow_p = true;
15125 return true;
15128 break;
15130 case MIN_EXPR:
15131 sub_strict_overflow_p = false;
15132 if (tree_expr_nonzero_warnv_p (op0,
15133 &sub_strict_overflow_p)
15134 && tree_expr_nonzero_warnv_p (op1,
15135 &sub_strict_overflow_p))
15137 if (sub_strict_overflow_p)
15138 *strict_overflow_p = true;
15140 break;
15142 case MAX_EXPR:
15143 sub_strict_overflow_p = false;
15144 if (tree_expr_nonzero_warnv_p (op0,
15145 &sub_strict_overflow_p))
15147 if (sub_strict_overflow_p)
15148 *strict_overflow_p = true;
15150 /* When both operands are nonzero, then MAX must be too. */
15151 if (tree_expr_nonzero_warnv_p (op1,
15152 strict_overflow_p))
15153 return true;
15155 /* MAX where operand 0 is positive is positive. */
15156 return tree_expr_nonnegative_warnv_p (op0,
15157 strict_overflow_p);
15159 /* MAX where operand 1 is positive is positive. */
15160 else if (tree_expr_nonzero_warnv_p (op1,
15161 &sub_strict_overflow_p)
15162 && tree_expr_nonnegative_warnv_p (op1,
15163 &sub_strict_overflow_p))
15165 if (sub_strict_overflow_p)
15166 *strict_overflow_p = true;
15167 return true;
15169 break;
15171 case BIT_IOR_EXPR:
15172 return (tree_expr_nonzero_warnv_p (op1,
15173 strict_overflow_p)
15174 || tree_expr_nonzero_warnv_p (op0,
15175 strict_overflow_p));
15177 default:
15178 break;
15181 return false;
15184 /* Return true when T is an address and is known to be nonzero.
15185 For floating point we further ensure that T is not denormal.
15186 Similar logic is present in nonzero_address_p in rtlanal.c.
15188 If the return value is based on the assumption that signed overflow
15189 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15190 change *STRICT_OVERFLOW_P. */
15192 bool
15193 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15195 bool sub_strict_overflow_p;
15196 switch (TREE_CODE (t))
15198 case INTEGER_CST:
15199 return !integer_zerop (t);
15201 case ADDR_EXPR:
15203 tree base = TREE_OPERAND (t, 0);
15205 if (!DECL_P (base))
15206 base = get_base_address (base);
15208 if (base && TREE_CODE (base) == TARGET_EXPR)
15209 base = TARGET_EXPR_SLOT (base);
15211 if (!base)
15212 return false;
15214 /* For objects in the symbol table, check whether we know they are non-zero.
15215 Don't do anything for variables and functions before symtab is built;
15216 it is quite possible that they will be declared weak later. */
15217 int nonzero_addr = maybe_nonzero_address (base);
15218 if (nonzero_addr >= 0)
15219 return nonzero_addr;
15221 /* Constants are never weak. */
15222 if (CONSTANT_CLASS_P (base))
15223 return true;
15225 return false;
15228 case COND_EXPR:
15229 sub_strict_overflow_p = false;
15230 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15231 &sub_strict_overflow_p)
15232 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15233 &sub_strict_overflow_p))
15235 if (sub_strict_overflow_p)
15236 *strict_overflow_p = true;
15237 return true;
15239 break;
15241 case SSA_NAME:
15242 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15243 break;
15244 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15246 default:
15247 break;
15249 return false;
15252 #define integer_valued_real_p(X) \
15253 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15255 #define RECURSE(X) \
15256 ((integer_valued_real_p) (X, depth + 1))
15258 /* Return true if the floating point result of (CODE OP0) has an
15259 integer value. We also allow +Inf, -Inf and NaN to be considered
15260 integer values. Return false for signaling NaN.
15262 DEPTH is the current nesting depth of the query. */
15264 bool
15265 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15267 switch (code)
15269 case FLOAT_EXPR:
15270 return true;
15272 case ABS_EXPR:
15273 return RECURSE (op0);
15275 CASE_CONVERT:
15277 tree type = TREE_TYPE (op0);
15278 if (TREE_CODE (type) == INTEGER_TYPE)
15279 return true;
15280 if (TREE_CODE (type) == REAL_TYPE)
15281 return RECURSE (op0);
15282 break;
15285 default:
15286 break;
15288 return false;
15291 /* Return true if the floating point result of (CODE OP0 OP1) has an
15292 integer value. We also allow +Inf, -Inf and NaN to be considered
15293 integer values. Return false for signaling NaN.
15295 DEPTH is the current nesting depth of the query. */
15297 bool
15298 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15300 switch (code)
15302 case PLUS_EXPR:
15303 case MINUS_EXPR:
15304 case MULT_EXPR:
15305 case MIN_EXPR:
15306 case MAX_EXPR:
15307 return RECURSE (op0) && RECURSE (op1);
15309 default:
15310 break;
15312 return false;
15315 /* Return true if the floating point result of calling FN with arguments
15316 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15317 considered integer values. Return false for signaling NaN. If FN
15318 takes fewer than 2 arguments, the remaining ARGn are null.
15320 DEPTH is the current nesting depth of the query. */
15322 bool
15323 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15325 switch (fn)
15327 CASE_CFN_CEIL:
15328 CASE_CFN_CEIL_FN:
15329 CASE_CFN_FLOOR:
15330 CASE_CFN_FLOOR_FN:
15331 CASE_CFN_NEARBYINT:
15332 CASE_CFN_NEARBYINT_FN:
15333 CASE_CFN_RINT:
15334 CASE_CFN_RINT_FN:
15335 CASE_CFN_ROUND:
15336 CASE_CFN_ROUND_FN:
15337 CASE_CFN_ROUNDEVEN:
15338 CASE_CFN_ROUNDEVEN_FN:
15339 CASE_CFN_TRUNC:
15340 CASE_CFN_TRUNC_FN:
15341 return true;
15343 CASE_CFN_FMIN:
15344 CASE_CFN_FMIN_FN:
15345 CASE_CFN_FMAX:
15346 CASE_CFN_FMAX_FN:
15347 return RECURSE (arg0) && RECURSE (arg1);
15349 default:
15350 break;
15352 return false;
15355 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15356 has an integer value. We also allow +Inf, -Inf and NaN to be
15357 considered integer values. Return false for signaling NaN.
15359 DEPTH is the current nesting depth of the query. */
15361 bool
15362 integer_valued_real_single_p (tree t, int depth)
15364 switch (TREE_CODE (t))
15366 case REAL_CST:
15367 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15369 case COND_EXPR:
15370 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15372 case SSA_NAME:
15373 /* Limit the depth of recursion to avoid quadratic behavior.
15374 This is expected to catch almost all occurrences in practice.
15375 If this code misses important cases that unbounded recursion
15376 would not, passes that need this information could be revised
15377 to provide it through dataflow propagation. */
15378 return (!name_registered_for_update_p (t)
15379 && depth < param_max_ssa_name_query_depth
15380 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15381 depth));
15383 default:
15384 break;
15386 return false;
15389 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15390 has an integer value. We also allow +Inf, -Inf and NaN to be
15391 considered integer values. Return false for signaling NaN.
15393 DEPTH is the current nesting depth of the query. */
15395 static bool
15396 integer_valued_real_invalid_p (tree t, int depth)
15398 switch (TREE_CODE (t))
15400 case COMPOUND_EXPR:
15401 case MODIFY_EXPR:
15402 case BIND_EXPR:
15403 return RECURSE (TREE_OPERAND (t, 1));
15405 case SAVE_EXPR:
15406 return RECURSE (TREE_OPERAND (t, 0));
15408 default:
15409 break;
15411 return false;
15414 #undef RECURSE
15415 #undef integer_valued_real_p
15417 /* Return true if the floating point expression T has an integer value.
15418 We also allow +Inf, -Inf and NaN to be considered integer values.
15419 Return false for signaling NaN.
15421 DEPTH is the current nesting depth of the query. */
15423 bool
15424 integer_valued_real_p (tree t, int depth)
15426 if (t == error_mark_node)
15427 return false;
15429 STRIP_ANY_LOCATION_WRAPPER (t);
15431 tree_code code = TREE_CODE (t);
15432 switch (TREE_CODE_CLASS (code))
15434 case tcc_binary:
15435 case tcc_comparison:
15436 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15437 TREE_OPERAND (t, 1), depth);
15439 case tcc_unary:
15440 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15442 case tcc_constant:
15443 case tcc_declaration:
15444 case tcc_reference:
15445 return integer_valued_real_single_p (t, depth);
15447 default:
15448 break;
15451 switch (code)
15453 case COND_EXPR:
15454 case SSA_NAME:
15455 return integer_valued_real_single_p (t, depth);
15457 case CALL_EXPR:
15459 tree arg0 = (call_expr_nargs (t) > 0
15460 ? CALL_EXPR_ARG (t, 0)
15461 : NULL_TREE);
15462 tree arg1 = (call_expr_nargs (t) > 1
15463 ? CALL_EXPR_ARG (t, 1)
15464 : NULL_TREE);
15465 return integer_valued_real_call_p (get_call_combined_fn (t),
15466 arg0, arg1, depth);
15469 default:
15470 return integer_valued_real_invalid_p (t, depth);
15474 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15475 attempt to fold the expression to a constant without modifying TYPE,
15476 OP0 or OP1.
15478 If the expression could be simplified to a constant, then return
15479 the constant. If the expression would not be simplified to a
15480 constant, then return NULL_TREE. */
15482 tree
15483 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15485 tree tem = fold_binary (code, type, op0, op1);
15486 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15489 /* Given the components of a unary expression CODE, TYPE and OP0,
15490 attempt to fold the expression to a constant without modifying
15491 TYPE or OP0.
15493 If the expression could be simplified to a constant, then return
15494 the constant. If the expression would not be simplified to a
15495 constant, then return NULL_TREE. */
15497 tree
15498 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15500 tree tem = fold_unary (code, type, op0);
15501 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15504 /* If EXP represents referencing an element in a constant string
15505 (either via pointer arithmetic or array indexing), return the
15506 tree representing the value accessed, otherwise return NULL. */
15508 tree
15509 fold_read_from_constant_string (tree exp)
15511 if ((TREE_CODE (exp) == INDIRECT_REF
15512 || TREE_CODE (exp) == ARRAY_REF)
15513 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15515 tree exp1 = TREE_OPERAND (exp, 0);
15516 tree index;
15517 tree string;
15518 location_t loc = EXPR_LOCATION (exp);
15520 if (TREE_CODE (exp) == INDIRECT_REF)
15521 string = string_constant (exp1, &index, NULL, NULL);
15522 else
15524 tree low_bound = array_ref_low_bound (exp);
15525 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15527 /* Optimize the special case of a zero lower bound.
15529 We convert the low_bound to sizetype to avoid some problems
15530 with constant folding. (E.g. suppose the lower bound is 1,
15531 and its mode is QI. Without the conversion, (ARRAY
15532 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15533 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15534 if (! integer_zerop (low_bound))
15535 index = size_diffop_loc (loc, index,
15536 fold_convert_loc (loc, sizetype, low_bound));
15538 string = exp1;
15541 scalar_int_mode char_mode;
15542 if (string
15543 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15544 && TREE_CODE (string) == STRING_CST
15545 && tree_fits_uhwi_p (index)
15546 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15547 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15548 &char_mode)
15549 && GET_MODE_SIZE (char_mode) == 1)
15550 return build_int_cst_type (TREE_TYPE (exp),
15551 (TREE_STRING_POINTER (string)
15552 [TREE_INT_CST_LOW (index)]));
15554 return NULL;
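/* Illustrative note: this is what folds e.g. "abc"[1] to the
   character constant 'b', provided the index is constant, in
   range, and the element is a single-byte integer type.  */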
15557 /* Folds a read from vector element at IDX of vector ARG. */
15559 tree
15560 fold_read_from_vector (tree arg, poly_uint64 idx)
15562 unsigned HOST_WIDE_INT i;
15563 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15564 && known_ge (idx, 0u)
15565 && idx.is_constant (&i))
15567 if (TREE_CODE (arg) == VECTOR_CST)
15568 return VECTOR_CST_ELT (arg, i);
15569 else if (TREE_CODE (arg) == CONSTRUCTOR)
15571 if (CONSTRUCTOR_NELTS (arg)
15572 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15573 return NULL_TREE;
15574 if (i >= CONSTRUCTOR_NELTS (arg))
15575 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15576 return CONSTRUCTOR_ELT (arg, i)->value;
15579 return NULL_TREE;
15582 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15583 an integer constant, real, or fixed-point constant.
15585 TYPE is the type of the result. */
15587 static tree
15588 fold_negate_const (tree arg0, tree type)
15590 tree t = NULL_TREE;
15592 switch (TREE_CODE (arg0))
15594 case REAL_CST:
15595 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15596 break;
15598 case FIXED_CST:
15600 FIXED_VALUE_TYPE f;
15601 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15602 &(TREE_FIXED_CST (arg0)), NULL,
15603 TYPE_SATURATING (type));
15604 t = build_fixed (type, f);
15605 /* Propagate overflow flags. */
15606 if (overflow_p | TREE_OVERFLOW (arg0))
15607 TREE_OVERFLOW (t) = 1;
15608 break;
15611 default:
15612 if (poly_int_tree_p (arg0))
15614 wi::overflow_type overflow;
15615 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15616 t = force_fit_type (type, res, 1,
15617 (overflow && ! TYPE_UNSIGNED (type))
15618 || TREE_OVERFLOW (arg0));
15619 break;
15622 gcc_unreachable ();
15625 return t;
15628 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15629 an integer constant or real constant.
15631 TYPE is the type of the result. */
15633 tree
15634 fold_abs_const (tree arg0, tree type)
15636 tree t = NULL_TREE;
15638 switch (TREE_CODE (arg0))
15640 case INTEGER_CST:
15642 /* If the value is unsigned or non-negative, then the absolute value
15643 is the same as the ordinary value. */
15644 wide_int val = wi::to_wide (arg0);
15645 wi::overflow_type overflow = wi::OVF_NONE;
15646 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15649 /* If the value is negative, then the absolute value is
15650 its negation. */
15651 else
15652 val = wi::neg (val, &overflow);
15654 /* Force to the destination type, set TREE_OVERFLOW for signed
15655 TYPE only. */
15656 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15658 break;
15660 case REAL_CST:
15661 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15662 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15663 else
15664 t = arg0;
15665 break;
15667 default:
15668 gcc_unreachable ();
15671 return t;
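/* A worked example: for 32-bit int, the absolute value of INT_MIN
   negates back to INT_MIN with the overflow flag set, so
   force_fit_type returns the value with TREE_OVERFLOW set on the
   result.  */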
15674 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15675 constant. TYPE is the type of the result. */
15677 static tree
15678 fold_not_const (const_tree arg0, tree type)
15680 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15682 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15685 /* Given CODE, a relational operator, the target type, TYPE and two
15686 constant operands OP0 and OP1, return the result of the
15687 relational operation. If the result is not a compile time
15688 constant, then return NULL_TREE. */
15690 static tree
15691 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15693 int result, invert;
15695 /* From here on, the only cases we handle are when the result is
15696 known to be a constant. */
15698 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15700 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15701 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15703 /* Handle the cases where either operand is a NaN. */
15704 if (real_isnan (c0) || real_isnan (c1))
15706 switch (code)
15708 case EQ_EXPR:
15709 case ORDERED_EXPR:
15710 result = 0;
15711 break;
15713 case NE_EXPR:
15714 case UNORDERED_EXPR:
15715 case UNLT_EXPR:
15716 case UNLE_EXPR:
15717 case UNGT_EXPR:
15718 case UNGE_EXPR:
15719 case UNEQ_EXPR:
15720 result = 1;
15721 break;
15723 case LT_EXPR:
15724 case LE_EXPR:
15725 case GT_EXPR:
15726 case GE_EXPR:
15727 case LTGT_EXPR:
15728 if (flag_trapping_math)
15729 return NULL_TREE;
15730 result = 0;
15731 break;
15733 default:
15734 gcc_unreachable ();
15737 return constant_boolean_node (result, type);
15740 return constant_boolean_node (real_compare (code, c0, c1), type);
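/* Illustrative note: with a NaN operand, EQ_EXPR folds to false
   and NE_EXPR to true, but an ordered comparison such as LT_EXPR
   would raise an invalid-operation exception at run time, so under
   flag_trapping_math it is left unfolded above.  */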
15743 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15745 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15746 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15747 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15750 /* Handle equality/inequality of complex constants. */
15751 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15753 tree rcond = fold_relational_const (code, type,
15754 TREE_REALPART (op0),
15755 TREE_REALPART (op1));
15756 tree icond = fold_relational_const (code, type,
15757 TREE_IMAGPART (op0),
15758 TREE_IMAGPART (op1));
15759 if (code == EQ_EXPR)
15760 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15761 else if (code == NE_EXPR)
15762 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15763 else
15764 return NULL_TREE;
15767 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15769 if (!VECTOR_TYPE_P (type))
15771 /* We have a vector comparison with a scalar boolean result. */
15772 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15773 && known_eq (VECTOR_CST_NELTS (op0),
15774 VECTOR_CST_NELTS (op1)));
15775 unsigned HOST_WIDE_INT nunits;
15776 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15777 return NULL_TREE;
15778 for (unsigned i = 0; i < nunits; i++)
15780 tree elem0 = VECTOR_CST_ELT (op0, i);
15781 tree elem1 = VECTOR_CST_ELT (op1, i);
15782 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15783 if (tmp == NULL_TREE)
15784 return NULL_TREE;
15785 if (integer_zerop (tmp))
15786 return constant_boolean_node (code == NE_EXPR, type);
15788 return constant_boolean_node (code == EQ_EXPR, type);
15790 tree_vector_builder elts;
15791 if (!elts.new_binary_operation (type, op0, op1, false))
15792 return NULL_TREE;
15793 unsigned int count = elts.encoded_nelts ();
15794 for (unsigned i = 0; i < count; i++)
15796 tree elem_type = TREE_TYPE (type);
15797 tree elem0 = VECTOR_CST_ELT (op0, i);
15798 tree elem1 = VECTOR_CST_ELT (op1, i);
15800 tree tem = fold_relational_const (code, elem_type,
15801 elem0, elem1);
15803 if (tem == NULL_TREE)
15804 return NULL_TREE;
15806 elts.quick_push (build_int_cst (elem_type,
15807 integer_zerop (tem) ? 0 : -1));
15810 return elts.build ();
15813 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15815 To compute GT, swap the arguments and do LT.
15816 To compute GE, do LT and invert the result.
15817 To compute LE, swap the arguments, do LT and invert the result.
15818 To compute NE, do EQ and invert the result.
15820 Therefore, the code below must handle only EQ and LT. */
15822 if (code == LE_EXPR || code == GT_EXPR)
15824 std::swap (op0, op1);
15825 code = swap_tree_comparison (code);
15828 /* Note that it is safe to invert for real values here because we
15829 have already handled the one case where it matters. */
15831 invert = 0;
15832 if (code == NE_EXPR || code == GE_EXPR)
15834 invert = 1;
15835 code = invert_tree_comparison (code, false);
15838 /* Compute a result for LT or EQ if args permit;
15839 otherwise return NULL_TREE. */
15840 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15842 if (code == EQ_EXPR)
15843 result = tree_int_cst_equal (op0, op1);
15844 else
15845 result = tree_int_cst_lt (op0, op1);
15847 else
15848 return NULL_TREE;
15850 if (invert)
15851 result ^= 1;
15852 return constant_boolean_node (result, type);
15855 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15856 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15857 itself. */
15859 tree
15860 fold_build_cleanup_point_expr (tree type, tree expr)
15862 /* If the expression does not have side effects then we don't have to wrap
15863 it with a cleanup point expression. */
15864 if (!TREE_SIDE_EFFECTS (expr))
15865 return expr;
15867 /* If the expression is a return, check whether the expression inside
15868 the return, or the right-hand side of the modify expression inside
15869 the return, has side effects. If either has none, we don't need to
15870 wrap the expression in a cleanup point expression. Note we don't check
15871 the left-hand side of the modify because it should always be a return decl. */
15872 if (TREE_CODE (expr) == RETURN_EXPR)
15874 tree op = TREE_OPERAND (expr, 0);
15875 if (!op || !TREE_SIDE_EFFECTS (op))
15876 return expr;
15877 op = TREE_OPERAND (op, 1);
15878 if (!TREE_SIDE_EFFECTS (op))
15879 return expr;
15882 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15885 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15886 of an indirection through OP0, or NULL_TREE if no simplification is
15887 possible. */
15889 tree
15890 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15892 tree sub = op0;
15893 tree subtype;
15894 poly_uint64 const_op01;
15896 STRIP_NOPS (sub);
15897 subtype = TREE_TYPE (sub);
15898 if (!POINTER_TYPE_P (subtype)
15899 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15900 return NULL_TREE;
15902 if (TREE_CODE (sub) == ADDR_EXPR)
15904 tree op = TREE_OPERAND (sub, 0);
15905 tree optype = TREE_TYPE (op);
15907 /* *&CONST_DECL -> the value of the const decl. */
15908 if (TREE_CODE (op) == CONST_DECL)
15909 return DECL_INITIAL (op);
15910 /* *&p => p; make sure to handle *&"str"[cst] here. */
15911 if (type == optype)
15913 tree fop = fold_read_from_constant_string (op);
15914 if (fop)
15915 return fop;
15916 else
15917 return op;
15919 /* *(foo *)&fooarray => fooarray[0] */
15920 else if (TREE_CODE (optype) == ARRAY_TYPE
15921 && type == TREE_TYPE (optype)
15922 && (!in_gimple_form
15923 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15925 tree type_domain = TYPE_DOMAIN (optype);
15926 tree min_val = size_zero_node;
15927 if (type_domain && TYPE_MIN_VALUE (type_domain))
15928 min_val = TYPE_MIN_VALUE (type_domain);
15929 if (in_gimple_form
15930 && TREE_CODE (min_val) != INTEGER_CST)
15931 return NULL_TREE;
15932 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15933 NULL_TREE, NULL_TREE);
15935 /* *(foo *)&complexfoo => __real__ complexfoo */
15936 else if (TREE_CODE (optype) == COMPLEX_TYPE
15937 && type == TREE_TYPE (optype))
15938 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15939 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15940 else if (VECTOR_TYPE_P (optype)
15941 && type == TREE_TYPE (optype))
15943 tree part_width = TYPE_SIZE (type);
15944 tree index = bitsize_int (0);
15945 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15946 index);
15950 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15951 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15953 tree op00 = TREE_OPERAND (sub, 0);
15954 tree op01 = TREE_OPERAND (sub, 1);
15956 STRIP_NOPS (op00);
15957 if (TREE_CODE (op00) == ADDR_EXPR)
15959 tree op00type;
15960 op00 = TREE_OPERAND (op00, 0);
15961 op00type = TREE_TYPE (op00);
15963 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15964 if (VECTOR_TYPE_P (op00type)
15965 && type == TREE_TYPE (op00type)
15966 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15967 but we want to treat offsets with MSB set as negative.
15968 For the code below negative offsets are invalid and
15969 TYPE_SIZE of the element is something unsigned, so
15970 check whether op01 fits into poly_int64, which implies
15971 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15972 then just use poly_uint64 because we want to treat the
15973 value as unsigned. */
15974 && tree_fits_poly_int64_p (op01))
15976 tree part_width = TYPE_SIZE (type);
15977 poly_uint64 max_offset
15978 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15979 * TYPE_VECTOR_SUBPARTS (op00type));
15980 if (known_lt (const_op01, max_offset))
15982 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15983 return fold_build3_loc (loc,
15984 BIT_FIELD_REF, type, op00,
15985 part_width, index);
15988 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15989 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15990 && type == TREE_TYPE (op00type))
15992 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15993 const_op01))
15994 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15996 /* ((foo *)&fooarray)[1] => fooarray[1] */
15997 else if (TREE_CODE (op00type) == ARRAY_TYPE
15998 && type == TREE_TYPE (op00type))
16000 tree type_domain = TYPE_DOMAIN (op00type);
16001 tree min_val = size_zero_node;
16002 if (type_domain && TYPE_MIN_VALUE (type_domain))
16003 min_val = TYPE_MIN_VALUE (type_domain);
16004 poly_uint64 type_size, index;
16005 if (poly_int_tree_p (min_val)
16006 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16007 && multiple_p (const_op01, type_size, &index))
16009 poly_offset_int off = index + wi::to_poly_offset (min_val);
16010 op01 = wide_int_to_tree (sizetype, off);
16011 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16012 NULL_TREE, NULL_TREE);
16018 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16019 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16020 && type == TREE_TYPE (TREE_TYPE (subtype))
16021 && (!in_gimple_form
16022 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16024 tree type_domain;
16025 tree min_val = size_zero_node;
16026 sub = build_fold_indirect_ref_loc (loc, sub);
16027 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16028 if (type_domain && TYPE_MIN_VALUE (type_domain))
16029 min_val = TYPE_MIN_VALUE (type_domain);
16030 if (in_gimple_form
16031 && TREE_CODE (min_val) != INTEGER_CST)
16032 return NULL_TREE;
16033 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16034 NULL_TREE);
16037 return NULL_TREE;
16040 /* Builds an expression for an indirection through T, simplifying some
16041 cases. */
16043 tree
16044 build_fold_indirect_ref_loc (location_t loc, tree t)
16046 tree type = TREE_TYPE (TREE_TYPE (t));
16047 tree sub = fold_indirect_ref_1 (loc, type, t);
16049 if (sub)
16050 return sub;
16052 return build1_loc (loc, INDIRECT_REF, type, t);
16055 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16057 tree
16058 fold_indirect_ref_loc (location_t loc, tree t)
16060 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16062 if (sub)
16063 return sub;
16064 else
16065 return t;
16068 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16069 whose result is ignored. The type of the returned tree need not be
16070 the same as the original expression. */
16072 tree
16073 fold_ignored_result (tree t)
16075 if (!TREE_SIDE_EFFECTS (t))
16076 return integer_zero_node;
16078 for (;;)
16079 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16081 case tcc_unary:
16082 t = TREE_OPERAND (t, 0);
16083 break;
16085 case tcc_binary:
16086 case tcc_comparison:
16087 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16088 t = TREE_OPERAND (t, 0);
16089 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16090 t = TREE_OPERAND (t, 1);
16091 else
16092 return t;
16093 break;
16095 case tcc_expression:
16096 switch (TREE_CODE (t))
16098 case COMPOUND_EXPR:
16099 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16100 return t;
16101 t = TREE_OPERAND (t, 0);
16102 break;
16104 case COND_EXPR:
16105 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16106 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16107 return t;
16108 t = TREE_OPERAND (t, 0);
16109 break;
16111 default:
16112 return t;
16114 break;
16116 default:
16117 return t;
16121 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16123 tree
16124 round_up_loc (location_t loc, tree value, unsigned int divisor)
16126 tree div = NULL_TREE;
16128 if (divisor == 1)
16129 return value;
16131 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16132 have to do anything. Only do this check when VALUE is not a
16133 constant, because for a constant the check is more expensive
16134 than just doing the rounding. */
16135 if (TREE_CODE (value) != INTEGER_CST)
16137 div = build_int_cst (TREE_TYPE (value), divisor);
16139 if (multiple_of_p (TREE_TYPE (value), value, div))
16140 return value;
16143 /* If divisor is a power of two, simplify this to bit manipulation. */
16144 if (pow2_or_zerop (divisor))
16146 if (TREE_CODE (value) == INTEGER_CST)
16148 wide_int val = wi::to_wide (value);
16149 bool overflow_p;
16151 if ((val & (divisor - 1)) == 0)
16152 return value;
16154 overflow_p = TREE_OVERFLOW (value);
16155 val += divisor - 1;
16156 val &= (int) -divisor;
16157 if (val == 0)
16158 overflow_p = true;
16160 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16162 else
16164 tree t;
16166 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16167 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16168 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16169 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16172 else
16174 if (!div)
16175 div = build_int_cst (TREE_TYPE (value), divisor);
16176 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16177 value = size_binop_loc (loc, MULT_EXPR, value, div);
16180 return value;
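/* A worked example: for a power-of-two divisor the rounding
   reduces to bit twiddling, e.g. rounding 13 up to a multiple of 8
   computes (13 + 7) & -8 = 20 & ~7 = 16.  */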
16183 /* Likewise, but round down. */
16185 tree
16186 round_down_loc (location_t loc, tree value, int divisor)
16188 tree div = NULL_TREE;
16190 gcc_assert (divisor > 0);
16191 if (divisor == 1)
16192 return value;
16194 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16195 have to do anything. Only do this check when VALUE is not a
16196 constant, because for a constant the check is more expensive
16197 than just doing the rounding. */
16198 if (TREE_CODE (value) != INTEGER_CST)
16200 div = build_int_cst (TREE_TYPE (value), divisor);
16202 if (multiple_of_p (TREE_TYPE (value), value, div))
16203 return value;
16206 /* If divisor is a power of two, simplify this to bit manipulation. */
16207 if (pow2_or_zerop (divisor))
16209 tree t;
16211 t = build_int_cst (TREE_TYPE (value), -divisor);
16212 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16214 else
16216 if (!div)
16217 div = build_int_cst (TREE_TYPE (value), divisor);
16218 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16219 value = size_binop_loc (loc, MULT_EXPR, value, div);
16222 return value;
16225 /* Returns the pointer to the base of the object addressed by EXP and
16226 extracts the information about the offset of the access, storing it
16227 in PBITPOS and POFFSET. */
16229 static tree
16230 split_address_to_core_and_offset (tree exp,
16231 poly_int64_pod *pbitpos, tree *poffset)
16233 tree core;
16234 machine_mode mode;
16235 int unsignedp, reversep, volatilep;
16236 poly_int64 bitsize;
16237 location_t loc = EXPR_LOCATION (exp);
16239 if (TREE_CODE (exp) == ADDR_EXPR)
16241 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16242 poffset, &mode, &unsignedp, &reversep,
16243 &volatilep);
16244 core = build_fold_addr_expr_loc (loc, core);
16246 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16248 core = TREE_OPERAND (exp, 0);
16249 STRIP_NOPS (core);
16250 *pbitpos = 0;
16251 *poffset = TREE_OPERAND (exp, 1);
16252 if (poly_int_tree_p (*poffset))
16254 poly_offset_int tem
16255 = wi::sext (wi::to_poly_offset (*poffset),
16256 TYPE_PRECISION (TREE_TYPE (*poffset)));
16257 tem <<= LOG2_BITS_PER_UNIT;
16258 if (tem.to_shwi (pbitpos))
16259 *poffset = NULL_TREE;
16262 else
16264 core = exp;
16265 *pbitpos = 0;
16266 *poffset = NULL_TREE;
16269 return core;
16272 /* Returns true if addresses of E1 and E2 differ by a constant, false
16273 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16275 bool
16276 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16278 tree core1, core2;
16279 poly_int64 bitpos1, bitpos2;
16280 tree toffset1, toffset2, tdiff, type;
16282 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16283 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16285 poly_int64 bytepos1, bytepos2;
16286 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16287 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16288 || !operand_equal_p (core1, core2, 0))
16289 return false;
16291 if (toffset1 && toffset2)
16293 type = TREE_TYPE (toffset1);
16294 if (type != TREE_TYPE (toffset2))
16295 toffset2 = fold_convert (type, toffset2);
16297 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16298 if (!cst_and_fits_in_hwi (tdiff))
16299 return false;
16301 *diff = int_cst_value (tdiff);
16303 else if (toffset1 || toffset2)
16305 /* If only one of the offsets is non-constant, the difference cannot
16306 be a constant. */
16307 return false;
16309 else
16310 *diff = 0;
16312 *diff += bytepos1 - bytepos2;
16313 return true;
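/* Illustrative example: for E1 = &a[3] and E2 = &a[1] over a
   4-byte element type, both addresses share the core &a, the byte
   positions differ by 12 - 4 = 8, and *DIFF is set to 8.  */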
16316 /* Return OFF converted to a pointer offset type suitable as offset for
16317 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16318 tree
16319 convert_to_ptrofftype_loc (location_t loc, tree off)
16321 if (ptrofftype_p (TREE_TYPE (off)))
16322 return off;
16323 return fold_convert_loc (loc, sizetype, off);
16326 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16327 tree
16328 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16330 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16331 ptr, convert_to_ptrofftype_loc (loc, off));
16334 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16335 tree
16336 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16338 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16339 ptr, size_int (off));
16342 /* Return a pointer to a NUL-terminated string containing the sequence
16343 of bytes corresponding to the representation of the object referred to
16344 by SRC (or a subsequence of such bytes within it if SRC is a reference
16345 to an initialized constant array plus some constant offset).
16346 Set *STRSIZE to the number of bytes in the constant sequence including
16347 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16348 where A is the array that stores the constant sequence that SRC points
16349 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16350 need not point to a string or even an array of characters but may point
16351 to an object of any type. */
16353 const char *
16354 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16356 /* The offset into the array A storing the string, and A's byte size. */
16357 tree offset_node;
16358 tree mem_size;
16360 if (strsize)
16361 *strsize = 0;
16363 if (strsize)
16364 src = byte_representation (src, &offset_node, &mem_size, NULL);
16365 else
16366 src = string_constant (src, &offset_node, &mem_size, NULL);
16367 if (!src)
16368 return NULL;
16370 unsigned HOST_WIDE_INT offset = 0;
16371 if (offset_node != NULL_TREE)
16373 if (!tree_fits_uhwi_p (offset_node))
16374 return NULL;
16375 else
16376 offset = tree_to_uhwi (offset_node);
16379 if (!tree_fits_uhwi_p (mem_size))
16380 return NULL;
16382 /* ARRAY_SIZE is the byte size of the array the constant sequence
16383 is stored in and equal to sizeof A. INIT_BYTES is the number
16384 of bytes in the constant sequence used to initialize the array,
16385 including any embedded NULs as well as the terminating NUL (for
16386 strings), but not including any trailing zeros/NULs past
16387 the terminating one appended implicitly to a string literal to
16388 zero out the remainder of the array it's stored in. For example,
16389 given:
16390 const char a[7] = "abc\0d";
16391 n = strlen (a + 1);
16392 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16393 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16394 is equal to strlen (A) + 1. */
16395 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16396 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16397 const char *string = TREE_STRING_POINTER (src);
16399 /* Ideally this would turn into a gcc_checking_assert over time. */
16400 if (init_bytes > array_size)
16401 init_bytes = array_size;
16403 if (init_bytes == 0 || offset >= array_size)
16404 return NULL;
16406 if (strsize)
16408 /* Compute and store the number of characters from the beginning
16409 of the substring at OFFSET to the end, including the terminating
16410 nul. Offsets past the initial length refer to null strings. */
16411 if (offset < init_bytes)
16412 *strsize = init_bytes - offset;
16413 else
16414 *strsize = 1;
16416 else
16418 tree eltype = TREE_TYPE (TREE_TYPE (src));
16419 /* Support only properly NUL-terminated single byte strings. */
16420 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16421 return NULL;
16422 if (string[init_bytes - 1] != '\0')
16423 return NULL;
16426 return offset < init_bytes ? string + offset : "";
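
/* An illustrative sketch (hypothetical, never built), assuming
   build_string_literal yields the address of a 6-byte "hello"
   constant: at byte offset 1, INIT_BYTES is 6 and OFFSET is 1, so
   *STRSIZE becomes 5 and the result points at "ello".  */
#if 0
static void
example_getbyterep (void)
{
  tree lit = build_string_literal (6, "hello");
  unsigned HOST_WIDE_INT strsize;
  const char *p = getbyterep (fold_build_pointer_plus_hwi (lit, 1),
                              &strsize);
  gcc_assert (p && strsize == 5 && strcmp (p, "ello") == 0);
}
#endif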
/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
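
/* A minimal sketch (hypothetical, never built) of the wrapper above:
   callers that only care about properly NUL-terminated strings go
   through c_getstr.  */
#if 0
static void
example_c_getstr (void)
{
  const char *p = c_getstr (build_string_literal (3, "hi"));
  gcc_assert (p && strcmp (p, "hi") == 0);
}
#endif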
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                          tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                         tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
                         tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                             TYPE_PRECISION (TREE_TYPE (t)),
                             TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
          if (wi::bit_and (nzbits1, nzbits2) == 0)
            return wi::bit_or (nzbits1, nzbits2);
        }
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
                 : wi::lshift (nzbits, arg1);
        }
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::lshift (nzbits, -arg1)
                 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
        }
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
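
/* A worked sketch (hypothetical, never built): for (x & 0xc) | 0x1
   with X unknown, every bit outside 0xd is known to be zero, so
   tree_nonzero_bits returns the mask 0xd.  */
#if 0
static void
example_tree_nonzero_bits (void)
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree t = fold_build2 (BIT_IOR_EXPR, type,
                        fold_build2 (BIT_AND_EXPR, type, x,
                                     build_int_cst (type, 0xc)),
                        build_int_cst (type, 0x1));
  gcc_assert (wi::eq_p (tree_nonzero_bits (t), 0xd));
}
#endif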
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
                 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
                 bool generic)
{
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
          || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
            || TREE_CODE (base0) == SSA_NAME
            || TREE_CODE (base0) == STRING_CST)
           && (DECL_P (base1)
               || TREE_CODE (base1) == SSA_NAME
               || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  if (equal == 1)
    {
      if (code == EQ_EXPR
          || code == NE_EXPR
          /* If the offsets are equal we can ignore overflow.  */
          || known_eq (off0, off1)
          || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
          /* Or if we compare using pointers to decls or strings.  */
          || (POINTER_TYPE_P (type)
              && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
        return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* The bases are known to be distinct; what remains is to rule out
     equality at object boundaries.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  if ((DECL_P (base0) && TREE_CODE (base1) == STRING_CST)
      || (TREE_CODE (base0) == STRING_CST && DECL_P (base1))
      || (TREE_CODE (base0) == STRING_CST
          && TREE_CODE (base1) == STRING_CST
          && ioff0 >= 0 && ioff1 >= 0
          && ioff0 < TREE_STRING_LENGTH (base0)
          && ioff1 < TREE_STRING_LENGTH (base1)
          /* This is an overly conservative test that the STRING_CSTs
             will not end up being string-merged.  */
          && strncmp (TREE_STRING_POINTER (base0) + ioff0,
                      TREE_STRING_POINTER (base1) + ioff1,
                      MIN (TREE_STRING_LENGTH (base0) - ioff0,
                           TREE_STRING_LENGTH (base1) - ioff1)) != 0))
    ;
  else if (!DECL_P (base0) || !DECL_P (base1))
    return 2;
  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  else if (!INTEGRAL_TYPE_P (type))
    ;
  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  else if (is_global_var (base0) != is_global_var (base1))
    ;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      tree sz1 = DECL_SIZE_UNIT (base1);
      /* If sizes are unknown, e.g. VLA or not representable, punt.  */
      if (!tree_fits_poly_int64_p (sz0) || !tree_fits_poly_int64_p (sz1))
        return 2;

      poly_int64 size0 = tree_to_poly_int64 (sz0);
      poly_int64 size1 = tree_to_poly_int64 (sz1);
      /* If one offset is pointing (or could be) to the beginning of one
         object and the other is pointing to one past the last byte of the
         other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
        equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
        equal = 2;
      /* If both offsets are the same, there are some cases we know that are
         ok.  Either if we know they aren't zero, or if we know both sizes
         are not zero.  */
      if (equal == 2
          && known_eq (off0, off1)
          && (known_ne (off0, 0)
              || (known_ne (size0, 0) && known_ne (size1, 0))))
        equal = 0;
    }

  return equal;
}
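
/* Contract sketch for the helper above (hypothetical, never built):
   only 0, 1 and 2 are possible results, with 1 meaning the address
   comparison reduces to comparing OFF0 with OFF1.  */
#if 0
static void
example_address_compare (tree op0, tree op1)
{
  tree base0, base1;
  poly_int64 off0, off1;
  int res = address_compare (EQ_EXPR, TREE_TYPE (op0), op0, op1,
                             base0, base1, off0, off1, true);
  gcc_assert (res == 0 || res == 1 || res == 2);
}
#endif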
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
                             tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}
/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
                                 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}
/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one, one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero, one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero, x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x, zero);

  /* (nonlvalue)x <-- (x - 0) */
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero, x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero, zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one, x);
}
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
                                               index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
                                               index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
                                              index, index)));
}
/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}
/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */