gcc/fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"
/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
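
/* Usage sketch (editor's illustration, not part of the original file):
   with the usual tree-building helpers,

     tree four   = build_int_cst (integer_type_node, 4);
     tree twelve = build_int_cst (integer_type_node, 12);
     div_if_zero_remainder (twelve, four);   -> INTEGER_CST 3
     div_if_zero_remainder (four, twelve);   -> NULL_TREE

   i.e. the helper only folds when the division is exact.  */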
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
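
/* Deferral protocol sketch (editor's illustration, not part of the
   original file): a caller that folds speculatively wraps the work as

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res_is_used, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so the deferred warning is only issued when the folded result is
   actually used.  */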
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
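
/* Example (editor's illustration): in a 32-bit signed type, only
   INT_MIN has just the sign bit set and -INT_MIN is not
   representable, so INT_MIN fails this test while every other signed
   constant passes; that is exactly the wi::only_sign_bit_p check
   above.  */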
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
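
/* Example (editor's illustration): for a double-typed PLUS_EXPR
   a + (-1.0), negate_expr rewrites -(a + (-1.0)) as 1.0 - a via the
   PLUS_EXPR case above, since the negative literal is cheaply
   negatable (the REAL_CST case prefers canonicalizing to positive
   constants); when no case applies it falls back to wrapping T in a
   plain NEGATE_EXPR.  */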
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
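
/* Worked example (editor's illustration): splitting IN = x + 3 with
   CODE = PLUS_EXPR stores 3 in *LITP, leaves *CONP null, and returns
   x as the variable part; splitting x - 3 instead stores 3 in
   *MINUS_LITP.  associate_trees below is the inverse operation.  */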
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
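
/* Example (editor's illustration): a rotate by a negative amount is
   normalized above by flipping the direction, so for 8-bit operands
   RROTATE_EXPR (x, -3) is evaluated as LROTATE_EXPR (x, 3); division
   and modulus by zero simply refuse to fold and return false.  */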
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
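
/* Example (editor's illustration): LSHIFT_EXPR distributes over
   addition only in operand 1, since (a + b) << c == (a << c) + (b << c)
   modulo overflow, whereas c << (a + b) != (c << a) + (c << b).
   const_binop below relies on this to operate directly on stepped
   vector encodings.  */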
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
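
/* Worked example (editor's illustration) for the straightforward
   complex division above: (3 + 2i) / (1 + 1i) gives
   t = 1*1 + 1*1 = 2, real = (3*1 + 2*1)/2 = 5/2, and
   imag = (2*1 - 3*1)/2 = -1/2, i.e. 2.5 - 0.5i.  */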
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
1942 /* Given two values, either both of sizetype or both of bitsizetype,
1943 compute the difference between the two values. Return the value
1944 in signed type corresponding to the type of the operands. */
1946 tree
1947 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1949 tree type = TREE_TYPE (arg0);
1950 tree ctype;
1952 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1953 TREE_TYPE (arg1)));
1955 /* If the type is already signed, just do the simple thing. */
1956 if (!TYPE_UNSIGNED (type))
1957 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1959 if (type == sizetype)
1960 ctype = ssizetype;
1961 else if (type == bitsizetype)
1962 ctype = sbitsizetype;
1963 else
1964 ctype = signed_type_for (type);
1966 /* If either operand is not a constant, do the conversions to the signed
1967 type and subtract. The hardware will do the right thing with any
1968 overflow in the subtraction. */
1969 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1970 return size_binop_loc (loc, MINUS_EXPR,
1971 fold_convert_loc (loc, ctype, arg0),
1972 fold_convert_loc (loc, ctype, arg1));
1974 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1975 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1976 overflow) and negate (which can't either). Special-case a result
1977 of zero while we're here. */
1978 if (tree_int_cst_equal (arg0, arg1))
1979 return build_int_cst (ctype, 0);
1980 else if (tree_int_cst_lt (arg1, arg0))
1981 return fold_convert_loc (loc, ctype,
1982 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1983 else
1984 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1985 fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc,
1987 MINUS_EXPR,
1988 arg1, arg0)));
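/* Worked example (illustrative): size_diffop_loc on the sizetype
   constants 3 and 5 takes the "subtract the other way" branch:
   it computes 5 - 3 = 2 in sizetype, converts that to ssizetype and
   subtracts it from zero, producing the ssizetype constant -2 with
   no spurious overflow flag.  */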
1991 /* A subroutine of fold_convert_const handling conversions of an
1992 INTEGER_CST to another integer type. */
1994 static tree
1995 fold_convert_const_int_from_int (tree type, const_tree arg1)
1997 /* Given an integer constant, make new constant with new type,
1998 appropriately sign-extended or truncated. Use widest_int
1999 so that any extension is done according to ARG1's type. */
2000 return force_fit_type (type, wi::to_widest (arg1),
2001 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2002 TREE_OVERFLOW (arg1));
2005 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2006 to an integer type. */
2008 static tree
2009 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2011 bool overflow = false;
2012 tree t;
2014 /* The following code implements the floating-point to integer
2015 conversion rules required by the Java Language Specification:
2016 IEEE NaNs are mapped to zero and values that overflow
2017 the target precision saturate, i.e. values greater than
2018 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2019 are mapped to INT_MIN. These semantics are allowed by the
2020 C and C++ standards that simply state that the behavior of
2021 FP-to-integer conversion is unspecified upon overflow. */
2023 wide_int val;
2024 REAL_VALUE_TYPE r;
2025 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2027 switch (code)
2029 case FIX_TRUNC_EXPR:
2030 real_trunc (&r, VOIDmode, &x);
2031 break;
2033 default:
2034 gcc_unreachable ();
2037 /* If R is NaN, return zero and show we have an overflow. */
2038 if (REAL_VALUE_ISNAN (r))
2040 overflow = true;
2041 val = wi::zero (TYPE_PRECISION (type));
2044 /* See if R is less than the lower bound or greater than the
2045 upper bound. */
2047 if (! overflow)
2049 tree lt = TYPE_MIN_VALUE (type);
2050 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2051 if (real_less (&r, &l))
2053 overflow = true;
2054 val = wi::to_wide (lt);
2058 if (! overflow)
2060 tree ut = TYPE_MAX_VALUE (type);
2061 if (ut)
2063 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2064 if (real_less (&u, &r))
2066 overflow = true;
2067 val = wi::to_wide (ut);
2072 if (! overflow)
2073 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2075 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2076 return t;
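/* Example of the saturation semantics above (illustrative): folding
   (int) 1.0e30 with a 32-bit int exceeds TYPE_MAX_VALUE, so VAL is
   clamped to 2147483647 and the result carries TREE_OVERFLOW;
   folding a NaN REAL_CST likewise yields 0 with TREE_OVERFLOW set.  */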
2079 /* A subroutine of fold_convert_const handling conversions of a
2080 FIXED_CST to an integer type. */
2082 static tree
2083 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2085 tree t;
2086 double_int temp, temp_trunc;
2087 scalar_mode mode;
2089 /* Right shift FIXED_CST to temp by fbit. */
2090 temp = TREE_FIXED_CST (arg1).data;
2091 mode = TREE_FIXED_CST (arg1).mode;
2092 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2094 temp = temp.rshift (GET_MODE_FBIT (mode),
2095 HOST_BITS_PER_DOUBLE_INT,
2096 SIGNED_FIXED_POINT_MODE_P (mode));
2098 /* Left shift temp to temp_trunc by fbit. */
2099 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2100 HOST_BITS_PER_DOUBLE_INT,
2101 SIGNED_FIXED_POINT_MODE_P (mode));
2103 else
2105 temp = double_int_zero;
2106 temp_trunc = double_int_zero;
2109 /* If FIXED_CST is negative, we need to round the value toward 0:
2110 if the fractional bits are not zero, add 1 to temp. */
2111 if (SIGNED_FIXED_POINT_MODE_P (mode)
2112 && temp_trunc.is_negative ()
2113 && TREE_FIXED_CST (arg1).data != temp_trunc)
2114 temp += double_int_one;
2116 /* Given a fixed-point constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type (type, temp, -1,
2119 (temp.is_negative ()
2120 && (TYPE_UNSIGNED (type)
2121 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2122 | TREE_OVERFLOW (arg1));
2124 return t;
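/* Worked example (illustrative): converting the signed fixed-point
   value -2.5 to an integer type must truncate toward zero.  The
   arithmetic right shift by FBIT gives TEMP = -3 (the shift rounds
   toward negative infinity); shifting back gives TEMP_TRUNC = -3.0,
   which differs from the original -2.5, so 1 is added and the
   result is the expected -2.  */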
2127 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2128 to another floating-point type. */
2130 static tree
2131 fold_convert_const_real_from_real (tree type, const_tree arg1)
2133 REAL_VALUE_TYPE value;
2134 tree t;
2136 /* Don't perform the operation if flag_signaling_nans is on
2137 and the operand is a signaling NaN. */
2138 if (HONOR_SNANS (arg1)
2139 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2140 return NULL_TREE;
2142 /* With flag_rounding_math we should respect the current rounding mode
2143 unless the conversion is exact. */
2144 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2145 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2146 return NULL_TREE;
2148 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2149 t = build_real (type, value);
2151 /* If converting an infinity or NAN to a representation that doesn't
2152 have one, set the overflow bit so that we can produce some kind of
2153 error message at the appropriate point if necessary. It's not the
2154 most user-friendly message, but it's better than nothing. */
2155 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2156 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2157 TREE_OVERFLOW (t) = 1;
2158 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2159 && !MODE_HAS_NANS (TYPE_MODE (type)))
2160 TREE_OVERFLOW (t) = 1;
2161 /* Regular overflow, conversion produced an infinity in a mode that
2162 can't represent them. */
2163 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2164 && REAL_VALUE_ISINF (value)
2165 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2166 TREE_OVERFLOW (t) = 1;
2167 else
2168 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2169 return t;
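/* Example (illustrative): narrowing a double REAL_CST to float
   keeps NaNs and infinities and merely propagates any existing
   overflow flag; TREE_OVERFLOW is set only when the target mode
   cannot represent the infinity or NaN being converted, or when a
   finite value overflows to infinity in such a mode.  */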
2172 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2173 to a floating-point type. */
2175 static tree
2176 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2178 REAL_VALUE_TYPE value;
2179 tree t;
2181 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2182 &TREE_FIXED_CST (arg1));
2183 t = build_real (type, value);
2185 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2186 return t;
2189 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2190 to another fixed-point type. */
2192 static tree
2193 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2195 FIXED_VALUE_TYPE value;
2196 tree t;
2197 bool overflow_p;
2199 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2200 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2201 t = build_fixed (type, value);
2203 /* Propagate overflow flags. */
2204 if (overflow_p | TREE_OVERFLOW (arg1))
2205 TREE_OVERFLOW (t) = 1;
2206 return t;
2209 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2210 to a fixed-point type. */
2212 static tree
2213 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2215 FIXED_VALUE_TYPE value;
2216 tree t;
2217 bool overflow_p;
2218 double_int di;
2220 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2222 di.low = TREE_INT_CST_ELT (arg1, 0);
2223 if (TREE_INT_CST_NUNITS (arg1) == 1)
2224 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2225 else
2226 di.high = TREE_INT_CST_ELT (arg1, 1);
2228 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2229 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2230 TYPE_SATURATING (type));
2231 t = build_fixed (type, value);
2233 /* Propagate overflow flags. */
2234 if (overflow_p | TREE_OVERFLOW (arg1))
2235 TREE_OVERFLOW (t) = 1;
2236 return t;
2239 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2240 to a fixed-point type. */
2242 static tree
2243 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2245 FIXED_VALUE_TYPE value;
2246 tree t;
2247 bool overflow_p;
2249 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2250 &TREE_REAL_CST (arg1),
2251 TYPE_SATURATING (type));
2252 t = build_fixed (type, value);
2254 /* Propagate overflow flags. */
2255 if (overflow_p | TREE_OVERFLOW (arg1))
2256 TREE_OVERFLOW (t) = 1;
2257 return t;
2260 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2261 type TYPE. If no simplification can be done return NULL_TREE. */
2263 static tree
2264 fold_convert_const (enum tree_code code, tree type, tree arg1)
2266 tree arg_type = TREE_TYPE (arg1);
2267 if (arg_type == type)
2268 return arg1;
2270 /* We can't widen types, since the runtime value could overflow the
2271 original type before being extended to the new type. */
2272 if (POLY_INT_CST_P (arg1)
2273 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2274 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2275 return build_poly_int_cst (type,
2276 poly_wide_int::from (poly_int_cst_value (arg1),
2277 TYPE_PRECISION (type),
2278 TYPE_SIGN (arg_type)));
2280 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2281 || TREE_CODE (type) == OFFSET_TYPE)
2283 if (TREE_CODE (arg1) == INTEGER_CST)
2284 return fold_convert_const_int_from_int (type, arg1);
2285 else if (TREE_CODE (arg1) == REAL_CST)
2286 return fold_convert_const_int_from_real (code, type, arg1);
2287 else if (TREE_CODE (arg1) == FIXED_CST)
2288 return fold_convert_const_int_from_fixed (type, arg1);
2290 else if (TREE_CODE (type) == REAL_TYPE)
2292 if (TREE_CODE (arg1) == INTEGER_CST)
2294 tree res = build_real_from_int_cst (type, arg1);
2295 /* Avoid the folding if flag_rounding_math is on and the
2296 conversion is not exact. */
2297 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2299 bool fail = false;
2300 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2301 TYPE_PRECISION (TREE_TYPE (arg1)));
2302 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2303 return NULL_TREE;
2305 return res;
2307 else if (TREE_CODE (arg1) == REAL_CST)
2308 return fold_convert_const_real_from_real (type, arg1);
2309 else if (TREE_CODE (arg1) == FIXED_CST)
2310 return fold_convert_const_real_from_fixed (type, arg1);
2312 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2314 if (TREE_CODE (arg1) == FIXED_CST)
2315 return fold_convert_const_fixed_from_fixed (type, arg1);
2316 else if (TREE_CODE (arg1) == INTEGER_CST)
2317 return fold_convert_const_fixed_from_int (type, arg1);
2318 else if (TREE_CODE (arg1) == REAL_CST)
2319 return fold_convert_const_fixed_from_real (type, arg1);
2321 else if (TREE_CODE (type) == VECTOR_TYPE)
2323 if (TREE_CODE (arg1) == VECTOR_CST
2324 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2326 tree elttype = TREE_TYPE (type);
2327 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2328 /* We can't handle steps directly when extending, since the
2329 values need to wrap at the original precision first. */
2330 bool step_ok_p
2331 = (INTEGRAL_TYPE_P (elttype)
2332 && INTEGRAL_TYPE_P (arg1_elttype)
2333 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2334 tree_vector_builder v;
2335 if (!v.new_unary_operation (type, arg1, step_ok_p))
2336 return NULL_TREE;
2337 unsigned int len = v.encoded_nelts ();
2338 for (unsigned int i = 0; i < len; ++i)
2340 tree elt = VECTOR_CST_ELT (arg1, i);
2341 tree cvt = fold_convert_const (code, elttype, elt);
2342 if (cvt == NULL_TREE)
2343 return NULL_TREE;
2344 v.quick_push (cvt);
2346 return v.build ();
2349 return NULL_TREE;
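/* Usage sketch (illustrative): fold_convert_const (NOP_EXPR,
   unsigned_char_type_node, <INTEGER_CST 300>) dispatches to
   fold_convert_const_int_from_int and yields the truncated constant
   44; a non-constant ARG1 or an unsupported TYPE/ARG1 pairing falls
   through to the NULL_TREE return so the caller can fold some other
   way.  */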
2352 /* Construct a vector of zero elements of vector type TYPE. */
2354 static tree
2355 build_zero_vector (tree type)
2357 tree t;
2359 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2360 return build_vector_from_val (type, t);
2363 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2365 bool
2366 fold_convertible_p (const_tree type, const_tree arg)
2368 tree orig = TREE_TYPE (arg);
2370 if (type == orig)
2371 return true;
2373 if (TREE_CODE (arg) == ERROR_MARK
2374 || TREE_CODE (type) == ERROR_MARK
2375 || TREE_CODE (orig) == ERROR_MARK)
2376 return false;
2378 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2379 return true;
2381 switch (TREE_CODE (type))
2383 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2384 case POINTER_TYPE: case REFERENCE_TYPE:
2385 case OFFSET_TYPE:
2386 return (INTEGRAL_TYPE_P (orig)
2387 || (POINTER_TYPE_P (orig)
2388 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2389 || TREE_CODE (orig) == OFFSET_TYPE);
2391 case REAL_TYPE:
2392 case FIXED_POINT_TYPE:
2393 case VOID_TYPE:
2394 return TREE_CODE (type) == TREE_CODE (orig);
2396 case VECTOR_TYPE:
2397 return (VECTOR_TYPE_P (orig)
2398 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2399 TYPE_VECTOR_SUBPARTS (orig))
2400 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2402 default:
2403 return false;
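/* Example (illustrative): fold_convertible_p is true for an int
   argument and a long type (an integral-to-integral NOP_EXPR), and
   for a pointer argument and an integer type of no greater
   precision, but false for e.g. a real argument and an integer
   type, which would need a FIX_TRUNC_EXPR rather than a NOP_EXPR.  */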
2407 /* Convert expression ARG to type TYPE. Used by the middle-end for
2408 simple conversions in preference to calling the front-end's convert. */
2410 tree
2411 fold_convert_loc (location_t loc, tree type, tree arg)
2413 tree orig = TREE_TYPE (arg);
2414 tree tem;
2416 if (type == orig)
2417 return arg;
2419 if (TREE_CODE (arg) == ERROR_MARK
2420 || TREE_CODE (type) == ERROR_MARK
2421 || TREE_CODE (orig) == ERROR_MARK)
2422 return error_mark_node;
2424 switch (TREE_CODE (type))
2426 case POINTER_TYPE:
2427 case REFERENCE_TYPE:
2428 /* Handle conversions between pointers to different address spaces. */
2429 if (POINTER_TYPE_P (orig)
2430 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2431 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2432 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2433 /* fall through */
2435 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2436 case OFFSET_TYPE:
2437 if (TREE_CODE (arg) == INTEGER_CST)
2439 tem = fold_convert_const (NOP_EXPR, type, arg);
2440 if (tem != NULL_TREE)
2441 return tem;
2443 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2444 || TREE_CODE (orig) == OFFSET_TYPE)
2445 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2446 if (TREE_CODE (orig) == COMPLEX_TYPE)
2447 return fold_convert_loc (loc, type,
2448 fold_build1_loc (loc, REALPART_EXPR,
2449 TREE_TYPE (orig), arg));
2450 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2451 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2452 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2454 case REAL_TYPE:
2455 if (TREE_CODE (arg) == INTEGER_CST)
2457 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2458 if (tem != NULL_TREE)
2459 return tem;
2461 else if (TREE_CODE (arg) == REAL_CST)
2463 tem = fold_convert_const (NOP_EXPR, type, arg);
2464 if (tem != NULL_TREE)
2465 return tem;
2467 else if (TREE_CODE (arg) == FIXED_CST)
2469 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2470 if (tem != NULL_TREE)
2471 return tem;
2474 switch (TREE_CODE (orig))
2476 case INTEGER_TYPE:
2477 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2478 case POINTER_TYPE: case REFERENCE_TYPE:
2479 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2481 case REAL_TYPE:
2482 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2484 case FIXED_POINT_TYPE:
2485 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2487 case COMPLEX_TYPE:
2488 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2489 return fold_convert_loc (loc, type, tem);
2491 default:
2492 gcc_unreachable ();
2495 case FIXED_POINT_TYPE:
2496 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2497 || TREE_CODE (arg) == REAL_CST)
2499 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2500 if (tem != NULL_TREE)
2501 goto fold_convert_exit;
2504 switch (TREE_CODE (orig))
2506 case FIXED_POINT_TYPE:
2507 case INTEGER_TYPE:
2508 case ENUMERAL_TYPE:
2509 case BOOLEAN_TYPE:
2510 case REAL_TYPE:
2511 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2513 case COMPLEX_TYPE:
2514 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2515 return fold_convert_loc (loc, type, tem);
2517 default:
2518 gcc_unreachable ();
2521 case COMPLEX_TYPE:
2522 switch (TREE_CODE (orig))
2524 case INTEGER_TYPE:
2525 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2526 case POINTER_TYPE: case REFERENCE_TYPE:
2527 case REAL_TYPE:
2528 case FIXED_POINT_TYPE:
2529 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2530 fold_convert_loc (loc, TREE_TYPE (type), arg),
2531 fold_convert_loc (loc, TREE_TYPE (type),
2532 integer_zero_node));
2533 case COMPLEX_TYPE:
2535 tree rpart, ipart;
2537 if (TREE_CODE (arg) == COMPLEX_EXPR)
2539 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2540 TREE_OPERAND (arg, 0));
2541 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2542 TREE_OPERAND (arg, 1));
2543 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2546 arg = save_expr (arg);
2547 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2548 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2549 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2550 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2551 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2554 default:
2555 gcc_unreachable ();
2558 case VECTOR_TYPE:
2559 if (integer_zerop (arg))
2560 return build_zero_vector (type);
2561 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2562 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2563 || TREE_CODE (orig) == VECTOR_TYPE);
2564 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2566 case VOID_TYPE:
2567 tem = fold_ignored_result (arg);
2568 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2570 default:
2571 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2572 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2573 gcc_unreachable ();
2575 fold_convert_exit:
2576 protected_set_expr_location_unshare (tem, loc);
2577 return tem;
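/* Usage sketch (illustrative): fold_convert, the UNKNOWN_LOCATION
   wrapper around fold_convert_loc, is the middle end's conversion
   helper:

     tree t = fold_convert (ssizetype, arg);

   returns ARG unchanged when the types already match, folds
   constant operands through fold_convert_const, and otherwise wraps
   ARG in the appropriate NOP_EXPR, FLOAT_EXPR, FIXED_CONVERT_EXPR
   or VIEW_CONVERT_EXPR.  */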
2580 /* Return false if expr can be assumed not to be an lvalue, true
2581 otherwise. */
2583 static bool
2584 maybe_lvalue_p (const_tree x)
2586 /* We only need to wrap lvalue tree codes. */
2587 switch (TREE_CODE (x))
2589 case VAR_DECL:
2590 case PARM_DECL:
2591 case RESULT_DECL:
2592 case LABEL_DECL:
2593 case FUNCTION_DECL:
2594 case SSA_NAME:
2596 case COMPONENT_REF:
2597 case MEM_REF:
2598 case INDIRECT_REF:
2599 case ARRAY_REF:
2600 case ARRAY_RANGE_REF:
2601 case BIT_FIELD_REF:
2602 case OBJ_TYPE_REF:
2604 case REALPART_EXPR:
2605 case IMAGPART_EXPR:
2606 case PREINCREMENT_EXPR:
2607 case PREDECREMENT_EXPR:
2608 case SAVE_EXPR:
2609 case TRY_CATCH_EXPR:
2610 case WITH_CLEANUP_EXPR:
2611 case COMPOUND_EXPR:
2612 case MODIFY_EXPR:
2613 case TARGET_EXPR:
2614 case COND_EXPR:
2615 case BIND_EXPR:
2616 case VIEW_CONVERT_EXPR:
2617 break;
2619 default:
2620 /* Assume the worst for front-end tree codes. */
2621 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2622 break;
2623 return false;
2626 return true;
2629 /* Return an expr equal to X but certainly not valid as an lvalue. */
2631 tree
2632 non_lvalue_loc (location_t loc, tree x)
2634 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2635 us. */
2636 if (in_gimple_form)
2637 return x;
2639 if (! maybe_lvalue_p (x))
2640 return x;
2641 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2644 /* Given a tree comparison code, return the code that is the logical inverse.
2645 It is generally not safe to do this for floating-point comparisons, except
2646 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2647 ERROR_MARK in this case. */
2649 enum tree_code
2650 invert_tree_comparison (enum tree_code code, bool honor_nans)
2652 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2653 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2654 return ERROR_MARK;
2656 switch (code)
2658 case EQ_EXPR:
2659 return NE_EXPR;
2660 case NE_EXPR:
2661 return EQ_EXPR;
2662 case GT_EXPR:
2663 return honor_nans ? UNLE_EXPR : LE_EXPR;
2664 case GE_EXPR:
2665 return honor_nans ? UNLT_EXPR : LT_EXPR;
2666 case LT_EXPR:
2667 return honor_nans ? UNGE_EXPR : GE_EXPR;
2668 case LE_EXPR:
2669 return honor_nans ? UNGT_EXPR : GT_EXPR;
2670 case LTGT_EXPR:
2671 return UNEQ_EXPR;
2672 case UNEQ_EXPR:
2673 return LTGT_EXPR;
2674 case UNGT_EXPR:
2675 return LE_EXPR;
2676 case UNGE_EXPR:
2677 return LT_EXPR;
2678 case UNLT_EXPR:
2679 return GE_EXPR;
2680 case UNLE_EXPR:
2681 return GT_EXPR;
2682 case ORDERED_EXPR:
2683 return UNORDERED_EXPR;
2684 case UNORDERED_EXPR:
2685 return ORDERED_EXPR;
2686 default:
2687 gcc_unreachable ();
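/* Example (illustrative): with NaNs honored, the logical inverse of
   x < y is "x unordered, greater than or equal to y", so LT_EXPR
   maps to UNGE_EXPR.  Under -ftrapping-math the ordering cases
   return ERROR_MARK instead, because x < y raises the invalid
   exception on unordered operands while UNGE_EXPR must not, so
   inverting would change which inputs trap.  */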
2691 /* Similar, but return the comparison that results if the operands are
2692 swapped. This is safe for floating-point. */
2694 enum tree_code
2695 swap_tree_comparison (enum tree_code code)
2697 switch (code)
2699 case EQ_EXPR:
2700 case NE_EXPR:
2701 case ORDERED_EXPR:
2702 case UNORDERED_EXPR:
2703 case LTGT_EXPR:
2704 case UNEQ_EXPR:
2705 return code;
2706 case GT_EXPR:
2707 return LT_EXPR;
2708 case GE_EXPR:
2709 return LE_EXPR;
2710 case LT_EXPR:
2711 return GT_EXPR;
2712 case LE_EXPR:
2713 return GE_EXPR;
2714 case UNGT_EXPR:
2715 return UNLT_EXPR;
2716 case UNGE_EXPR:
2717 return UNLE_EXPR;
2718 case UNLT_EXPR:
2719 return UNGT_EXPR;
2720 case UNLE_EXPR:
2721 return UNGE_EXPR;
2722 default:
2723 gcc_unreachable ();
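/* Example (illustrative): swapping operands turns x < y into y > x,
   so LT_EXPR maps to GT_EXPR, and the symmetric codes (EQ, NE,
   ORDERED, UNORDERED, LTGT, UNEQ) map to themselves; unlike
   inversion, no unordered variant is introduced, which is why the
   swap is safe for floating point.  */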
2728 /* Convert a comparison tree code from an enum tree_code representation
2729 into a compcode bit-based encoding. This function is the inverse of
2730 compcode_to_comparison. */
2732 static enum comparison_code
2733 comparison_to_compcode (enum tree_code code)
2735 switch (code)
2737 case LT_EXPR:
2738 return COMPCODE_LT;
2739 case EQ_EXPR:
2740 return COMPCODE_EQ;
2741 case LE_EXPR:
2742 return COMPCODE_LE;
2743 case GT_EXPR:
2744 return COMPCODE_GT;
2745 case NE_EXPR:
2746 return COMPCODE_NE;
2747 case GE_EXPR:
2748 return COMPCODE_GE;
2749 case ORDERED_EXPR:
2750 return COMPCODE_ORD;
2751 case UNORDERED_EXPR:
2752 return COMPCODE_UNORD;
2753 case UNLT_EXPR:
2754 return COMPCODE_UNLT;
2755 case UNEQ_EXPR:
2756 return COMPCODE_UNEQ;
2757 case UNLE_EXPR:
2758 return COMPCODE_UNLE;
2759 case UNGT_EXPR:
2760 return COMPCODE_UNGT;
2761 case LTGT_EXPR:
2762 return COMPCODE_LTGT;
2763 case UNGE_EXPR:
2764 return COMPCODE_UNGE;
2765 default:
2766 gcc_unreachable ();
2770 /* Convert a compcode bit-based encoding of a comparison operator back
2771 to GCC's enum tree_code representation. This function is the
2772 inverse of comparison_to_compcode. */
2774 static enum tree_code
2775 compcode_to_comparison (enum comparison_code code)
2777 switch (code)
2779 case COMPCODE_LT:
2780 return LT_EXPR;
2781 case COMPCODE_EQ:
2782 return EQ_EXPR;
2783 case COMPCODE_LE:
2784 return LE_EXPR;
2785 case COMPCODE_GT:
2786 return GT_EXPR;
2787 case COMPCODE_NE:
2788 return NE_EXPR;
2789 case COMPCODE_GE:
2790 return GE_EXPR;
2791 case COMPCODE_ORD:
2792 return ORDERED_EXPR;
2793 case COMPCODE_UNORD:
2794 return UNORDERED_EXPR;
2795 case COMPCODE_UNLT:
2796 return UNLT_EXPR;
2797 case COMPCODE_UNEQ:
2798 return UNEQ_EXPR;
2799 case COMPCODE_UNLE:
2800 return UNLE_EXPR;
2801 case COMPCODE_UNGT:
2802 return UNGT_EXPR;
2803 case COMPCODE_LTGT:
2804 return LTGT_EXPR;
2805 case COMPCODE_UNGE:
2806 return UNGE_EXPR;
2807 default:
2808 gcc_unreachable ();
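/* The point of the encoding (illustrative): each of LT, EQ, GT and
   UNORD is a separate bit, so a compcode is simply the set of
   outcomes for which the predicate is true.  For instance
   COMPCODE_LE (3) == COMPCODE_LT (1) | COMPCODE_EQ (2), and
   COMPCODE_LTGT (5) == COMPCODE_LT | COMPCODE_GT, which lets
   conjunctions and disjunctions of predicates be computed with
   bitwise AND and OR.  */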
2812 /* Return true if COND1 tests the opposite condition of COND2. */
2814 bool
2815 inverse_conditions_p (const_tree cond1, const_tree cond2)
2817 return (COMPARISON_CLASS_P (cond1)
2818 && COMPARISON_CLASS_P (cond2)
2819 && (invert_tree_comparison
2820 (TREE_CODE (cond1),
2821 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2822 && operand_equal_p (TREE_OPERAND (cond1, 0),
2823 TREE_OPERAND (cond2, 0), 0)
2824 && operand_equal_p (TREE_OPERAND (cond1, 1),
2825 TREE_OPERAND (cond2, 1), 0));
2828 /* Return a tree for the comparison which is the combination of
2829 doing the AND or OR (depending on CODE) of the two operations LCODE
2830 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2831 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2832 if this makes the transformation invalid. */
2834 tree
2835 combine_comparisons (location_t loc,
2836 enum tree_code code, enum tree_code lcode,
2837 enum tree_code rcode, tree truth_type,
2838 tree ll_arg, tree lr_arg)
2840 bool honor_nans = HONOR_NANS (ll_arg);
2841 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2842 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2843 int compcode;
2845 switch (code)
2847 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2848 compcode = lcompcode & rcompcode;
2849 break;
2851 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2852 compcode = lcompcode | rcompcode;
2853 break;
2855 default:
2856 return NULL_TREE;
2859 if (!honor_nans)
2861 /* Eliminate unordered comparisons, as well as LTGT and ORD
2862 which are not used unless the mode has NaNs. */
2863 compcode &= ~COMPCODE_UNORD;
2864 if (compcode == COMPCODE_LTGT)
2865 compcode = COMPCODE_NE;
2866 else if (compcode == COMPCODE_ORD)
2867 compcode = COMPCODE_TRUE;
2869 else if (flag_trapping_math)
2871 /* Check that the original operation and the optimized ones will trap
2872 under the same condition. */
2873 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2874 && (lcompcode != COMPCODE_EQ)
2875 && (lcompcode != COMPCODE_ORD);
2876 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2877 && (rcompcode != COMPCODE_EQ)
2878 && (rcompcode != COMPCODE_ORD);
2879 bool trap = (compcode & COMPCODE_UNORD) == 0
2880 && (compcode != COMPCODE_EQ)
2881 && (compcode != COMPCODE_ORD);
2883 /* In a short-circuited boolean expression the LHS might be
2884 such that the RHS, if evaluated, will never trap. For
2885 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2886 if neither x nor y is NaN. (This is a mixed blessing: for
2887 example, the expression above will never trap, hence
2888 optimizing it to x < y would be invalid). */
2889 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2890 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2891 rtrap = false;
2893 /* If the comparison was short-circuited, and only the RHS
2894 trapped, we may now generate a spurious trap. */
2895 if (rtrap && !ltrap
2896 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2897 return NULL_TREE;
2899 /* If we changed the conditions that cause a trap, we lose. */
2900 if ((ltrap || rtrap) != trap)
2901 return NULL_TREE;
2904 if (compcode == COMPCODE_TRUE)
2905 return constant_boolean_node (true, truth_type);
2906 else if (compcode == COMPCODE_FALSE)
2907 return constant_boolean_node (false, truth_type);
2908 else
2910 enum tree_code tcode;
2912 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2913 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
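/* Example (illustrative): for (x < y) && (x == y) the compcodes
   combine as COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE, so the
   whole expression folds to constant false, while
   (x < y) || (x == y) combines to COMPCODE_LE and folds to x <= y,
   assuming the NaN and trapping checks above permit the
   transformation.  */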
2917 /* Return nonzero if two operands (typically of the same tree node)
2918 are necessarily equal. FLAGS modifies behavior as follows:
2920 If OEP_ONLY_CONST is set, only return nonzero for constants.
2921 This function tests whether the operands are indistinguishable;
2922 it does not test whether they are equal using C's == operation.
2923 The distinction is important for IEEE floating point, because
2924 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2925 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2927 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2928 even though it may hold multiple values during a function.
2929 This is because a GCC tree node guarantees that nothing else is
2930 executed between the evaluation of its "operands" (which may often
2931 be evaluated in arbitrary order). Hence if the operands themselves
2932 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2933 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2934 unset means assuming isochronic (or instantaneous) tree equivalence.
2935 Unless comparing arbitrary expression trees, such as from different
2936 statements, this flag can usually be left unset.
2938 If OEP_PURE_SAME is set, then pure functions with identical arguments
2939 are considered the same. It is used when the caller has other ways
2940 to ensure that global memory is unchanged in between.
2942 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2943 not values of expressions.
2945 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2946 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2948 If OEP_BITWISE is set, then require the values to be bitwise identical
2949 rather than simply numerically equal. Do not take advantage of things
2950 like math-related flags or undefined behavior; only return true for
2951 values that are provably bitwise identical in all circumstances.
2953 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2954 any operand with side effects. This is unnecessarily conservative in the
2955 case we know that arg0 and arg1 are in disjoint code paths (such as in
2956 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2957 addresses with TREE_CONSTANT flag set so we know that &var == &var
2958 even if var is volatile. */
2960 bool
2961 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2962 unsigned int flags)
2964 bool r;
2965 if (verify_hash_value (arg0, arg1, flags, &r))
2966 return r;
2968 STRIP_ANY_LOCATION_WRAPPER (arg0);
2969 STRIP_ANY_LOCATION_WRAPPER (arg1);
2971 /* If either is ERROR_MARK, they aren't equal. */
2972 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2973 || TREE_TYPE (arg0) == error_mark_node
2974 || TREE_TYPE (arg1) == error_mark_node)
2975 return false;
2977 /* Similar, if either does not have a type (like a template id),
2978 they aren't equal. */
2979 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2980 return false;
2982 /* Bitwise identity makes no sense if the values have different layouts. */
2983 if ((flags & OEP_BITWISE)
2984 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2985 return false;
2987 /* We cannot consider pointers to different address space equal. */
2988 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2989 && POINTER_TYPE_P (TREE_TYPE (arg1))
2990 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2991 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2992 return false;
2994 /* Check equality of integer constants before bailing out due to
2995 precision differences. */
2996 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2998 /* Address of INTEGER_CST is not defined; check that we did not forget
2999 to drop the OEP_ADDRESS_OF flag. */
3000 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3001 return tree_int_cst_equal (arg0, arg1);
3004 if (!(flags & OEP_ADDRESS_OF))
3006 /* If both types don't have the same signedness, then we can't consider
3007 them equal. We must check this before the STRIP_NOPS calls
3008 because they may change the signedness of the arguments. As pointers
3009 strictly don't have a signedness, require either two pointers or
3010 two non-pointers as well. */
3011 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3012 || POINTER_TYPE_P (TREE_TYPE (arg0))
3013 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3014 return false;
3016 /* If both types don't have the same precision, then it is not safe
3017 to strip NOPs. */
3018 if (element_precision (TREE_TYPE (arg0))
3019 != element_precision (TREE_TYPE (arg1)))
3020 return false;
3022 STRIP_NOPS (arg0);
3023 STRIP_NOPS (arg1);
3025 #if 0
3026 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3027 sanity check once the issue is solved. */
3028 else
3029 /* Addresses of conversions and SSA_NAMEs (and many other things)
3030 are not defined. Check that we did not forget to drop the
3031 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3032 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3033 && TREE_CODE (arg0) != SSA_NAME);
3034 #endif
3036 /* In case both args are comparisons but with different comparison
3037 code, try to swap the comparison operands of one arg to produce
3038 a match and compare that variant. */
3039 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3040 && COMPARISON_CLASS_P (arg0)
3041 && COMPARISON_CLASS_P (arg1))
3043 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3045 if (TREE_CODE (arg0) == swap_code)
3046 return operand_equal_p (TREE_OPERAND (arg0, 0),
3047 TREE_OPERAND (arg1, 1), flags)
3048 && operand_equal_p (TREE_OPERAND (arg0, 1),
3049 TREE_OPERAND (arg1, 0), flags);
3052 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3054 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3055 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3057 else if (flags & OEP_ADDRESS_OF)
3059 /* If we are interested in comparing addresses ignore
3060 MEM_REF wrappings of the base that can appear just for
3061 TBAA reasons. */
3062 if (TREE_CODE (arg0) == MEM_REF
3063 && DECL_P (arg1)
3064 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3066 && integer_zerop (TREE_OPERAND (arg0, 1)))
3067 return true;
3068 else if (TREE_CODE (arg1) == MEM_REF
3069 && DECL_P (arg0)
3070 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3071 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3072 && integer_zerop (TREE_OPERAND (arg1, 1)))
3073 return true;
3074 return false;
3076 else
3077 return false;
3080 /* When not checking addresses, this is needed for conversions and for
3081 COMPONENT_REF. Might as well play it safe and always test this. */
3082 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3083 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3084 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3085 && !(flags & OEP_ADDRESS_OF)))
3086 return false;
3088 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3089 We don't care about side effects in that case because the SAVE_EXPR
3090 takes care of that for us. In all other cases, two expressions are
3091 equal if they have no side effects. If we have two identical
3092 expressions with side effects that should be treated the same due
3093 to the only side effects being identical SAVE_EXPR's, that will
3094 be detected in the recursive calls below.
3095 If we are taking an invariant address of two identical objects
3096 they are necessarily equal as well. */
3097 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3098 && (TREE_CODE (arg0) == SAVE_EXPR
3099 || (flags & OEP_MATCH_SIDE_EFFECTS)
3100 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3101 return true;
3103 /* Next handle constant cases, those for which we can return 1 even
3104 if ONLY_CONST is set. */
3105 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3106 switch (TREE_CODE (arg0))
3108 case INTEGER_CST:
3109 return tree_int_cst_equal (arg0, arg1);
3111 case FIXED_CST:
3112 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3113 TREE_FIXED_CST (arg1));
3115 case REAL_CST:
3116 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3117 return true;
3119 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3121 /* If we do not distinguish between signed and unsigned zero,
3122 consider them equal. */
3123 if (real_zerop (arg0) && real_zerop (arg1))
3124 return true;
3126 return false;
3128 case VECTOR_CST:
3130 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3131 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3132 return false;
3134 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3135 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3136 return false;
3138 unsigned int count = vector_cst_encoded_nelts (arg0);
3139 for (unsigned int i = 0; i < count; ++i)
3140 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3141 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3142 return false;
3143 return true;
3146 case COMPLEX_CST:
3147 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3148 flags)
3149 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3150 flags));
3152 case STRING_CST:
3153 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3154 && ! memcmp (TREE_STRING_POINTER (arg0),
3155 TREE_STRING_POINTER (arg1),
3156 TREE_STRING_LENGTH (arg0)));
3158 case ADDR_EXPR:
3159 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3160 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3161 flags | OEP_ADDRESS_OF
3162 | OEP_MATCH_SIDE_EFFECTS);
3163 case CONSTRUCTOR:
3164 /* In GIMPLE empty constructors are allowed in initializers of
3165 aggregates. */
3166 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3167 default:
3168 break;
3171 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3172 two instances of undefined behavior will give identical results. */
3173 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3174 return false;
3176 /* Define macros to test an operand from arg0 and arg1 for equality and a
3177 variant that allows null and views null as being different from any
3178 non-null value. In the latter case, if either is null, then both
3179 must be; otherwise, do the normal comparison. */
3180 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3181 TREE_OPERAND (arg1, N), flags)
3183 #define OP_SAME_WITH_NULL(N) \
3184 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3185 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3187 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3189 case tcc_unary:
3190 /* Two conversions are equal only if signedness and modes match. */
3191 switch (TREE_CODE (arg0))
3193 CASE_CONVERT:
3194 case FIX_TRUNC_EXPR:
3195 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3196 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3197 return false;
3198 break;
3199 default:
3200 break;
3203 return OP_SAME (0);
3206 case tcc_comparison:
3207 case tcc_binary:
3208 if (OP_SAME (0) && OP_SAME (1))
3209 return true;
3211 /* For commutative ops, allow the other order. */
3212 return (commutative_tree_code (TREE_CODE (arg0))
3213 && operand_equal_p (TREE_OPERAND (arg0, 0),
3214 TREE_OPERAND (arg1, 1), flags)
3215 && operand_equal_p (TREE_OPERAND (arg0, 1),
3216 TREE_OPERAND (arg1, 0), flags));
3218 case tcc_reference:
3219 /* If either of the pointer (or reference) expressions we are
3220 dereferencing contain a side effect, these cannot be equal,
3221 but their addresses can be. */
3222 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3223 && (TREE_SIDE_EFFECTS (arg0)
3224 || TREE_SIDE_EFFECTS (arg1)))
3225 return false;
3227 switch (TREE_CODE (arg0))
3229 case INDIRECT_REF:
3230 if (!(flags & OEP_ADDRESS_OF))
3232 if (TYPE_ALIGN (TREE_TYPE (arg0))
3233 != TYPE_ALIGN (TREE_TYPE (arg1)))
3234 return false;
3235 /* Verify that the access types are compatible. */
3236 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3237 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3238 return false;
3240 flags &= ~OEP_ADDRESS_OF;
3241 return OP_SAME (0);
3243 case IMAGPART_EXPR:
3244 /* Require the same offset. */
3245 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3246 TYPE_SIZE (TREE_TYPE (arg1)),
3247 flags & ~OEP_ADDRESS_OF))
3248 return false;
3250 /* Fallthru. */
3251 case REALPART_EXPR:
3252 case VIEW_CONVERT_EXPR:
3253 return OP_SAME (0);
3255 case TARGET_MEM_REF:
3256 case MEM_REF:
3257 if (!(flags & OEP_ADDRESS_OF))
3259 /* Require equal access sizes */
3260 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3261 && (!TYPE_SIZE (TREE_TYPE (arg0))
3262 || !TYPE_SIZE (TREE_TYPE (arg1))
3263 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3264 TYPE_SIZE (TREE_TYPE (arg1)),
3265 flags)))
3266 return false;
3267 /* Verify that access happens in similar types. */
3268 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3269 return false;
3270 /* Verify that accesses are TBAA compatible. */
3271 if (!alias_ptr_types_compatible_p
3272 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3273 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3274 || (MR_DEPENDENCE_CLIQUE (arg0)
3275 != MR_DEPENDENCE_CLIQUE (arg1))
3276 || (MR_DEPENDENCE_BASE (arg0)
3277 != MR_DEPENDENCE_BASE (arg1)))
3278 return false;
3279 /* Verify that alignment is compatible. */
3280 if (TYPE_ALIGN (TREE_TYPE (arg0))
3281 != TYPE_ALIGN (TREE_TYPE (arg1)))
3282 return false;
3284 flags &= ~OEP_ADDRESS_OF;
3285 return (OP_SAME (0) && OP_SAME (1)
3286 /* TARGET_MEM_REFs require equal extra operands. */
3287 && (TREE_CODE (arg0) != TARGET_MEM_REF
3288 || (OP_SAME_WITH_NULL (2)
3289 && OP_SAME_WITH_NULL (3)
3290 && OP_SAME_WITH_NULL (4))));
3292 case ARRAY_REF:
3293 case ARRAY_RANGE_REF:
3294 if (!OP_SAME (0))
3295 return false;
3296 flags &= ~OEP_ADDRESS_OF;
3297 /* Compare the array index by value if it is constant first as we
3298 may have different types but same value here. */
3299 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3300 TREE_OPERAND (arg1, 1))
3301 || OP_SAME (1))
3302 && OP_SAME_WITH_NULL (2)
3303 && OP_SAME_WITH_NULL (3)
3304 /* Compare low bound and element size as with OEP_ADDRESS_OF
3305 we have to account for the offset of the ref. */
3306 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3307 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3308 || (operand_equal_p (array_ref_low_bound
3309 (CONST_CAST_TREE (arg0)),
3310 array_ref_low_bound
3311 (CONST_CAST_TREE (arg1)), flags)
3312 && operand_equal_p (array_ref_element_size
3313 (CONST_CAST_TREE (arg0)),
3314 array_ref_element_size
3315 (CONST_CAST_TREE (arg1)),
3316 flags))));
3318 case COMPONENT_REF:
3319 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3320 may be NULL when we're called to compare MEM_EXPRs. */
3321 if (!OP_SAME_WITH_NULL (0))
3322 return false;
3324 bool compare_address = flags & OEP_ADDRESS_OF;
3326 /* Most of the time we only need to compare FIELD_DECLs for equality.
3327 However, when determining the address, look at the actual offsets.
3328 These may match for unions and unshared record types. */
3329 flags &= ~OEP_ADDRESS_OF;
3330 if (!OP_SAME (1))
3332 if (compare_address
3333 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3335 if (TREE_OPERAND (arg0, 2)
3336 || TREE_OPERAND (arg1, 2))
3337 return OP_SAME_WITH_NULL (2);
3338 tree field0 = TREE_OPERAND (arg0, 1);
3339 tree field1 = TREE_OPERAND (arg1, 1);
3341 if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3342 DECL_FIELD_OFFSET (field1), flags)
3343 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3344 DECL_FIELD_BIT_OFFSET (field1),
3345 flags))
3346 return false;
3348 else
3349 return false;
3352 return OP_SAME_WITH_NULL (2);
3354 case BIT_FIELD_REF:
3355 if (!OP_SAME (0))
3356 return false;
3357 flags &= ~OEP_ADDRESS_OF;
3358 return OP_SAME (1) && OP_SAME (2);
3360 default:
3361 return false;
3364 case tcc_expression:
3365 switch (TREE_CODE (arg0))
3367 case ADDR_EXPR:
3368 /* Be sure we pass right ADDRESS_OF flag. */
3369 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3370 return operand_equal_p (TREE_OPERAND (arg0, 0),
3371 TREE_OPERAND (arg1, 0),
3372 flags | OEP_ADDRESS_OF);
3374 case TRUTH_NOT_EXPR:
3375 return OP_SAME (0);
3377 case TRUTH_ANDIF_EXPR:
3378 case TRUTH_ORIF_EXPR:
3379 return OP_SAME (0) && OP_SAME (1);
3381 case WIDEN_MULT_PLUS_EXPR:
3382 case WIDEN_MULT_MINUS_EXPR:
3383 if (!OP_SAME (2))
3384 return false;
3385 /* The multiplication operands are commutative. */
3386 /* FALLTHRU */
3388 case TRUTH_AND_EXPR:
3389 case TRUTH_OR_EXPR:
3390 case TRUTH_XOR_EXPR:
3391 if (OP_SAME (0) && OP_SAME (1))
3392 return true;
3394 /* Otherwise take into account this is a commutative operation. */
3395 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3396 TREE_OPERAND (arg1, 1), flags)
3397 && operand_equal_p (TREE_OPERAND (arg0, 1),
3398 TREE_OPERAND (arg1, 0), flags));
3400 case COND_EXPR:
3401 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3402 return false;
3403 flags &= ~OEP_ADDRESS_OF;
3404 return OP_SAME (0);
3406 case BIT_INSERT_EXPR:
3407 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3408 of op1, so we need to check that they are the same. */
3409 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3410 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3411 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3412 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3413 return false;
3414 /* FALLTHRU */
3416 case VEC_COND_EXPR:
3417 case DOT_PROD_EXPR:
3418 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3420 case MODIFY_EXPR:
3421 case INIT_EXPR:
3422 case COMPOUND_EXPR:
3423 case PREDECREMENT_EXPR:
3424 case PREINCREMENT_EXPR:
3425 case POSTDECREMENT_EXPR:
3426 case POSTINCREMENT_EXPR:
3427 if (flags & OEP_LEXICOGRAPHIC)
3428 return OP_SAME (0) && OP_SAME (1);
3429 return false;
3431 case CLEANUP_POINT_EXPR:
3432 case EXPR_STMT:
3433 case SAVE_EXPR:
3434 if (flags & OEP_LEXICOGRAPHIC)
3435 return OP_SAME (0);
3436 return false;
3438 case OBJ_TYPE_REF:
3439 /* Virtual table reference. */
3440 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3441 OBJ_TYPE_REF_EXPR (arg1), flags))
3442 return false;
3443 flags &= ~OEP_ADDRESS_OF;
3444 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3445 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3446 return false;
3447 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3448 OBJ_TYPE_REF_OBJECT (arg1), flags))
3449 return false;
3450 if (virtual_method_call_p (arg0))
3452 if (!virtual_method_call_p (arg1))
3453 return false;
3454 return types_same_for_odr (obj_type_ref_class (arg0),
3455 obj_type_ref_class (arg1));
3457 return false;
3459 default:
3460 return false;
3463 case tcc_vl_exp:
3464 switch (TREE_CODE (arg0))
3466 case CALL_EXPR:
3467 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3468 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3469 /* If the CALL_EXPRs are not both internal calls or both normal
3470 function calls, then they are not equal. */
3471 return false;
3472 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3474 /* If the CALL_EXPRs call different internal functions, then they
3475 are not equal. */
3476 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3477 return false;
3479 else
3481 /* If the CALL_EXPRs call different functions, then they are not
3482 equal. */
3483 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3484 flags))
3485 return false;
3488 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3490 unsigned int cef = call_expr_flags (arg0);
3491 if (flags & OEP_PURE_SAME)
3492 cef &= ECF_CONST | ECF_PURE;
3493 else
3494 cef &= ECF_CONST;
3495 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3496 return false;
3499 /* Now see if all the arguments are the same. */
3501 const_call_expr_arg_iterator iter0, iter1;
3502 const_tree a0, a1;
3503 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3504 a1 = first_const_call_expr_arg (arg1, &iter1);
3505 a0 && a1;
3506 a0 = next_const_call_expr_arg (&iter0),
3507 a1 = next_const_call_expr_arg (&iter1))
3508 if (! operand_equal_p (a0, a1, flags))
3509 return false;
3511 /* If we get here and both argument lists are exhausted
3512 then the CALL_EXPRs are equal. */
3513 return ! (a0 || a1);
3515 default:
3516 return false;
3519 case tcc_declaration:
3520 /* Consider __builtin_sqrt equal to sqrt. */
3521 if (TREE_CODE (arg0) == FUNCTION_DECL)
3522 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3523 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3524 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3525 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3527 if (DECL_P (arg0)
3528 && (flags & OEP_DECL_NAME)
3529 && (flags & OEP_LEXICOGRAPHIC))
3531 /* Consider decls with the same name equal. The caller needs
3532 to make sure they refer to the same entity (such as a function
3533 formal parameter). */
3534 tree a0name = DECL_NAME (arg0);
3535 tree a1name = DECL_NAME (arg1);
3536 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3537 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3538 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3540 return false;
3542 case tcc_exceptional:
3543 if (TREE_CODE (arg0) == CONSTRUCTOR)
3545 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3546 return false;
3548 /* In GIMPLE constructors are used only to build vectors from
3549 elements. Individual elements in the constructor must be
3550 indexed in increasing order and form an initial sequence.
3552 We make no effort to compare constructors in GENERIC.
3553 (see sem_variable::equals in ipa-icf which can do so for
3554 constants). */
3555 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3556 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3557 return false;
3559 /* Be sure that the vectors constructed have the same representation.
3560 So far we have only checked that element precision and modes match.
3561 Vectors may be BLKmode, so also check that the number of
3562 parts matches. */
3563 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3564 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3565 return false;
3567 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3568 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3569 unsigned int len = vec_safe_length (v0);
3571 if (len != vec_safe_length (v1))
3572 return false;
3574 for (unsigned int i = 0; i < len; i++)
3576 constructor_elt *c0 = &(*v0)[i];
3577 constructor_elt *c1 = &(*v1)[i];
3579 if (!operand_equal_p (c0->value, c1->value, flags)
3580 /* In GIMPLE the indexes can be either NULL or matching i.
3581 Double check this so we won't get false
3582 positives for GENERIC. */
3583 || (c0->index
3584 && (TREE_CODE (c0->index) != INTEGER_CST
3585 || compare_tree_int (c0->index, i)))
3586 || (c1->index
3587 && (TREE_CODE (c1->index) != INTEGER_CST
3588 || compare_tree_int (c1->index, i))))
3589 return false;
3591 return true;
3593 else if (TREE_CODE (arg0) == STATEMENT_LIST
3594 && (flags & OEP_LEXICOGRAPHIC))
3596 /* Compare the STATEMENT_LISTs. */
3597 tree_stmt_iterator tsi1, tsi2;
3598 tree body1 = CONST_CAST_TREE (arg0);
3599 tree body2 = CONST_CAST_TREE (arg1);
3600 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3601 tsi_next (&tsi1), tsi_next (&tsi2))
3603 /* The lists don't have the same number of statements. */
3604 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3605 return false;
3606 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3607 return true;
3608 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3609 flags & (OEP_LEXICOGRAPHIC
3610 | OEP_NO_HASH_CHECK)))
3611 return false;
3614 return false;
3616 case tcc_statement:
3617 switch (TREE_CODE (arg0))
3619 case RETURN_EXPR:
3620 if (flags & OEP_LEXICOGRAPHIC)
3621 return OP_SAME_WITH_NULL (0);
3622 return false;
3623 case DEBUG_BEGIN_STMT:
3624 if (flags & OEP_LEXICOGRAPHIC)
3625 return true;
3626 return false;
3627 default:
3628 return false;
3631 default:
3632 return false;
3635 #undef OP_SAME
3636 #undef OP_SAME_WITH_NULL
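/* Usage sketch (illustrative): operand_equal_p (a, b, 0) tests
   whether A and B are structurally indistinguishable, so two
   distinct INTEGER_CST nodes of value 7 compare equal, while
   expressions with side effects compare unequal unless
   OEP_MATCH_SIDE_EFFECTS is given; passing OEP_ONLY_CONST restricts
   the positive answers to constants.  */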
3639 /* Generate a hash value for an expression. This can be used iteratively
3640 by passing a previous result as the HSTATE argument. */
3642 void
3643 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3644 unsigned int flags)
3646 int i;
3647 enum tree_code code;
3648 enum tree_code_class tclass;
3650 if (t == NULL_TREE || t == error_mark_node)
3652 hstate.merge_hash (0);
3653 return;
3656 STRIP_ANY_LOCATION_WRAPPER (t);
3658 if (!(flags & OEP_ADDRESS_OF))
3659 STRIP_NOPS (t);
3661 code = TREE_CODE (t);
3663 switch (code)
3665 /* Alas, constants aren't shared, so we can't rely on pointer
3666 identity. */
3667 case VOID_CST:
3668 hstate.merge_hash (0);
3669 return;
3670 case INTEGER_CST:
3671 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3672 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3673 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3674 return;
3675 case REAL_CST:
3677 unsigned int val2;
3678 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3679 val2 = rvc_zero;
3680 else
3681 val2 = real_hash (TREE_REAL_CST_PTR (t));
3682 hstate.merge_hash (val2);
3683 return;
3685 case FIXED_CST:
3687 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3688 hstate.merge_hash (val2);
3689 return;
3691 case STRING_CST:
3692 hstate.add ((const void *) TREE_STRING_POINTER (t),
3693 TREE_STRING_LENGTH (t));
3694 return;
3695 case COMPLEX_CST:
3696 hash_operand (TREE_REALPART (t), hstate, flags);
3697 hash_operand (TREE_IMAGPART (t), hstate, flags);
3698 return;
3699 case VECTOR_CST:
3701 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3702 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3703 unsigned int count = vector_cst_encoded_nelts (t);
3704 for (unsigned int i = 0; i < count; ++i)
3705 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3706 return;
3708 case SSA_NAME:
3709 /* We can just compare by pointer. */
3710 hstate.add_hwi (SSA_NAME_VERSION (t));
3711 return;
3712 case PLACEHOLDER_EXPR:
3713 /* The node itself doesn't matter. */
3714 return;
3715 case BLOCK:
3716 case OMP_CLAUSE:
3717 /* Ignore. */
3718 return;
3719 case TREE_LIST:
3720 /* A list of expressions, for a CALL_EXPR or as the elements of a
3721 VECTOR_CST. */
3722 for (; t; t = TREE_CHAIN (t))
3723 hash_operand (TREE_VALUE (t), hstate, flags);
3724 return;
3725 case CONSTRUCTOR:
3727 unsigned HOST_WIDE_INT idx;
3728 tree field, value;
3729 flags &= ~OEP_ADDRESS_OF;
3730 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3731 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3733 /* In GIMPLE the indexes can be either NULL or matching i. */
3734 if (field == NULL_TREE)
3735 field = bitsize_int (idx);
3736 hash_operand (field, hstate, flags);
3737 hash_operand (value, hstate, flags);
3739 return;
3741 case STATEMENT_LIST:
3743 tree_stmt_iterator i;
3744 for (i = tsi_start (CONST_CAST_TREE (t));
3745 !tsi_end_p (i); tsi_next (&i))
3746 hash_operand (tsi_stmt (i), hstate, flags);
3747 return;
3749 case TREE_VEC:
3750 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3751 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3752 return;
3753 case IDENTIFIER_NODE:
3754 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3755 return;
3756 case FUNCTION_DECL:
3757 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3758 Otherwise nodes that compare equal according to operand_equal_p might
3759 get different hash codes. However, don't do this for machine specific
3760 or front end builtins, since the function code is overloaded in those
3761 cases. */
3762 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3763 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3765 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3766 code = TREE_CODE (t);
3768 /* FALL THROUGH */
3769 default:
3770 if (POLY_INT_CST_P (t))
3772 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3773 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3774 return;
3776 tclass = TREE_CODE_CLASS (code);
3778 if (tclass == tcc_declaration)
3780 /* DECLs have a unique ID. */
3781 hstate.add_hwi (DECL_UID (t));
3783 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3785 /* For comparisons that can be swapped, use the lower
3786 tree code. */
3787 enum tree_code ccode = swap_tree_comparison (code);
3788 if (code < ccode)
3789 ccode = code;
3790 hstate.add_object (ccode);
3791 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3792 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3794 else if (CONVERT_EXPR_CODE_P (code))
3796 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3797 operand_equal_p. */
3798 enum tree_code ccode = NOP_EXPR;
3799 hstate.add_object (ccode);
3801 /* Don't hash the type, that can lead to having nodes which
3802 compare equal according to operand_equal_p, but which
3803 have different hash codes. Make sure to include signedness
3804 in the hash computation. */
3805 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3806 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3808 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3809 else if (code == MEM_REF
3810 && (flags & OEP_ADDRESS_OF) != 0
3811 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3812 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3813 && integer_zerop (TREE_OPERAND (t, 1)))
3814 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3815 hstate, flags);
3816 /* Don't ICE on FE specific trees, or their arguments etc.
3817 during operand_equal_p hash verification. */
3818 else if (!IS_EXPR_CODE_CLASS (tclass))
3819 gcc_assert (flags & OEP_HASH_CHECK);
3820 else
3822 unsigned int sflags = flags;
3824 hstate.add_object (code);
3826 switch (code)
3828 case ADDR_EXPR:
3829 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3830 flags |= OEP_ADDRESS_OF;
3831 sflags = flags;
3832 break;
3834 case INDIRECT_REF:
3835 case MEM_REF:
3836 case TARGET_MEM_REF:
3837 flags &= ~OEP_ADDRESS_OF;
3838 sflags = flags;
3839 break;
3841 case COMPONENT_REF:
3842 if (sflags & OEP_ADDRESS_OF)
3844 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3845 if (TREE_OPERAND (t, 2))
3846 hash_operand (TREE_OPERAND (t, 2), hstate,
3847 flags & ~OEP_ADDRESS_OF);
3848 else
3850 tree field = TREE_OPERAND (t, 1);
3851 hash_operand (DECL_FIELD_OFFSET (field),
3852 hstate, flags & ~OEP_ADDRESS_OF);
3853 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3854 hstate, flags & ~OEP_ADDRESS_OF);
3856 return;
3858 break;
3859 case ARRAY_REF:
3860 case ARRAY_RANGE_REF:
3861 case BIT_FIELD_REF:
3862 sflags &= ~OEP_ADDRESS_OF;
3863 break;
3865 case COND_EXPR:
3866 flags &= ~OEP_ADDRESS_OF;
3867 break;
3869 case WIDEN_MULT_PLUS_EXPR:
3870 case WIDEN_MULT_MINUS_EXPR:
3872 /* The multiplication operands are commutative. */
3873 inchash::hash one, two;
3874 hash_operand (TREE_OPERAND (t, 0), one, flags);
3875 hash_operand (TREE_OPERAND (t, 1), two, flags);
3876 hstate.add_commutative (one, two);
3877 hash_operand (TREE_OPERAND (t, 2), two, flags);
3878 return;
3881 case CALL_EXPR:
3882 if (CALL_EXPR_FN (t) == NULL_TREE)
3883 hstate.add_int (CALL_EXPR_IFN (t));
3884 break;
3886 case TARGET_EXPR:
3887 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3888 Usually different TARGET_EXPRs should just use
3889 different temporaries in their slots. */
3890 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3891 return;
3893 case OBJ_TYPE_REF:
3894 /* Virtual table reference. */
3895 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3896 flags &= ~OEP_ADDRESS_OF;
3897 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3898 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3899 if (!virtual_method_call_p (t))
3900 return;
3901 if (tree c = obj_type_ref_class (t))
3903 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3904 /* We compute mangled names only when free_lang_data is run.
3905 In that case we can hash precisely. */
3906 if (TREE_CODE (c) == TYPE_DECL
3907 && DECL_ASSEMBLER_NAME_SET_P (c))
3908 hstate.add_object
3909 (IDENTIFIER_HASH_VALUE
3910 (DECL_ASSEMBLER_NAME (c)));
3912 return;
3913 default:
3914 break;
3917 /* Don't hash the type, that can lead to having nodes which
3918 compare equal according to operand_equal_p, but which
3919 have different hash codes. */
3920 if (code == NON_LVALUE_EXPR)
3922 /* Make sure to include signedness in the hash computation. */
3923 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3924 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3927 else if (commutative_tree_code (code))
3929 /* It's a commutative expression. We want to hash it the same
3930 however it appears. We do this by first hashing both operands
3931 and then rehashing based on the order of their independent
3932 hashes. */
3933 inchash::hash one, two;
3934 hash_operand (TREE_OPERAND (t, 0), one, flags);
3935 hash_operand (TREE_OPERAND (t, 1), two, flags);
3936 hstate.add_commutative (one, two);
3938 else
3939 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3940 hash_operand (TREE_OPERAND (t, i), hstate,
3941 i == 0 ? flags : sflags);
3943 return;
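/* Illustrative sketch (not from GCC's sources): the add_commutative calls
   above can be pictured as hashing both operands independently and then
   mixing the two sub-hashes in a canonical order, so that A + B and B + A
   hash identically.  The plain-C helpers below are hypothetical, not
   inchash's real implementation.  */
#if 0
#include <stdint.h>

static uint32_t
mix (uint32_t h, uint32_t v)
{
  /* Boost-style hash combine.  */
  return h ^ (v + 0x9e3779b9u + (h << 6) + (h >> 2));
}

static uint32_t
hash_commutative (uint32_t seed, uint32_t one, uint32_t two)
{
  /* Order the sub-hashes before mixing; swapping the operands then
     cannot change the result.  */
  uint32_t lo = one < two ? one : two;
  uint32_t hi = one < two ? two : one;
  return mix (mix (seed, lo), hi);
}
#endif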
3947 bool
3948 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3949 unsigned int flags, bool *ret)
3951 /* When checking and unless comparing DECL names, verify that if
3952 the outermost operand_equal_p call returns non-zero then ARG0
3953 and ARG1 have the same hash value. */
3954 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3956 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3958 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3960 inchash::hash hstate0 (0), hstate1 (0);
3961 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3962 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3963 hashval_t h0 = hstate0.end ();
3964 hashval_t h1 = hstate1.end ();
3965 gcc_assert (h0 == h1);
3967 *ret = true;
3969 else
3970 *ret = false;
3972 return true;
3975 return false;
3979 static operand_compare default_compare_instance;
3981 /* Convenience wrapper around the operand_compare class, because usually we do
3982 not need to play with the valueizer. */
3984 bool
3985 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3987 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3990 namespace inchash
3993 /* Generate a hash value for an expression. This can be used iteratively
3994 by passing a previous result as the HSTATE argument.
3996 This function is intended to produce the same hash for expressions which
3997 would compare equal using operand_equal_p. */
3998 void
3999 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4001 default_compare_instance.hash_operand (t, hstate, flags);
4006 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4007 with a different signedness or a narrower precision. */
4009 static bool
4010 operand_equal_for_comparison_p (tree arg0, tree arg1)
4012 if (operand_equal_p (arg0, arg1, 0))
4013 return true;
4015 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4016 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4017 return false;
4019 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4020 and see if the inner values are the same. This removes any
4021 signedness comparison, which doesn't matter here. */
4022 tree op0 = arg0;
4023 tree op1 = arg1;
4024 STRIP_NOPS (op0);
4025 STRIP_NOPS (op1);
4026 if (operand_equal_p (op0, op1, 0))
4027 return true;
4029 /* Discard a single widening conversion from ARG1 and see if the inner
4030 value is the same as ARG0. */
4031 if (CONVERT_EXPR_P (arg1)
4032 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4033 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4034 < TYPE_PRECISION (TREE_TYPE (arg1))
4035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4036 return true;
4038 return false;
4041 /* See if ARG is an expression that is either a comparison or is performing
4042 arithmetic on comparisons. The comparisons must only be comparing
4043 two different values, which will be stored in *CVAL1 and *CVAL2; if
4044 they are nonzero it means that some operands have already been found.
4045 No variables may be used anywhere else in the expression except in the
4046 comparisons.
4048 If this is true, return true. Otherwise, return false. */
4050 static bool
4051 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4053 enum tree_code code = TREE_CODE (arg);
4054 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4056 /* We can handle some of the tcc_expression cases here. */
4057 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4058 tclass = tcc_unary;
4059 else if (tclass == tcc_expression
4060 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4061 || code == COMPOUND_EXPR))
4062 tclass = tcc_binary;
4064 switch (tclass)
4066 case tcc_unary:
4067 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4069 case tcc_binary:
4070 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4071 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4073 case tcc_constant:
4074 return true;
4076 case tcc_expression:
4077 if (code == COND_EXPR)
4078 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4079 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4080 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4081 return false;
4083 case tcc_comparison:
4084 /* First see if we can handle the first operand, then the second. For
4085 the second operand, we know *CVAL1 can't be zero. It must be that
4086 one side of the comparison is each of the values; test for the
4087 case where this isn't true by failing if the two operands
4088 are the same. */
4090 if (operand_equal_p (TREE_OPERAND (arg, 0),
4091 TREE_OPERAND (arg, 1), 0))
4092 return false;
4094 if (*cval1 == 0)
4095 *cval1 = TREE_OPERAND (arg, 0);
4096 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4098 else if (*cval2 == 0)
4099 *cval2 = TREE_OPERAND (arg, 0);
4100 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4102 else
4103 return false;
4105 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4107 else if (*cval2 == 0)
4108 *cval2 = TREE_OPERAND (arg, 1);
4109 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4111 else
4112 return false;
4114 return true;
4116 default:
4117 return false;
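/* Usage sketch (illustrative): for a tree EXP representing
   "(a < b) || (a == b)" the walk above succeeds with *CVAL1 = a and
   *CVAL2 = b, whereas "(a < b) || (c == b)" fails because a third
   value appears in the comparisons.  EXP is assumed to have been
   built elsewhere.  */
#if 0
tree cval1 = NULL_TREE, cval2 = NULL_TREE;
if (twoval_comparison_p (exp, &cval1, &cval2))
  {
    /* cval1 and cval2 now name the only two values compared in EXP.  */
  }
#endif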
4121 /* ARG is a tree that is known to contain just arithmetic operations and
4122 comparisons. Evaluate the operations in the tree substituting NEW0 for
4123 any occurrence of OLD0 as an operand of a comparison and likewise for
4124 NEW1 and OLD1. */
4126 static tree
4127 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4128 tree old1, tree new1)
4130 tree type = TREE_TYPE (arg);
4131 enum tree_code code = TREE_CODE (arg);
4132 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4134 /* We can handle some of the tcc_expression cases here. */
4135 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4136 tclass = tcc_unary;
4137 else if (tclass == tcc_expression
4138 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4139 tclass = tcc_binary;
4141 switch (tclass)
4143 case tcc_unary:
4144 return fold_build1_loc (loc, code, type,
4145 eval_subst (loc, TREE_OPERAND (arg, 0),
4146 old0, new0, old1, new1));
4148 case tcc_binary:
4149 return fold_build2_loc (loc, code, type,
4150 eval_subst (loc, TREE_OPERAND (arg, 0),
4151 old0, new0, old1, new1),
4152 eval_subst (loc, TREE_OPERAND (arg, 1),
4153 old0, new0, old1, new1));
4155 case tcc_expression:
4156 switch (code)
4158 case SAVE_EXPR:
4159 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4160 old1, new1);
4162 case COMPOUND_EXPR:
4163 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4164 old1, new1);
4166 case COND_EXPR:
4167 return fold_build3_loc (loc, code, type,
4168 eval_subst (loc, TREE_OPERAND (arg, 0),
4169 old0, new0, old1, new1),
4170 eval_subst (loc, TREE_OPERAND (arg, 1),
4171 old0, new0, old1, new1),
4172 eval_subst (loc, TREE_OPERAND (arg, 2),
4173 old0, new0, old1, new1));
4174 default:
4175 break;
4177 /* Fall through - ??? */
4179 case tcc_comparison:
4181 tree arg0 = TREE_OPERAND (arg, 0);
4182 tree arg1 = TREE_OPERAND (arg, 1);
4184 /* We need to check both for exact equality and tree equality. The
4185 former will be true if the operand has a side-effect. In that
4186 case, we know the operand occurred exactly once. */
4188 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4189 arg0 = new0;
4190 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4191 arg0 = new1;
4193 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4194 arg1 = new0;
4195 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4196 arg1 = new1;
4198 return fold_build2_loc (loc, code, type, arg0, arg1);
4201 default:
4202 return arg;
4206 /* Return a tree for the case when the result of an expression is RESULT
4207 converted to TYPE and OMITTED was previously an operand of the expression
4208 but is now not needed (e.g., we folded OMITTED * 0).
4210 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4211 the conversion of RESULT to TYPE. */
4213 tree
4214 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4216 tree t = fold_convert_loc (loc, type, result);
4218 /* If the resulting operand is an empty statement, just return the omitted
4219 statement cast to void. */
4220 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4221 return build1_loc (loc, NOP_EXPR, void_type_node,
4222 fold_ignored_result (omitted));
4224 if (TREE_SIDE_EFFECTS (omitted))
4225 return build2_loc (loc, COMPOUND_EXPR, type,
4226 fold_ignored_result (omitted), t);
4228 return non_lvalue_loc (loc, t);
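/* Worked example (illustrative): when "f () * 0" is folded to 0, the call
   f () may have side effects and must still be evaluated, so the function
   above produces the equivalent of a COMPOUND_EXPR, roughly the C below;
   "f" and "result" are placeholders.  */
#if 0
/* (f (), 0) -- evaluate f () for its side effects, then yield 0.  */
result = (f (), 0);
#endif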
4231 /* Return a tree for the case when the result of an expression is RESULT
4232 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4233 of the expression but are now not needed.
4235 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4236 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4237 evaluated before OMITTED2. Otherwise, if neither has side effects,
4238 just do the conversion of RESULT to TYPE. */
4240 tree
4241 omit_two_operands_loc (location_t loc, tree type, tree result,
4242 tree omitted1, tree omitted2)
4244 tree t = fold_convert_loc (loc, type, result);
4246 if (TREE_SIDE_EFFECTS (omitted2))
4247 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4248 if (TREE_SIDE_EFFECTS (omitted1))
4249 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4251 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4255 /* Return a simplified tree node for the truth-negation of ARG. This
4256 never alters ARG itself. We assume that ARG is an operation that
4257 returns a truth value (0 or 1).
4259 FIXME: one would think we would fold the result, but it causes
4260 problems with the dominator optimizer. */
4262 static tree
4263 fold_truth_not_expr (location_t loc, tree arg)
4265 tree type = TREE_TYPE (arg);
4266 enum tree_code code = TREE_CODE (arg);
4267 location_t loc1, loc2;
4269 /* If this is a comparison, we can simply invert it, except for
4270 floating-point non-equality comparisons, in which case we just
4271 enclose a TRUTH_NOT_EXPR around what we have. */
4273 if (TREE_CODE_CLASS (code) == tcc_comparison)
4275 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4276 if (FLOAT_TYPE_P (op_type)
4277 && flag_trapping_math
4278 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4279 && code != NE_EXPR && code != EQ_EXPR)
4280 return NULL_TREE;
4282 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4283 if (code == ERROR_MARK)
4284 return NULL_TREE;
4286 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4287 TREE_OPERAND (arg, 1));
4288 copy_warning (ret, arg);
4289 return ret;
4292 switch (code)
4294 case INTEGER_CST:
4295 return constant_boolean_node (integer_zerop (arg), type);
4297 case TRUTH_AND_EXPR:
4298 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4299 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4300 return build2_loc (loc, TRUTH_OR_EXPR, type,
4301 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4302 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4304 case TRUTH_OR_EXPR:
4305 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4306 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4307 return build2_loc (loc, TRUTH_AND_EXPR, type,
4308 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4309 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4311 case TRUTH_XOR_EXPR:
4312 /* Here we can invert either operand. We invert the first operand
4313 unless the second operand is a TRUTH_NOT_EXPR in which case our
4314 result is the XOR of the first operand with the inside of the
4315 negation of the second operand. */
4317 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4318 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4319 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4320 else
4321 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4322 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4323 TREE_OPERAND (arg, 1));
4325 case TRUTH_ANDIF_EXPR:
4326 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4327 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4328 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4329 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4330 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4332 case TRUTH_ORIF_EXPR:
4333 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4334 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4335 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4336 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4337 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4339 case TRUTH_NOT_EXPR:
4340 return TREE_OPERAND (arg, 0);
4342 case COND_EXPR:
4344 tree arg1 = TREE_OPERAND (arg, 1);
4345 tree arg2 = TREE_OPERAND (arg, 2);
4347 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4348 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4350 /* A COND_EXPR may have a throw as one operand, which
4351 then has void type. Just leave void operands
4352 as they are. */
4353 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4354 VOID_TYPE_P (TREE_TYPE (arg1))
4355 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4356 VOID_TYPE_P (TREE_TYPE (arg2))
4357 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4360 case COMPOUND_EXPR:
4361 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4362 return build2_loc (loc, COMPOUND_EXPR, type,
4363 TREE_OPERAND (arg, 0),
4364 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4366 case NON_LVALUE_EXPR:
4367 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4368 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4370 CASE_CONVERT:
4371 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4372 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4374 /* fall through */
4376 case FLOAT_EXPR:
4377 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4378 return build1_loc (loc, TREE_CODE (arg), type,
4379 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4381 case BIT_AND_EXPR:
4382 if (!integer_onep (TREE_OPERAND (arg, 1)))
4383 return NULL_TREE;
4384 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4386 case SAVE_EXPR:
4387 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4389 case CLEANUP_POINT_EXPR:
4390 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4391 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4392 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4394 default:
4395 return NULL_TREE;
4399 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4400 assume that ARG is an operation that returns a truth value (0 or 1
4401 for scalars, 0 or -1 for vectors). Return the folded expression if
4402 folding is successful. Otherwise, return NULL_TREE. */
4404 static tree
4405 fold_invert_truthvalue (location_t loc, tree arg)
4407 tree type = TREE_TYPE (arg);
4408 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4409 ? BIT_NOT_EXPR
4410 : TRUTH_NOT_EXPR,
4411 type, arg);
4414 /* Return a simplified tree node for the truth-negation of ARG. This
4415 never alters ARG itself. We assume that ARG is an operation that
4416 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4418 tree
4419 invert_truthvalue_loc (location_t loc, tree arg)
4421 if (TREE_CODE (arg) == ERROR_MARK)
4422 return arg;
4424 tree type = TREE_TYPE (arg);
4425 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4426 ? BIT_NOT_EXPR
4427 : TRUTH_NOT_EXPR,
4428 type, arg);
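/* Usage sketch (illustrative): inverting "a < b && c" yields
   "a >= b || !c" via the De Morgan cases above; for floating-point
   operands with -ftrapping-math the comparison is instead wrapped in
   a TRUTH_NOT_EXPR, since !(a < b) is not a >= b when a NaN is
   involved.  LOC and COND are assumed to have been built elsewhere.  */
#if 0
tree inverted = invert_truthvalue_loc (loc, cond);
#endif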
4431 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4432 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4433 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4434 is the original memory reference used to preserve the alias set of
4435 the access. */
4437 static tree
4438 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4439 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4440 int unsignedp, int reversep)
4442 tree result, bftype;
4444 /* Attempt not to lose the access path if possible. */
4445 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4447 tree ninner = TREE_OPERAND (orig_inner, 0);
4448 machine_mode nmode;
4449 poly_int64 nbitsize, nbitpos;
4450 tree noffset;
4451 int nunsignedp, nreversep, nvolatilep = 0;
4452 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4453 &noffset, &nmode, &nunsignedp,
4454 &nreversep, &nvolatilep);
4455 if (base == inner
4456 && noffset == NULL_TREE
4457 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4458 && !reversep
4459 && !nreversep
4460 && !nvolatilep)
4462 inner = ninner;
4463 bitpos -= nbitpos;
4467 alias_set_type iset = get_alias_set (orig_inner);
4468 if (iset == 0 && get_alias_set (inner) != iset)
4469 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4470 build_fold_addr_expr (inner),
4471 build_int_cst (ptr_type_node, 0));
4473 if (known_eq (bitpos, 0) && !reversep)
4475 tree size = TYPE_SIZE (TREE_TYPE (inner));
4476 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4477 || POINTER_TYPE_P (TREE_TYPE (inner)))
4478 && tree_fits_shwi_p (size)
4479 && tree_to_shwi (size) == bitsize)
4480 return fold_convert_loc (loc, type, inner);
4483 bftype = type;
4484 if (TYPE_PRECISION (bftype) != bitsize
4485 || TYPE_UNSIGNED (bftype) == !unsignedp)
4486 bftype = build_nonstandard_integer_type (bitsize, 0);
4488 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4489 bitsize_int (bitsize), bitsize_int (bitpos));
4490 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4492 if (bftype != type)
4493 result = fold_convert_loc (loc, type, result);
4495 return result;
4498 /* Optimize a bit-field compare.
4500 There are two cases: First is a compare against a constant and the
4501 second is a comparison of two items where the fields are at the same
4502 bit position relative to the start of a chunk (byte, halfword, word)
4503 large enough to contain it. In these cases we can avoid the shift
4504 implicit in bitfield extractions.
4506 For constants, we emit a compare of the shifted constant with the
4507 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4508 compared. For two fields at the same position, we do the ANDs with the
4509 similar mask and compare the result of the ANDs.
4511 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4512 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4513 are the left and right operands of the comparison, respectively.
4515 If the optimization described above can be done, we return the resulting
4516 tree. Otherwise we return zero. */
4518 static tree
4519 optimize_bit_field_compare (location_t loc, enum tree_code code,
4520 tree compare_type, tree lhs, tree rhs)
4522 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4523 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4524 tree type = TREE_TYPE (lhs);
4525 tree unsigned_type;
4526 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4527 machine_mode lmode, rmode;
4528 scalar_int_mode nmode;
4529 int lunsignedp, runsignedp;
4530 int lreversep, rreversep;
4531 int lvolatilep = 0, rvolatilep = 0;
4532 tree linner, rinner = NULL_TREE;
4533 tree mask;
4534 tree offset;
4536 /* Get all the information about the extractions being done. If the bit size
4537 is the same as the size of the underlying object, we aren't doing an
4538 extraction at all and so can do nothing. We also don't want to
4539 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4540 then will no longer be able to replace it. */
4541 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4542 &lunsignedp, &lreversep, &lvolatilep);
4543 if (linner == lhs
4544 || !known_size_p (plbitsize)
4545 || !plbitsize.is_constant (&lbitsize)
4546 || !plbitpos.is_constant (&lbitpos)
4547 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4548 || offset != 0
4549 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4550 || lvolatilep)
4551 return 0;
4553 if (const_p)
4554 rreversep = lreversep;
4555 else
4557 /* If this is not a constant, we can only do something if bit positions,
4558 sizes, signedness and storage order are the same. */
4559 rinner
4560 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4561 &runsignedp, &rreversep, &rvolatilep);
4563 if (rinner == rhs
4564 || maybe_ne (lbitpos, rbitpos)
4565 || maybe_ne (lbitsize, rbitsize)
4566 || lunsignedp != runsignedp
4567 || lreversep != rreversep
4568 || offset != 0
4569 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4570 || rvolatilep)
4571 return 0;
4574 /* Honor the C++ memory model and mimic what RTL expansion does. */
4575 poly_uint64 bitstart = 0;
4576 poly_uint64 bitend = 0;
4577 if (TREE_CODE (lhs) == COMPONENT_REF)
4579 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4580 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4581 return 0;
4584 /* See if we can find a mode to refer to this field. We should be able to,
4585 but fail if we can't. */
4586 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4587 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4588 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4589 TYPE_ALIGN (TREE_TYPE (rinner))),
4590 BITS_PER_WORD, false, &nmode))
4591 return 0;
4593 /* Set signed and unsigned types of the precision of this mode for the
4594 shifts below. */
4595 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4597 /* Compute the bit position and size for the new reference and our offset
4598 within it. If the new reference is the same size as the original, we
4599 won't optimize anything, so return zero. */
4600 nbitsize = GET_MODE_BITSIZE (nmode);
4601 nbitpos = lbitpos & ~ (nbitsize - 1);
4602 lbitpos -= nbitpos;
4603 if (nbitsize == lbitsize)
4604 return 0;
4606 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4607 lbitpos = nbitsize - lbitsize - lbitpos;
4609 /* Make the mask to be used against the extracted field. */
4610 mask = build_int_cst_type (unsigned_type, -1);
4611 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4612 mask = const_binop (RSHIFT_EXPR, mask,
4613 size_int (nbitsize - lbitsize - lbitpos));
4615 if (! const_p)
4617 if (nbitpos < 0)
4618 return 0;
4620 /* If not comparing with constant, just rework the comparison
4621 and return. */
4622 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4623 nbitsize, nbitpos, 1, lreversep);
4624 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4625 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4626 nbitsize, nbitpos, 1, rreversep);
4627 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4628 return fold_build2_loc (loc, code, compare_type, t1, t2);
4631 /* Otherwise, we are handling the constant case. See if the constant is too
4632 big for the field. Warn and return a tree for 0 (false) if so. We do
4633 this not only for its own sake, but to avoid having to test for this
4634 error case below. If we didn't, we might generate wrong code.
4636 For unsigned fields, the constant shifted right by the field length should
4637 be all zero. For signed fields, the high-order bits should agree with
4638 the sign bit. */
4640 if (lunsignedp)
4642 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4644 warning (0, "comparison is always %d due to width of bit-field",
4645 code == NE_EXPR);
4646 return constant_boolean_node (code == NE_EXPR, compare_type);
4649 else
4651 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4652 if (tem != 0 && tem != -1)
4654 warning (0, "comparison is always %d due to width of bit-field",
4655 code == NE_EXPR);
4656 return constant_boolean_node (code == NE_EXPR, compare_type);
4660 if (nbitpos < 0)
4661 return 0;
4663 /* Single-bit compares should always be against zero. */
4664 if (lbitsize == 1 && ! integer_zerop (rhs))
4666 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4667 rhs = build_int_cst (type, 0);
4670 /* Make a new bitfield reference, shift the constant over the
4671 appropriate number of bits and mask it with the computed mask
4672 (in case this was a signed field). If we changed it, make a new one. */
4673 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4674 nbitsize, nbitpos, 1, lreversep);
4676 rhs = const_binop (BIT_AND_EXPR,
4677 const_binop (LSHIFT_EXPR,
4678 fold_convert_loc (loc, unsigned_type, rhs),
4679 size_int (lbitpos)),
4680 mask);
4682 lhs = build2_loc (loc, code, compare_type,
4683 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4684 return lhs;
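/* Worked example (standalone, not from GCC; assumes the usual
   little-endian layout where field a occupies bits 0..2 and b bits 3..6
   of one byte): "s.b == 5" is folded into a mask-and-compare on the
   containing unit instead of an extract-and-shift.  */
#if 0
#include <assert.h>
#include <string.h>

struct S { unsigned a : 3; unsigned b : 4; };

int
main (void)
{
  struct S s = { 1, 5 };
  unsigned char byte;
  memcpy (&byte, &s, 1);		/* the containing chunk */
  assert ((byte & 0x78) == (5 << 3));	/* same result as s.b == 5 */
  return 0;
}
#endif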
4687 /* Subroutine for fold_truth_andor_1: decode a field reference.
4689 If EXP is a comparison reference, we return the innermost reference.
4691 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4692 set to the starting bit number.
4694 If the innermost field can be completely contained in a mode-sized
4695 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4697 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4698 otherwise it is not changed.
4700 *PUNSIGNEDP is set to the signedness of the field.
4702 *PREVERSEP is set to the storage order of the field.
4704 *PMASK is set to the mask used. This is either contained in a
4705 BIT_AND_EXPR or derived from the width of the field.
4707 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4709 Return 0 if this is not a component reference or is one that we can't
4710 do anything with. */
4712 static tree
4713 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4714 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4715 int *punsignedp, int *preversep, int *pvolatilep,
4716 tree *pmask, tree *pand_mask)
4718 tree exp = *exp_;
4719 tree outer_type = 0;
4720 tree and_mask = 0;
4721 tree mask, inner, offset;
4722 tree unsigned_type;
4723 unsigned int precision;
4725 /* All the optimizations using this function assume integer fields.
4726 There are problems with FP fields since the type_for_size call
4727 below can fail for, e.g., XFmode. */
4728 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4729 return NULL_TREE;
4731 /* We are interested in the bare arrangement of bits, so strip everything
4732 that doesn't affect the machine mode. However, record the type of the
4733 outermost expression if it may matter below. */
4734 if (CONVERT_EXPR_P (exp)
4735 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4736 outer_type = TREE_TYPE (exp);
4737 STRIP_NOPS (exp);
4739 if (TREE_CODE (exp) == BIT_AND_EXPR)
4741 and_mask = TREE_OPERAND (exp, 1);
4742 exp = TREE_OPERAND (exp, 0);
4743 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4744 if (TREE_CODE (and_mask) != INTEGER_CST)
4745 return NULL_TREE;
4748 poly_int64 poly_bitsize, poly_bitpos;
4749 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4750 pmode, punsignedp, preversep, pvolatilep);
4751 if ((inner == exp && and_mask == 0)
4752 || !poly_bitsize.is_constant (pbitsize)
4753 || !poly_bitpos.is_constant (pbitpos)
4754 || *pbitsize < 0
4755 || offset != 0
4756 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4757 /* Reject out-of-bound accesses (PR79731). */
4758 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4759 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4760 *pbitpos + *pbitsize) < 0))
4761 return NULL_TREE;
4763 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4764 if (unsigned_type == NULL_TREE)
4765 return NULL_TREE;
4767 *exp_ = exp;
4769 /* If the number of bits in the reference is the same as the bitsize of
4770 the outer type, then the outer type gives the signedness. Otherwise
4771 (in case of a small bitfield) the signedness is unchanged. */
4772 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4773 *punsignedp = TYPE_UNSIGNED (outer_type);
4775 /* Compute the mask to access the bitfield. */
4776 precision = TYPE_PRECISION (unsigned_type);
4778 mask = build_int_cst_type (unsigned_type, -1);
4780 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4781 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4783 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4784 if (and_mask != 0)
4785 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4786 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4788 *pmask = mask;
4789 *pand_mask = and_mask;
4790 return inner;
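/* Worked example (standalone, not part of GCC): the LSHIFT/RSHIFT pair
   above builds a mask of *PBITSIZE low-order ones.  With a precision of
   32 and a bitsize of 4:  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t mask = UINT32_C (0xffffffff);	/* build_int_cst_type (..., -1) */
  mask <<= 32 - 4;	/* LSHIFT_EXPR by precision - bitsize */
  mask >>= 32 - 4;	/* RSHIFT_EXPR (logical, the type is unsigned) */
  assert (mask == 0xf);
  return 0;
}
#endif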
4793 /* Return true if MASK represents a mask of SIZE ones in the low-order
4794 bit positions and the type of MASK is signed. */
4796 static bool
4797 all_ones_mask_p (const_tree mask, unsigned int size)
4799 tree type = TREE_TYPE (mask);
4800 unsigned int precision = TYPE_PRECISION (type);
4802 /* If this function returns true when the type of the mask is
4803 UNSIGNED, then there will be errors. In particular see
4804 gcc.c-torture/execute/990326-1.c. There does not appear to be
4805 any documentation paper trail as to why this is so. But the
4806 pre-wide-int code worked with that restriction and it has been
4807 preserved here. */
4808 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4809 return false;
4811 return wi::mask (size, false, precision) == wi::to_wide (mask);
4814 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4815 represents the sign bit of EXP's type. If EXP represents a sign
4816 or zero extension, also test VAL against the unextended type.
4817 The return value is the (sub)expression whose sign bit is VAL,
4818 or NULL_TREE otherwise. */
4820 tree
4821 sign_bit_p (tree exp, const_tree val)
4823 int width;
4824 tree t;
4826 /* Tree EXP must have an integral type. */
4827 t = TREE_TYPE (exp);
4828 if (! INTEGRAL_TYPE_P (t))
4829 return NULL_TREE;
4831 /* Tree VAL must be an integer constant. */
4832 if (TREE_CODE (val) != INTEGER_CST
4833 || TREE_OVERFLOW (val))
4834 return NULL_TREE;
4836 width = TYPE_PRECISION (t);
4837 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4838 return exp;
4840 /* Handle extension from a narrower type. */
4841 if (TREE_CODE (exp) == NOP_EXPR
4842 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4843 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4845 return NULL_TREE;
4848 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4849 to be evaluated unconditionally. */
4851 static bool
4852 simple_operand_p (const_tree exp)
4854 /* Strip any conversions that don't change the machine mode. */
4855 STRIP_NOPS (exp);
4857 return (CONSTANT_CLASS_P (exp)
4858 || TREE_CODE (exp) == SSA_NAME
4859 || (DECL_P (exp)
4860 && ! TREE_ADDRESSABLE (exp)
4861 && ! TREE_THIS_VOLATILE (exp)
4862 && ! DECL_NONLOCAL (exp)
4863 /* Don't regard global variables as simple. They may be
4864 allocated in ways unknown to the compiler (shared memory,
4865 #pragma weak, etc). */
4866 && ! TREE_PUBLIC (exp)
4867 && ! DECL_EXTERNAL (exp)
4868 /* Weakrefs are not safe to be read, since they can be NULL.
4869 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4870 have DECL_WEAK flag set. */
4871 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4872 /* Loading a static variable is unduly expensive, but global
4873 registers aren't expensive. */
4874 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4877 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4878 to be evaluated unconditionally.
4879 In addition to simple_operand_p, we assume that comparisons, conversions,
4880 and logic-not operations are simple, if their operands are simple, too. */
4882 static bool
4883 simple_operand_p_2 (tree exp)
4885 enum tree_code code;
4887 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4888 return false;
4890 while (CONVERT_EXPR_P (exp))
4891 exp = TREE_OPERAND (exp, 0);
4893 code = TREE_CODE (exp);
4895 if (TREE_CODE_CLASS (code) == tcc_comparison)
4896 return (simple_operand_p (TREE_OPERAND (exp, 0))
4897 && simple_operand_p (TREE_OPERAND (exp, 1)));
4899 if (code == TRUTH_NOT_EXPR)
4900 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4902 return simple_operand_p (exp);
4906 /* The following functions are subroutines to fold_range_test and allow it to
4907 try to change a logical combination of comparisons into a range test.
4909 For example, both
4910 X == 2 || X == 3 || X == 4 || X == 5
4911 and
4912 X >= 2 && X <= 5
4913 are converted to
4914 (unsigned) (X - 2) <= 3
4916 We describe each set of comparisons as being either inside or outside
4917 a range, using a variable named like IN_P, and then describe the
4918 range with a lower and upper bound. If one of the bounds is omitted,
4919 it represents either the highest or lowest value of the type.
4921 In the comments below, we represent a range by two numbers in brackets
4922 preceded by a "+" to designate being inside that range, or a "-" to
4923 designate being outside that range, so the condition can be inverted by
4924 flipping the prefix. An omitted bound is represented by a "-". For
4925 example, "- [-, 10]" means being outside the range starting at the lowest
4926 possible value and ending at 10, in other words, being greater than 10.
4927 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4928 always false.
4930 We set up things so that the missing bounds are handled in a consistent
4931 manner so neither a missing bound nor "true" and "false" need to be
4932 handled using a special case. */
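/* Standalone sanity check (not part of GCC) of the transformation quoted
   above: X - 2 maps [2, 5] onto [0, 3] and, read as unsigned, maps every
   value below 2 to a huge number, so one unsigned comparison replaces the
   whole chain of tests.  */
#if 0
#include <assert.h>

static int in_range_orig (int x)   { return x == 2 || x == 3 || x == 4 || x == 5; }
static int in_range_folded (int x) { return (unsigned) (x - 2) <= 3u; }

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    assert (in_range_orig (x) == in_range_folded (x));
  return 0;
}
#endif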
4934 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4935 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4936 and UPPER1_P are nonzero if the respective argument is an upper bound
4937 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4938 must be specified for a comparison. ARG1 will be converted to ARG0's
4939 type if both are specified. */
4941 static tree
4942 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4943 tree arg1, int upper1_p)
4945 tree tem;
4946 int result;
4947 int sgn0, sgn1;
4949 /* If neither arg represents infinity, do the normal operation.
4950 Else, if not a comparison, return infinity. Else handle the special
4951 comparison rules. Note that most of the cases below won't occur, but
4952 are handled for consistency. */
4954 if (arg0 != 0 && arg1 != 0)
4956 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4957 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4958 STRIP_NOPS (tem);
4959 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4962 if (TREE_CODE_CLASS (code) != tcc_comparison)
4963 return 0;
4965 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4966 for neither. In real maths, we cannot assume open-ended ranges are
4967 the same. But, this is computer arithmetic, where numbers are finite.
4968 We can therefore make the transformation of any unbounded range with
4969 the value Z, Z being greater than any representable number. This permits
4970 us to treat unbounded ranges as equal. */
4971 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4972 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4973 switch (code)
4975 case EQ_EXPR:
4976 result = sgn0 == sgn1;
4977 break;
4978 case NE_EXPR:
4979 result = sgn0 != sgn1;
4980 break;
4981 case LT_EXPR:
4982 result = sgn0 < sgn1;
4983 break;
4984 case LE_EXPR:
4985 result = sgn0 <= sgn1;
4986 break;
4987 case GT_EXPR:
4988 result = sgn0 > sgn1;
4989 break;
4990 case GE_EXPR:
4991 result = sgn0 >= sgn1;
4992 break;
4993 default:
4994 gcc_unreachable ();
4997 return constant_boolean_node (result, type);
5000 /* Helper routine for make_range. Perform one step for it, return
5001 new expression if the loop should continue or NULL_TREE if it should
5002 stop. */
5004 tree
5005 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5006 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5007 bool *strict_overflow_p)
5009 tree arg0_type = TREE_TYPE (arg0);
5010 tree n_low, n_high, low = *p_low, high = *p_high;
5011 int in_p = *p_in_p, n_in_p;
5013 switch (code)
5015 case TRUTH_NOT_EXPR:
5016 /* We can only do something if the range is testing for zero. */
5017 if (low == NULL_TREE || high == NULL_TREE
5018 || ! integer_zerop (low) || ! integer_zerop (high))
5019 return NULL_TREE;
5020 *p_in_p = ! in_p;
5021 return arg0;
5023 case EQ_EXPR: case NE_EXPR:
5024 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5025 /* We can only do something if the range is testing for zero
5026 and if the second operand is an integer constant. Note that
5027 saying something is "in" the range we make is done by
5028 complementing IN_P since it will set in the initial case of
5029 being not equal to zero; "out" is leaving it alone. */
5030 if (low == NULL_TREE || high == NULL_TREE
5031 || ! integer_zerop (low) || ! integer_zerop (high)
5032 || TREE_CODE (arg1) != INTEGER_CST)
5033 return NULL_TREE;
5035 switch (code)
5037 case NE_EXPR: /* - [c, c] */
5038 low = high = arg1;
5039 break;
5040 case EQ_EXPR: /* + [c, c] */
5041 in_p = ! in_p, low = high = arg1;
5042 break;
5043 case GT_EXPR: /* - [-, c] */
5044 low = 0, high = arg1;
5045 break;
5046 case GE_EXPR: /* + [c, -] */
5047 in_p = ! in_p, low = arg1, high = 0;
5048 break;
5049 case LT_EXPR: /* - [c, -] */
5050 low = arg1, high = 0;
5051 break;
5052 case LE_EXPR: /* + [-, c] */
5053 in_p = ! in_p, low = 0, high = arg1;
5054 break;
5055 default:
5056 gcc_unreachable ();
5059 /* If this is an unsigned comparison, we also know that EXP is
5060 greater than or equal to zero. We base the range tests we make
5061 on that fact, so we record it here so we can parse existing
5062 range tests. We test arg0_type since often the return type
5063 of, e.g. EQ_EXPR, is boolean. */
5064 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5066 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5067 in_p, low, high, 1,
5068 build_int_cst (arg0_type, 0),
5069 NULL_TREE))
5070 return NULL_TREE;
5072 in_p = n_in_p, low = n_low, high = n_high;
5074 /* If the high bound is missing, but we have a nonzero low
5075 bound, reverse the range so it goes from zero to the low bound
5076 minus 1. */
5077 if (high == 0 && low && ! integer_zerop (low))
5079 in_p = ! in_p;
5080 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5081 build_int_cst (TREE_TYPE (low), 1), 0);
5082 low = build_int_cst (arg0_type, 0);
5086 *p_low = low;
5087 *p_high = high;
5088 *p_in_p = in_p;
5089 return arg0;
5091 case NEGATE_EXPR:
5092 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5093 low and high are non-NULL, then normalize will DTRT. */
5094 if (!TYPE_UNSIGNED (arg0_type)
5095 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5097 if (low == NULL_TREE)
5098 low = TYPE_MIN_VALUE (arg0_type);
5099 if (high == NULL_TREE)
5100 high = TYPE_MAX_VALUE (arg0_type);
5103 /* (-x) IN [a,b] -> x in [-b, -a] */
5104 n_low = range_binop (MINUS_EXPR, exp_type,
5105 build_int_cst (exp_type, 0),
5106 0, high, 1);
5107 n_high = range_binop (MINUS_EXPR, exp_type,
5108 build_int_cst (exp_type, 0),
5109 0, low, 0);
5110 if (n_high != 0 && TREE_OVERFLOW (n_high))
5111 return NULL_TREE;
5112 goto normalize;
5114 case BIT_NOT_EXPR:
5115 /* ~ X -> -X - 1 */
5116 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5117 build_int_cst (exp_type, 1));
5119 case PLUS_EXPR:
5120 case MINUS_EXPR:
5121 if (TREE_CODE (arg1) != INTEGER_CST)
5122 return NULL_TREE;
5124 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5125 move a constant to the other side. */
5126 if (!TYPE_UNSIGNED (arg0_type)
5127 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5128 return NULL_TREE;
5130 /* If EXP is signed, any overflow in the computation is undefined,
5131 so we don't worry about it so long as our computations on
5132 the bounds don't overflow. For unsigned, overflow is defined
5133 and this is exactly the right thing. */
5134 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5135 arg0_type, low, 0, arg1, 0);
5136 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5137 arg0_type, high, 1, arg1, 0);
5138 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5139 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5140 return NULL_TREE;
5142 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5143 *strict_overflow_p = true;
5145 normalize:
5146 /* Check for an unsigned range which has wrapped around the maximum
5147 value thus making n_high < n_low, and normalize it. */
5148 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5150 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5151 build_int_cst (TREE_TYPE (n_high), 1), 0);
5152 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5153 build_int_cst (TREE_TYPE (n_low), 1), 0);
5155 /* If the range is of the form +/- [ x+1, x ], we won't
5156 be able to normalize it. But then, it represents the
5157 whole range or the empty set, so make it
5158 +/- [ -, - ]. */
5159 if (tree_int_cst_equal (n_low, low)
5160 && tree_int_cst_equal (n_high, high))
5161 low = high = 0;
5162 else
5163 in_p = ! in_p;
5165 else
5166 low = n_low, high = n_high;
5168 *p_low = low;
5169 *p_high = high;
5170 *p_in_p = in_p;
5171 return arg0;
5173 CASE_CONVERT:
5174 case NON_LVALUE_EXPR:
5175 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5176 return NULL_TREE;
5178 if (! INTEGRAL_TYPE_P (arg0_type)
5179 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5180 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5181 return NULL_TREE;
5183 n_low = low, n_high = high;
5185 if (n_low != 0)
5186 n_low = fold_convert_loc (loc, arg0_type, n_low);
5188 if (n_high != 0)
5189 n_high = fold_convert_loc (loc, arg0_type, n_high);
5191 /* If we're converting arg0 from an unsigned type, to exp,
5192 a signed type, we will be doing the comparison as unsigned.
5193 The tests above have already verified that LOW and HIGH
5194 are both positive.
5196 So we have to ensure that we will handle large unsigned
5197 values the same way that the current signed bounds treat
5198 negative values. */
5200 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5202 tree high_positive;
5203 tree equiv_type;
5204 /* For fixed-point modes, we need to pass the saturating flag
5205 as the 2nd parameter. */
5206 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5207 equiv_type
5208 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5209 TYPE_SATURATING (arg0_type));
5210 else
5211 equiv_type
5212 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5214 /* A range without an upper bound is, naturally, unbounded.
5215 Since convert would have cropped a very large value, use
5216 the max value for the destination type. */
5217 high_positive
5218 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5219 : TYPE_MAX_VALUE (arg0_type);
5221 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5222 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5223 fold_convert_loc (loc, arg0_type,
5224 high_positive),
5225 build_int_cst (arg0_type, 1));
5227 /* If the low bound is specified, "and" the range with the
5228 range for which the original unsigned value will be
5229 positive. */
5230 if (low != 0)
5232 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5233 1, fold_convert_loc (loc, arg0_type,
5234 integer_zero_node),
5235 high_positive))
5236 return NULL_TREE;
5238 in_p = (n_in_p == in_p);
5240 else
5242 /* Otherwise, "or" the range with the range of the input
5243 that will be interpreted as negative. */
5244 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5245 1, fold_convert_loc (loc, arg0_type,
5246 integer_zero_node),
5247 high_positive))
5248 return NULL_TREE;
5250 in_p = (in_p != n_in_p);
5254 *p_low = n_low;
5255 *p_high = n_high;
5256 *p_in_p = in_p;
5257 return arg0;
5259 default:
5260 return NULL_TREE;
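/* Standalone sanity check (not part of GCC) of the BIT_NOT_EXPR step
   above, which rewrites ~X as -X - 1 (a two's-complement identity).  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    assert (~x == -x - 1);
  return 0;
}
#endif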
5264 /* Given EXP, a logical expression, set the range it is testing into
5265 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5266 actually being tested. *PLOW and *PHIGH will be made of the same
5267 type as the returned expression. If EXP is not a comparison, we
5268 will most likely not be returning a useful value and range. Set
5269 *STRICT_OVERFLOW_P to true if the return value is only valid
5270 because signed overflow is undefined; otherwise, do not change
5271 *STRICT_OVERFLOW_P. */
5273 tree
5274 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5275 bool *strict_overflow_p)
5277 enum tree_code code;
5278 tree arg0, arg1 = NULL_TREE;
5279 tree exp_type, nexp;
5280 int in_p;
5281 tree low, high;
5282 location_t loc = EXPR_LOCATION (exp);
5284 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5285 and see if we can refine the range. Some of the cases below may not
5286 happen, but it doesn't seem worth worrying about this. We "continue"
5287 the outer loop when we've changed something; otherwise we "break"
5288 the switch, which will "break" the while. */
5290 in_p = 0;
5291 low = high = build_int_cst (TREE_TYPE (exp), 0);
5293 while (1)
5295 code = TREE_CODE (exp);
5296 exp_type = TREE_TYPE (exp);
5297 arg0 = NULL_TREE;
5299 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5301 if (TREE_OPERAND_LENGTH (exp) > 0)
5302 arg0 = TREE_OPERAND (exp, 0);
5303 if (TREE_CODE_CLASS (code) == tcc_binary
5304 || TREE_CODE_CLASS (code) == tcc_comparison
5305 || (TREE_CODE_CLASS (code) == tcc_expression
5306 && TREE_OPERAND_LENGTH (exp) > 1))
5307 arg1 = TREE_OPERAND (exp, 1);
5309 if (arg0 == NULL_TREE)
5310 break;
5312 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5313 &high, &in_p, strict_overflow_p);
5314 if (nexp == NULL_TREE)
5315 break;
5316 exp = nexp;
5319 /* If EXP is a constant, we can evaluate whether this is true or false. */
5320 if (TREE_CODE (exp) == INTEGER_CST)
5322 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5323 exp, 0, low, 0))
5324 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5325 exp, 1, high, 1)));
5326 low = high = 0;
5327 exp = 0;
5330 *pin_p = in_p, *plow = low, *phigh = high;
5331 return exp;
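/* Usage sketch (illustrative): for EXP = "x < 5", make_range returns x
   with *PIN_P = 0 and bounds [5, +inf], i.e. "x is outside [5, +inf]";
   for EXP = "x == 3" it returns x with *PIN_P = 1 and
   *PLOW = *PHIGH = 3.  EXP is assumed to have been built elsewhere.  */
#if 0
int in_p;
tree low, high;
bool strict_ovf = false;
tree var = make_range (exp, &in_p, &low, &high, &strict_ovf);
#endif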
5334 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5335 a bitwise check i.e. when
5336 LOW == 0xXX...X00...0
5337 HIGH == 0xXX...X11...1
5338 Return corresponding mask in MASK and stem in VALUE. */
5340 static bool
5341 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5342 tree *value)
5344 if (TREE_CODE (low) != INTEGER_CST
5345 || TREE_CODE (high) != INTEGER_CST)
5346 return false;
5348 unsigned prec = TYPE_PRECISION (type);
5349 wide_int lo = wi::to_wide (low, prec);
5350 wide_int hi = wi::to_wide (high, prec);
5352 wide_int end_mask = lo ^ hi;
5353 if ((end_mask & (end_mask + 1)) != 0
5354 || (lo & end_mask) != 0)
5355 return false;
5357 wide_int stem_mask = ~end_mask;
5358 wide_int stem = lo & stem_mask;
5359 if (stem != (hi & stem_mask))
5360 return false;
5362 *mask = wide_int_to_tree (type, stem_mask);
5363 *value = wide_int_to_tree (type, stem);
5365 return true;
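/* Standalone sanity check (not part of GCC): for LOW = 0x20 and
   HIGH = 0x3f, end_mask is 0x1f, the stem mask is ~0x1f and the stem is
   0x20, so the range check becomes a single mask-and-compare.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned x = 0; x < 0x100; x++)
    assert ((x >= 0x20 && x <= 0x3f) == ((x & ~0x1fu) == 0x20));
  return 0;
}
#endif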
5368 /* Helper routine for build_range_check and match.pd. Return the type to
5369 perform the check or NULL if it shouldn't be optimized. */
5371 tree
5372 range_check_type (tree etype)
5374 /* First make sure that arithmetic in this type is valid, then make sure
5375 that it wraps around. */
5376 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5377 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5379 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5381 tree utype, minv, maxv;
5383 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5384 for the type in question, as we rely on this here. */
5385 utype = unsigned_type_for (etype);
5386 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5387 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5388 build_int_cst (TREE_TYPE (maxv), 1), 1);
5389 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5391 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5392 minv, 1, maxv, 1)))
5393 etype = utype;
5394 else
5395 return NULL_TREE;
5397 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5398 etype = unsigned_type_for (etype);
5399 return etype;
5402 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5403 type, TYPE, return an expression to test if EXP is in (or out of, depending
5404 on IN_P) the range. Return 0 if the test couldn't be created. */
5406 tree
5407 build_range_check (location_t loc, tree type, tree exp, int in_p,
5408 tree low, tree high)
5410 tree etype = TREE_TYPE (exp), mask, value;
5412 /* Disable this optimization for function pointer expressions
5413 on targets that require function pointer canonicalization. */
5414 if (targetm.have_canonicalize_funcptr_for_compare ()
5415 && POINTER_TYPE_P (etype)
5416 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5417 return NULL_TREE;
5419 if (! in_p)
5421 value = build_range_check (loc, type, exp, 1, low, high);
5422 if (value != 0)
5423 return invert_truthvalue_loc (loc, value);
5425 return 0;
5428 if (low == 0 && high == 0)
5429 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5431 if (low == 0)
5432 return fold_build2_loc (loc, LE_EXPR, type, exp,
5433 fold_convert_loc (loc, etype, high));
5435 if (high == 0)
5436 return fold_build2_loc (loc, GE_EXPR, type, exp,
5437 fold_convert_loc (loc, etype, low));
5439 if (operand_equal_p (low, high, 0))
5440 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5441 fold_convert_loc (loc, etype, low));
5443 if (TREE_CODE (exp) == BIT_AND_EXPR
5444 && maskable_range_p (low, high, etype, &mask, &value))
5445 return fold_build2_loc (loc, EQ_EXPR, type,
5446 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5447 exp, mask),
5448 value);
5450 if (integer_zerop (low))
5452 if (! TYPE_UNSIGNED (etype))
5454 etype = unsigned_type_for (etype);
5455 high = fold_convert_loc (loc, etype, high);
5456 exp = fold_convert_loc (loc, etype, exp);
5458 return build_range_check (loc, type, exp, 1, 0, high);
5461 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5462 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5464 int prec = TYPE_PRECISION (etype);
5466 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5468 if (TYPE_UNSIGNED (etype))
5470 tree signed_etype = signed_type_for (etype);
5471 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5472 etype
5473 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5474 else
5475 etype = signed_etype;
5476 exp = fold_convert_loc (loc, etype, exp);
5478 return fold_build2_loc (loc, GT_EXPR, type, exp,
5479 build_int_cst (etype, 0));
5483 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5484 This requires wrap-around arithmetic for the type of the expression. */
5485 etype = range_check_type (etype);
5486 if (etype == NULL_TREE)
5487 return NULL_TREE;
5489 high = fold_convert_loc (loc, etype, high);
5490 low = fold_convert_loc (loc, etype, low);
5491 exp = fold_convert_loc (loc, etype, exp);
5493 value = const_binop (MINUS_EXPR, high, low);
5495 if (value != 0 && !TREE_OVERFLOW (value))
5496 return build_range_check (loc, type,
5497 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5498 1, build_int_cst (etype, 0), value);
5500 return 0;
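/* Standalone sanity check (not part of GCC; assumes the usual
   two's-complement conversion to signed char) of the optimization noted
   above: for an 8-bit value, 1 <= c && c <= 127 is the same test as
   (signed char) c > 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned c = 0; c < 0x100; c++)
    assert ((c >= 1 && c <= 127) == ((signed char) c > 0));
  return 0;
}
#endif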
5503 /* Return the predecessor of VAL in its type, handling the infinite case. */
5505 static tree
5506 range_predecessor (tree val)
5508 tree type = TREE_TYPE (val);
5510 if (INTEGRAL_TYPE_P (type)
5511 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5512 return 0;
5513 else
5514 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5515 build_int_cst (TREE_TYPE (val), 1), 0);
5518 /* Return the successor of VAL in its type, handling the infinite case. */
5520 static tree
5521 range_successor (tree val)
5523 tree type = TREE_TYPE (val);
5525 if (INTEGRAL_TYPE_P (type)
5526 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5527 return 0;
5528 else
5529 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5530 build_int_cst (TREE_TYPE (val), 1), 0);
5533 /* Given two ranges, see if we can merge them into one. Return 1 if we
5534 can, 0 if we can't. Set the output range into the specified parameters. */
5536 bool
5537 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5538 tree high0, int in1_p, tree low1, tree high1)
5540 int no_overlap;
5541 int subset;
5542 int temp;
5543 tree tem;
5544 int in_p;
5545 tree low, high;
5546 int lowequal = ((low0 == 0 && low1 == 0)
5547 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5548 low0, 0, low1, 0)));
5549 int highequal = ((high0 == 0 && high1 == 0)
5550 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5551 high0, 1, high1, 1)));
5553 /* Make range 0 be the range that starts first, or ends last if they
5554 start at the same value. Swap them if it isn't. */
5555 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5556 low0, 0, low1, 0))
5557 || (lowequal
5558 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5559 high1, 1, high0, 1))))
5561 temp = in0_p, in0_p = in1_p, in1_p = temp;
5562 tem = low0, low0 = low1, low1 = tem;
5563 tem = high0, high0 = high1, high1 = tem;
5566 /* If the second range is != high1 where high1 is the type maximum of
5567 the type, try first merging with < high1 range. */
5568 if (low1
5569 && high1
5570 && TREE_CODE (low1) == INTEGER_CST
5571 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5572 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5573 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5574 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5575 && operand_equal_p (low1, high1, 0))
5577 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5578 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5579 !in1_p, NULL_TREE, range_predecessor (low1)))
5580 return true;
5581 /* Similarly for the second range != low1 where low1 is the type minimum
5582 of the type, try first merging with > low1 range. */
5583 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5584 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5585 !in1_p, range_successor (low1), NULL_TREE))
5586 return true;
5589 /* Now flag two cases, whether the ranges are disjoint or whether the
5590 second range is totally subsumed in the first. Note that the tests
5591 below are simplified by the ones above. */
5592 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5593 high0, 1, low1, 0));
5594 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5595 high1, 1, high0, 1));
5597 /* We now have four cases, depending on whether we are including or
5598 excluding the two ranges. */
5599 if (in0_p && in1_p)
5601 /* If they don't overlap, the result is false. If the second range
5602 is a subset it is the result. Otherwise, the range is from the start
5603 of the second to the end of the first. */
5604 if (no_overlap)
5605 in_p = 0, low = high = 0;
5606 else if (subset)
5607 in_p = 1, low = low1, high = high1;
5608 else
5609 in_p = 1, low = low1, high = high0;
5612 else if (in0_p && ! in1_p)
5614 /* If they don't overlap, the result is the first range. If they are
5615 equal, the result is false. If the second range is a subset of the
5616 first, and the ranges begin at the same place, we go from just after
5617 the end of the second range to the end of the first. If the second
5618 range is not a subset of the first, or if it is a subset and both
5619 ranges end at the same place, the range starts at the start of the
5620 first range and ends just before the second range.
5621 Otherwise, we can't describe this as a single range. */
5622 if (no_overlap)
5623 in_p = 1, low = low0, high = high0;
5624 else if (lowequal && highequal)
5625 in_p = 0, low = high = 0;
5626 else if (subset && lowequal)
5628 low = range_successor (high1);
5629 high = high0;
5630 in_p = 1;
5631 if (low == 0)
5633 /* We are in the weird situation where high0 > high1 but
5634 high1 has no successor. Punt. */
5635 return 0;
5638 else if (! subset || highequal)
5640 low = low0;
5641 high = range_predecessor (low1);
5642 in_p = 1;
5643 if (high == 0)
5645 /* low0 < low1 but low1 has no predecessor. Punt. */
5646 return 0;
5649 else
5650 return 0;
5653 else if (! in0_p && in1_p)
5655 /* If they don't overlap, the result is the second range. If the second
5656 is a subset of the first, the result is false. Otherwise,
5657 the range starts just after the first range and ends at the
5658 end of the second. */
5659 if (no_overlap)
5660 in_p = 1, low = low1, high = high1;
5661 else if (subset || highequal)
5662 in_p = 0, low = high = 0;
5663 else
5665 low = range_successor (high0);
5666 high = high1;
5667 in_p = 1;
5668 if (low == 0)
5670 /* high1 > high0 but high0 has no successor. Punt. */
5671 return 0;
5676 else
5678 /* The case where we are excluding both ranges. Here the complex case
5679 is if they don't overlap. In that case, the only time we have a
5680 range is if they are adjacent. If the second is a subset of the
5681 first, the result is the first. Otherwise, the range to exclude
5682 starts at the beginning of the first range and ends at the end of the
5683 second. */
5684 if (no_overlap)
5686 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5687 range_successor (high0),
5688 1, low1, 0)))
5689 in_p = 0, low = low0, high = high1;
5690 else
5692 /* Canonicalize - [min, x] into - [-, x]. */
5693 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5694 switch (TREE_CODE (TREE_TYPE (low0)))
5696 case ENUMERAL_TYPE:
5697 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5698 GET_MODE_BITSIZE
5699 (TYPE_MODE (TREE_TYPE (low0)))))
5700 break;
5701 /* FALLTHROUGH */
5702 case INTEGER_TYPE:
5703 if (tree_int_cst_equal (low0,
5704 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5705 low0 = 0;
5706 break;
5707 case POINTER_TYPE:
5708 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5709 && integer_zerop (low0))
5710 low0 = 0;
5711 break;
5712 default:
5713 break;
5716 /* Canonicalize - [x, max] into - [x, -]. */
5717 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5718 switch (TREE_CODE (TREE_TYPE (high1)))
5720 case ENUMERAL_TYPE:
5721 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5722 GET_MODE_BITSIZE
5723 (TYPE_MODE (TREE_TYPE (high1)))))
5724 break;
5725 /* FALLTHROUGH */
5726 case INTEGER_TYPE:
5727 if (tree_int_cst_equal (high1,
5728 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5729 high1 = 0;
5730 break;
5731 case POINTER_TYPE:
5732 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5733 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5734 high1, 1,
5735 build_int_cst (TREE_TYPE (high1), 1),
5736 1)))
5737 high1 = 0;
5738 break;
5739 default:
5740 break;
5743 /* The ranges might also be adjacent between the maximum and
5744 minimum values of the given type. For
5745 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5746 return + [x + 1, y - 1]. */
5747 if (low0 == 0 && high1 == 0)
5749 low = range_successor (high0);
5750 high = range_predecessor (low1);
5751 if (low == 0 || high == 0)
5752 return 0;
5754 in_p = 1;
5756 else
5757 return 0;
5760 else if (subset)
5761 in_p = 0, low = low0, high = high0;
5762 else
5763 in_p = 0, low = low0, high = high1;
5766 *pin_p = in_p, *plow = low, *phigh = high;
5767 return 1;
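/* Worked examples (illustrative, not from the original source):
   merging + [0, 9] with + [5, 19] takes the in0_p && in1_p arm above;
   the ranges overlap and neither subsumes the other, so the result is
   + [low1, high0] = + [5, 9].  Merging the excluded ranges - [-, 4]
   and - [10, -] takes the final arm: they neither overlap nor are
   adjacent, but low0 and high1 are both unbounded, so the result is
   + [range_successor (4), range_predecessor (10)] = + [5, 9].  */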
5771 /* Subroutine of fold, looking inside expressions of the form
5772 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5773 are the three operands of the COND_EXPR. This function is
5774 also being used to optimize A op B ? C : A, by reversing the
5775 comparison first.
5777 Return a folded expression whose code is not a COND_EXPR
5778 anymore, or NULL_TREE if no folding opportunity is found. */
5780 static tree
5781 fold_cond_expr_with_comparison (location_t loc, tree type,
5782 enum tree_code comp_code,
5783 tree arg00, tree arg01, tree arg1, tree arg2)
5785 tree arg1_type = TREE_TYPE (arg1);
5786 tree tem;
5788 STRIP_NOPS (arg1);
5789 STRIP_NOPS (arg2);
5791 /* If we have A op 0 ? A : -A, consider applying the following
5792 transformations:
5794 A == 0? A : -A same as -A
5795 A != 0? A : -A same as A
5796 A >= 0? A : -A same as abs (A)
5797 A > 0? A : -A same as abs (A)
5798 A <= 0? A : -A same as -abs (A)
5799 A < 0? A : -A same as -abs (A)
5801 None of these transformations work for modes with signed
5802 zeros. If A is +/-0, the first two transformations will
5803 change the sign of the result (from +0 to -0, or vice
5804 versa). The last four will fix the sign of the result,
5805 even though the original expressions could be positive or
5806 negative, depending on the sign of A.
5808 Note that all these transformations are correct if A is
5809 NaN, since the two alternatives (A and -A) are also NaNs. */
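/* Concrete instance (illustrative, not from the original source):
   folding "x == 0.0 ? x : -x" to "-x" maps an argument of +0.0 to
   -0.0, which is why the folds below are guarded by
   !HONOR_SIGNED_ZEROS.  */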
5810 if (!HONOR_SIGNED_ZEROS (type)
5811 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5812 ? real_zerop (arg01)
5813 : integer_zerop (arg01))
5814 && ((TREE_CODE (arg2) == NEGATE_EXPR
5815 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5816 /* In the case that A is of the form X-Y, '-A' (arg2) may
5817 have already been folded to Y-X, check for that. */
5818 || (TREE_CODE (arg1) == MINUS_EXPR
5819 && TREE_CODE (arg2) == MINUS_EXPR
5820 && operand_equal_p (TREE_OPERAND (arg1, 0),
5821 TREE_OPERAND (arg2, 1), 0)
5822 && operand_equal_p (TREE_OPERAND (arg1, 1),
5823 TREE_OPERAND (arg2, 0), 0))))
5824 switch (comp_code)
5826 case EQ_EXPR:
5827 case UNEQ_EXPR:
5828 tem = fold_convert_loc (loc, arg1_type, arg1);
5829 return fold_convert_loc (loc, type, negate_expr (tem));
5830 case NE_EXPR:
5831 case LTGT_EXPR:
5832 return fold_convert_loc (loc, type, arg1);
5833 case UNGE_EXPR:
5834 case UNGT_EXPR:
5835 if (flag_trapping_math)
5836 break;
5837 /* Fall through. */
5838 case GE_EXPR:
5839 case GT_EXPR:
5840 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5841 break;
5842 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5843 return fold_convert_loc (loc, type, tem);
5844 case UNLE_EXPR:
5845 case UNLT_EXPR:
5846 if (flag_trapping_math)
5847 break;
5848 /* FALLTHRU */
5849 case LE_EXPR:
5850 case LT_EXPR:
5851 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5852 break;
5853 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5854 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5856 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5857 is not: it invokes UB both in abs and in the negation of it.
5858 So, use ABSU_EXPR instead. */
5859 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5860 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5861 tem = negate_expr (tem);
5862 return fold_convert_loc (loc, type, tem);
5864 else
5866 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5867 return negate_expr (fold_convert_loc (loc, type, tem));
5869 default:
5870 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5871 break;
5874 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5875 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5876 both transformations are correct when A is NaN: A != 0
5877 is then true, and A == 0 is false. */
5879 if (!HONOR_SIGNED_ZEROS (type)
5880 && integer_zerop (arg01) && integer_zerop (arg2))
5882 if (comp_code == NE_EXPR)
5883 return fold_convert_loc (loc, type, arg1);
5884 else if (comp_code == EQ_EXPR)
5885 return build_zero_cst (type);
5888 /* Try some transformations of A op B ? A : B.
5890 A == B? A : B same as B
5891 A != B? A : B same as A
5892 A >= B? A : B same as max (A, B)
5893 A > B? A : B same as max (B, A)
5894 A <= B? A : B same as min (A, B)
5895 A < B? A : B same as min (B, A)
5897 As above, these transformations don't work in the presence
5898 of signed zeros. For example, if A and B are zeros of
5899 opposite sign, the first two transformations will change
5900 the sign of the result. In the last four, the original
5901 expressions give different results for (A=+0, B=-0) and
5902 (A=-0, B=+0), but the transformed expressions do not.
5904 The first two transformations are correct if either A or B
5905 is a NaN. In the first transformation, the condition will
5906 be false, and B will indeed be chosen. In the case of the
5907 second transformation, the condition A != B will be true,
5908 and A will be chosen.
5910 The conversions to max() and min() are not correct if B is
5911 a number and A is not. The conditions in the original
5912 expressions will be false, so all four give B. The min()
5913 and max() versions would give a NaN instead. */
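/* Concrete instances (illustrative, not from the original source):
     x <= y ? x : y  -->  MIN_EXPR <x, y>
     x >  y ? x : y  -->  MAX_EXPR <y, x>
   with the operand selected on equality placed first, per the lvalue
   comment below.  */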
5914 if (!HONOR_SIGNED_ZEROS (type)
5915 && operand_equal_for_comparison_p (arg01, arg2)
5916 /* Avoid these transformations if the COND_EXPR may be used
5917 as an lvalue in the C++ front-end. PR c++/19199. */
5918 && (in_gimple_form
5919 || VECTOR_TYPE_P (type)
5920 || (! lang_GNU_CXX ()
5921 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5922 || ! maybe_lvalue_p (arg1)
5923 || ! maybe_lvalue_p (arg2)))
5925 tree comp_op0 = arg00;
5926 tree comp_op1 = arg01;
5927 tree comp_type = TREE_TYPE (comp_op0);
5929 switch (comp_code)
5931 case EQ_EXPR:
5932 return fold_convert_loc (loc, type, arg2);
5933 case NE_EXPR:
5934 return fold_convert_loc (loc, type, arg1);
5935 case LE_EXPR:
5936 case LT_EXPR:
5937 case UNLE_EXPR:
5938 case UNLT_EXPR:
5939 /* In C++ a ?: expression can be an lvalue, so put the
5940 operand which will be used if they are equal first
5941 so that we can convert this back to the
5942 corresponding COND_EXPR. */
5943 if (!HONOR_NANS (arg1))
5945 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5946 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5947 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5948 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5949 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5950 comp_op1, comp_op0);
5951 return fold_convert_loc (loc, type, tem);
5953 break;
5954 case GE_EXPR:
5955 case GT_EXPR:
5956 case UNGE_EXPR:
5957 case UNGT_EXPR:
5958 if (!HONOR_NANS (arg1))
5960 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5961 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5962 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5963 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5964 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5965 comp_op1, comp_op0);
5966 return fold_convert_loc (loc, type, tem);
5968 break;
5969 case UNEQ_EXPR:
5970 if (!HONOR_NANS (arg1))
5971 return fold_convert_loc (loc, type, arg2);
5972 break;
5973 case LTGT_EXPR:
5974 if (!HONOR_NANS (arg1))
5975 return fold_convert_loc (loc, type, arg1);
5976 break;
5977 default:
5978 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5979 break;
5983 return NULL_TREE;
5988 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5989 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5990 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5991 false) >= 2)
5992 #endif
5994 /* EXP is some logical combination of boolean tests. See if we can
5995 merge it into some range test. Return the new tree if so. */
5997 static tree
5998 fold_range_test (location_t loc, enum tree_code code, tree type,
5999 tree op0, tree op1)
6001 int or_op = (code == TRUTH_ORIF_EXPR
6002 || code == TRUTH_OR_EXPR);
6003 int in0_p, in1_p, in_p;
6004 tree low0, low1, low, high0, high1, high;
6005 bool strict_overflow_p = false;
6006 tree tem, lhs, rhs;
6007 const char * const warnmsg = G_("assuming signed overflow does not occur "
6008 "when simplifying range test");
6010 if (!INTEGRAL_TYPE_P (type))
6011 return 0;
6013 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6014 /* If op0 is known true or false and this is a short-circuiting
6015 operation we must not merge with op1 since that makes side-effects
6016 unconditional. So special-case this. */
6017 if (!lhs
6018 && ((code == TRUTH_ORIF_EXPR && in0_p)
6019 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6020 return op0;
6021 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6023 /* If this is an OR operation, invert both sides; we will invert
6024 again at the end. */
6025 if (or_op)
6026 in0_p = ! in0_p, in1_p = ! in1_p;
6028 /* If both expressions are the same, if we can merge the ranges, and we
6029 can build the range test, return it, possibly inverted. If one of the
6030 ranges is always true or always false, consider it to be the same
6031 expression as the other. */
6032 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6033 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6034 in1_p, low1, high1)
6035 && (tem = (build_range_check (loc, type,
6036 lhs != 0 ? lhs
6037 : rhs != 0 ? rhs : integer_zero_node,
6038 in_p, low, high))) != 0)
6040 if (strict_overflow_p)
6041 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6042 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6045 /* On machines where the branch cost is high, if this is a
6046 short-circuited branch and the underlying object on both sides
6047 is the same, make a non-short-circuit operation. */
6048 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6049 if (param_logical_op_non_short_circuit != -1)
6050 logical_op_non_short_circuit
6051 = param_logical_op_non_short_circuit;
6052 if (logical_op_non_short_circuit
6053 && !sanitize_coverage_p ()
6054 && lhs != 0 && rhs != 0
6055 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6056 && operand_equal_p (lhs, rhs, 0))
6058 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6059 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6060 which cases we can't do this. */
6061 if (simple_operand_p (lhs))
6062 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6063 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6064 type, op0, op1);
6066 else if (!lang_hooks.decls.global_bindings_p ()
6067 && !CONTAINS_PLACEHOLDER_P (lhs))
6069 tree common = save_expr (lhs);
6071 if ((lhs = build_range_check (loc, type, common,
6072 or_op ? ! in0_p : in0_p,
6073 low0, high0)) != 0
6074 && (rhs = build_range_check (loc, type, common,
6075 or_op ? ! in1_p : in1_p,
6076 low1, high1)) != 0)
6078 if (strict_overflow_p)
6079 fold_overflow_warning (warnmsg,
6080 WARN_STRICT_OVERFLOW_COMPARISON);
6081 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6082 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6083 type, lhs, rhs);
6088 return 0;
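/* Worked example (illustrative, not from the original source): for
   "c >= '0' && c <= '9'", make_range yields + ['0', -] and + [-, '9']
   over the same operand C; merge_ranges combines them into + ['0', '9']
   and build_range_check emits the single unsigned test
   (unsigned) (c - '0') <= 9.  */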
6091 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6092 bit value. Arrange things so the extra bits will be set to zero if and
6093 only if C is sign-extended to its full width. If MASK is nonzero,
6094 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6096 static tree
6097 unextend (tree c, int p, int unsignedp, tree mask)
6099 tree type = TREE_TYPE (c);
6100 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6101 tree temp;
6103 if (p == modesize || unsignedp)
6104 return c;
6106 /* We work by getting just the sign bit into the low-order bit, then
6107 into the high-order bit, then sign-extend. We then XOR that value
6108 with C. */
6109 temp = build_int_cst (TREE_TYPE (c),
6110 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6112 /* We must use a signed type in order to get an arithmetic right shift.
6113 However, we must also avoid introducing accidental overflows, so that
6114 a subsequent call to integer_zerop will work. Hence we must
6115 do the type conversion here. At this point, the constant is either
6116 zero or one, and the conversion to a signed type can never overflow.
6117 We could get an overflow if this conversion is done anywhere else. */
6118 if (TYPE_UNSIGNED (type))
6119 temp = fold_convert (signed_type_for (type), temp);
6121 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6122 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6123 if (mask != 0)
6124 temp = const_binop (BIT_AND_EXPR, temp,
6125 fold_convert (TREE_TYPE (c), mask));
6126 /* If necessary, convert the type back to match the type of C. */
6127 if (TYPE_UNSIGNED (type))
6128 temp = fold_convert (type, temp);
6130 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6133 /* For an expression that has the form
6134 (A && B) || ~B
6135 or
6136 (A || B) && ~B,
6137 we can drop one of the inner expressions and simplify to
6138 A || ~B
6139 or
6140 A && ~B
6141 LOC is the location of the resulting expression. OP is the inner
6142 logical operation; the left-hand side in the examples above, while CMPOP
6143 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6144 removing a condition that guards another, as in
6145 (A != NULL && A->...) || A == NULL
6146 which we must not transform. If RHS_ONLY is true, only eliminate the
6147 right-most operand of the inner logical operation. */
6149 static tree
6150 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6151 bool rhs_only)
6153 tree type = TREE_TYPE (cmpop);
6154 enum tree_code code = TREE_CODE (cmpop);
6155 enum tree_code truthop_code = TREE_CODE (op);
6156 tree lhs = TREE_OPERAND (op, 0);
6157 tree rhs = TREE_OPERAND (op, 1);
6158 tree orig_lhs = lhs, orig_rhs = rhs;
6159 enum tree_code rhs_code = TREE_CODE (rhs);
6160 enum tree_code lhs_code = TREE_CODE (lhs);
6161 enum tree_code inv_code;
6163 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6164 return NULL_TREE;
6166 if (TREE_CODE_CLASS (code) != tcc_comparison)
6167 return NULL_TREE;
6169 if (rhs_code == truthop_code)
6171 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6172 if (newrhs != NULL_TREE)
6174 rhs = newrhs;
6175 rhs_code = TREE_CODE (rhs);
6178 if (lhs_code == truthop_code && !rhs_only)
6180 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6181 if (newlhs != NULL_TREE)
6183 lhs = newlhs;
6184 lhs_code = TREE_CODE (lhs);
6188 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6189 if (inv_code == rhs_code
6190 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6191 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6192 return lhs;
6193 if (!rhs_only && inv_code == lhs_code
6194 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6195 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6196 return rhs;
6197 if (rhs != orig_rhs || lhs != orig_lhs)
6198 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6199 lhs, rhs);
6200 return NULL_TREE;
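/* Example (illustrative, not from the original source), with RHS_ONLY
   false: for OP = "a < b && c != 0" and CMPOP = "a >= b", the inverted
   CMPOP matches the left arm of OP, so that arm is dropped and "c != 0"
   is returned, letting the caller fold
     (a < b && c != 0) || a >= b  -->  c != 0 || a >= b.  */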
6203 /* Find ways of folding logical expressions of LHS and RHS:
6204 Try to merge two comparisons to the same innermost item.
6205 Look for range tests like "ch >= '0' && ch <= '9'".
6206 Look for combinations of simple terms on machines with expensive branches
6207 and evaluate the RHS unconditionally.
6209 For example, if we have p->a == 2 && p->b == 4 and we can make an
6210 object large enough to span both A and B, we can do this with a comparison
6211 against the object ANDed with a mask.
6213 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6214 operations to do this with one comparison.
6216 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6217 function and the one above.
6219 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6220 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6222 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
6223 two operands.
6225 We return the simplified tree or 0 if no optimization is possible. */
6227 static tree
6228 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6229 tree lhs, tree rhs)
6231 /* If this is the "or" of two comparisons, we can do something if
6232 the comparisons are NE_EXPR. If this is the "and", we can do something
6233 if the comparisons are EQ_EXPR. I.e.,
6234 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6236 WANTED_CODE is this operation code. For single bit fields, we can
6237 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6238 comparison for one-bit fields. */
6240 enum tree_code wanted_code;
6241 enum tree_code lcode, rcode;
6242 tree ll_arg, lr_arg, rl_arg, rr_arg;
6243 tree ll_inner, lr_inner, rl_inner, rr_inner;
6244 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6245 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6246 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6247 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6248 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6249 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6250 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6251 scalar_int_mode lnmode, rnmode;
6252 tree ll_mask, lr_mask, rl_mask, rr_mask;
6253 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6254 tree l_const, r_const;
6255 tree lntype, rntype, result;
6256 HOST_WIDE_INT first_bit, end_bit;
6257 int volatilep;
6259 /* Start by getting the comparison codes. Fail if anything is volatile.
6260 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6261 it were surrounded with a NE_EXPR. */
6263 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6264 return 0;
6266 lcode = TREE_CODE (lhs);
6267 rcode = TREE_CODE (rhs);
6269 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6271 lhs = build2 (NE_EXPR, truth_type, lhs,
6272 build_int_cst (TREE_TYPE (lhs), 0));
6273 lcode = NE_EXPR;
6276 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6278 rhs = build2 (NE_EXPR, truth_type, rhs,
6279 build_int_cst (TREE_TYPE (rhs), 0));
6280 rcode = NE_EXPR;
6283 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6284 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6285 return 0;
6287 ll_arg = TREE_OPERAND (lhs, 0);
6288 lr_arg = TREE_OPERAND (lhs, 1);
6289 rl_arg = TREE_OPERAND (rhs, 0);
6290 rr_arg = TREE_OPERAND (rhs, 1);
6292 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6293 if (simple_operand_p (ll_arg)
6294 && simple_operand_p (lr_arg))
6296 if (operand_equal_p (ll_arg, rl_arg, 0)
6297 && operand_equal_p (lr_arg, rr_arg, 0))
6299 result = combine_comparisons (loc, code, lcode, rcode,
6300 truth_type, ll_arg, lr_arg);
6301 if (result)
6302 return result;
6304 else if (operand_equal_p (ll_arg, rr_arg, 0)
6305 && operand_equal_p (lr_arg, rl_arg, 0))
6307 result = combine_comparisons (loc, code, lcode,
6308 swap_tree_comparison (rcode),
6309 truth_type, ll_arg, lr_arg);
6310 if (result)
6311 return result;
6315 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6316 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6318 /* If the RHS can be evaluated unconditionally and its operands are
6319 simple, it wins to evaluate the RHS unconditionally on machines
6320 with expensive branches. In this case, this isn't a comparison
6321 that can be merged. */
6323 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6324 false) >= 2
6325 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6326 && simple_operand_p (rl_arg)
6327 && simple_operand_p (rr_arg))
6329 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6330 if (code == TRUTH_OR_EXPR
6331 && lcode == NE_EXPR && integer_zerop (lr_arg)
6332 && rcode == NE_EXPR && integer_zerop (rr_arg)
6333 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6334 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6335 return build2_loc (loc, NE_EXPR, truth_type,
6336 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6337 ll_arg, rl_arg),
6338 build_int_cst (TREE_TYPE (ll_arg), 0));
6340 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6341 if (code == TRUTH_AND_EXPR
6342 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6343 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6344 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6345 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6346 return build2_loc (loc, EQ_EXPR, truth_type,
6347 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6348 ll_arg, rl_arg),
6349 build_int_cst (TREE_TYPE (ll_arg), 0));
6352 /* See if the comparisons can be merged. Then get all the parameters for
6353 each side. */
6355 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6356 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6357 return 0;
6359 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6360 volatilep = 0;
6361 ll_inner = decode_field_reference (loc, &ll_arg,
6362 &ll_bitsize, &ll_bitpos, &ll_mode,
6363 &ll_unsignedp, &ll_reversep, &volatilep,
6364 &ll_mask, &ll_and_mask);
6365 lr_inner = decode_field_reference (loc, &lr_arg,
6366 &lr_bitsize, &lr_bitpos, &lr_mode,
6367 &lr_unsignedp, &lr_reversep, &volatilep,
6368 &lr_mask, &lr_and_mask);
6369 rl_inner = decode_field_reference (loc, &rl_arg,
6370 &rl_bitsize, &rl_bitpos, &rl_mode,
6371 &rl_unsignedp, &rl_reversep, &volatilep,
6372 &rl_mask, &rl_and_mask);
6373 rr_inner = decode_field_reference (loc, &rr_arg,
6374 &rr_bitsize, &rr_bitpos, &rr_mode,
6375 &rr_unsignedp, &rr_reversep, &volatilep,
6376 &rr_mask, &rr_and_mask);
6378 /* The inner operation on the lhs of each comparison must be the
6379 same if we are to be able to do anything.
6380 Then see if we have constants. If not, the same must be true for
6381 the rhs's. */
6382 if (volatilep
6383 || ll_reversep != rl_reversep
6384 || ll_inner == 0 || rl_inner == 0
6385 || ! operand_equal_p (ll_inner, rl_inner, 0))
6386 return 0;
6388 if (TREE_CODE (lr_arg) == INTEGER_CST
6389 && TREE_CODE (rr_arg) == INTEGER_CST)
6391 l_const = lr_arg, r_const = rr_arg;
6392 lr_reversep = ll_reversep;
6394 else if (lr_reversep != rr_reversep
6395 || lr_inner == 0 || rr_inner == 0
6396 || ! operand_equal_p (lr_inner, rr_inner, 0))
6397 return 0;
6398 else
6399 l_const = r_const = 0;
6401 /* If either comparison code is not correct for our logical operation,
6402 fail. However, we can convert a one-bit comparison against zero into
6403 the opposite comparison against that bit being set in the field. */
6405 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6406 if (lcode != wanted_code)
6408 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6410 /* Make the left operand unsigned, since we are only interested
6411 in the value of one bit. Otherwise we are doing the wrong
6412 thing below. */
6413 ll_unsignedp = 1;
6414 l_const = ll_mask;
6416 else
6417 return 0;
6420 /* This is analogous to the code for l_const above. */
6421 if (rcode != wanted_code)
6423 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6425 rl_unsignedp = 1;
6426 r_const = rl_mask;
6428 else
6429 return 0;
6432 /* See if we can find a mode that contains both fields being compared on
6433 the left. If we can't, fail. Otherwise, update all constants and masks
6434 to be relative to a field of that size. */
6435 first_bit = MIN (ll_bitpos, rl_bitpos);
6436 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6437 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6438 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6439 volatilep, &lnmode))
6440 return 0;
6442 lnbitsize = GET_MODE_BITSIZE (lnmode);
6443 lnbitpos = first_bit & ~ (lnbitsize - 1);
6444 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6445 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6447 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6449 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6450 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6453 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6454 size_int (xll_bitpos));
6455 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6456 size_int (xrl_bitpos));
6458 if (l_const)
6460 l_const = fold_convert_loc (loc, lntype, l_const);
6461 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6462 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6463 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6464 fold_build1_loc (loc, BIT_NOT_EXPR,
6465 lntype, ll_mask))))
6467 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6469 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6472 if (r_const)
6474 r_const = fold_convert_loc (loc, lntype, r_const);
6475 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6476 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6477 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6478 fold_build1_loc (loc, BIT_NOT_EXPR,
6479 lntype, rl_mask))))
6481 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6483 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6487 /* If the right sides are not constant, do the same for them. Also,
6488 disallow this optimization if a size, signedness or storage order
6489 mismatch occurs between the left and right sides. */
6490 if (l_const == 0)
6492 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6493 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6494 || ll_reversep != lr_reversep
6495 /* Make sure the two fields on the right
6496 correspond to the left without being swapped. */
6497 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6498 return 0;
6500 first_bit = MIN (lr_bitpos, rr_bitpos);
6501 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6502 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6503 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6504 volatilep, &rnmode))
6505 return 0;
6507 rnbitsize = GET_MODE_BITSIZE (rnmode);
6508 rnbitpos = first_bit & ~ (rnbitsize - 1);
6509 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6510 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6512 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6514 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6515 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6518 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6519 rntype, lr_mask),
6520 size_int (xlr_bitpos));
6521 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6522 rntype, rr_mask),
6523 size_int (xrr_bitpos));
6525 /* Make a mask that corresponds to both fields being compared.
6526 Do this for both items being compared. If the operands are the
6527 same size and the bits being compared are in the same position
6528 then we can do this by masking both and comparing the masked
6529 results. */
6530 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6531 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6532 if (lnbitsize == rnbitsize
6533 && xll_bitpos == xlr_bitpos
6534 && lnbitpos >= 0
6535 && rnbitpos >= 0)
6537 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6538 lntype, lnbitsize, lnbitpos,
6539 ll_unsignedp || rl_unsignedp, ll_reversep);
6540 if (! all_ones_mask_p (ll_mask, lnbitsize))
6541 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6543 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6544 rntype, rnbitsize, rnbitpos,
6545 lr_unsignedp || rr_unsignedp, lr_reversep);
6546 if (! all_ones_mask_p (lr_mask, rnbitsize))
6547 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6549 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6552 /* There is still another way we can do something: If both pairs of
6553 fields being compared are adjacent, we may be able to make a wider
6554 field containing them both.
6556 Note that we still must mask the lhs/rhs expressions. Furthermore,
6557 the mask must be shifted to account for the shift done by
6558 make_bit_field_ref. */
6559 if (((ll_bitsize + ll_bitpos == rl_bitpos
6560 && lr_bitsize + lr_bitpos == rr_bitpos)
6561 || (ll_bitpos == rl_bitpos + rl_bitsize
6562 && lr_bitpos == rr_bitpos + rr_bitsize))
6563 && ll_bitpos >= 0
6564 && rl_bitpos >= 0
6565 && lr_bitpos >= 0
6566 && rr_bitpos >= 0)
6568 tree type;
6570 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6571 ll_bitsize + rl_bitsize,
6572 MIN (ll_bitpos, rl_bitpos),
6573 ll_unsignedp, ll_reversep);
6574 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6575 lr_bitsize + rr_bitsize,
6576 MIN (lr_bitpos, rr_bitpos),
6577 lr_unsignedp, lr_reversep);
6579 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6580 size_int (MIN (xll_bitpos, xrl_bitpos)));
6581 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6582 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6584 /* Convert to the smaller type before masking out unwanted bits. */
6585 type = lntype;
6586 if (lntype != rntype)
6588 if (lnbitsize > rnbitsize)
6590 lhs = fold_convert_loc (loc, rntype, lhs);
6591 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6592 type = rntype;
6594 else if (lnbitsize < rnbitsize)
6596 rhs = fold_convert_loc (loc, lntype, rhs);
6597 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6598 type = lntype;
6602 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6603 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6605 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6606 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6608 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6611 return 0;
6614 /* Handle the case of comparisons with constants. If there is something in
6615 common between the masks, those bits of the constants must be the same.
6616 If not, the condition is always false. Test for this to avoid generating
6617 incorrect code below. */
6618 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6619 if (! integer_zerop (result)
6620 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6621 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6623 if (wanted_code == NE_EXPR)
6625 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6626 return constant_boolean_node (true, truth_type);
6628 else
6630 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6631 return constant_boolean_node (false, truth_type);
6635 if (lnbitpos < 0)
6636 return 0;
6638 /* Construct the expression we will return. First get the component
6639 reference we will make. Unless the mask is all ones the width of
6640 that field, perform the mask operation. Then compare with the
6641 merged constant. */
6642 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6643 lntype, lnbitsize, lnbitpos,
6644 ll_unsignedp || rl_unsignedp, ll_reversep);
6646 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6647 if (! all_ones_mask_p (ll_mask, lnbitsize))
6648 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6650 return build2_loc (loc, wanted_code, truth_type, result,
6651 const_binop (BIT_IOR_EXPR, l_const, r_const));
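/* Illustrative sketch (not part of the original source) of the kind of
   merge performed above, assuming a little-endian bit-field layout:  */
#if 0
struct S { unsigned int a : 4; unsigned int b : 4; };
static int test_fields (const struct S *p)
{ return p->a == 2 && p->b == 4; }          /* two bit-field loads */
static int test_merged (unsigned char byte) /* byte = first byte of S */
{ return byte == (2 | (4 << 4)); }          /* one masked compare */
#endif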
6654 /* T is an integer expression that is being multiplied, divided, or taken a
6655 modulus (CODE says which and what kind of divide or modulus) by a
6656 constant C. See if we can eliminate that operation by folding it with
6657 other operations already in T. WIDE_TYPE, if non-null, is a type that
6658 should be used for the computation if wider than our type.
6660 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6661 (X * 2) + (Y * 4). We must, however, be assured that either the original
6662 expression would not overflow or that overflow is undefined for the type
6663 in the language in question.
6665 If we return a non-null expression, it is an equivalent form of the
6666 original computation, but need not be in the original type.
6668 We set *STRICT_OVERFLOW_P to true if the return values depends on
6669 signed overflow being undefined. Otherwise we do not change
6670 *STRICT_OVERFLOW_P. */
6672 static tree
6673 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6674 bool *strict_overflow_p)
6676 /* To avoid exponential search depth, refuse to allow recursion past
6677 three levels. Beyond that (1) it's highly unlikely that we'll find
6678 something interesting and (2) we've probably processed it before
6679 when we built the inner expression. */
6681 static int depth;
6682 tree ret;
6684 if (depth > 3)
6685 return NULL;
6687 depth++;
6688 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6689 depth--;
6691 return ret;
6694 static tree
6695 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6696 bool *strict_overflow_p)
6698 tree type = TREE_TYPE (t);
6699 enum tree_code tcode = TREE_CODE (t);
6700 tree ctype = (wide_type != 0
6701 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6702 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6703 ? wide_type : type);
6704 tree t1, t2;
6705 int same_p = tcode == code;
6706 tree op0 = NULL_TREE, op1 = NULL_TREE;
6707 bool sub_strict_overflow_p;
6709 /* Don't deal with constants of zero here; they confuse the code below. */
6710 if (integer_zerop (c))
6711 return NULL_TREE;
6713 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6714 op0 = TREE_OPERAND (t, 0);
6716 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6717 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6719 /* Note that we need not handle conditional operations here since fold
6720 already handles those cases. So just do arithmetic here. */
6721 switch (tcode)
6723 case INTEGER_CST:
6724 /* For a constant, we can always simplify if we are a multiply
6725 or (for divide and modulus) if it is a multiple of our constant. */
6726 if (code == MULT_EXPR
6727 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6728 TYPE_SIGN (type)))
6730 tree tem = const_binop (code, fold_convert (ctype, t),
6731 fold_convert (ctype, c));
6732 /* If the multiplication overflowed, we lost information on it.
6733 See PR68142 and PR69845. */
6734 if (TREE_OVERFLOW (tem))
6735 return NULL_TREE;
6736 return tem;
6738 break;
6740 CASE_CONVERT: case NON_LVALUE_EXPR:
6741 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6742 break;
6743 /* If op0 is an expression ... */
6744 if ((COMPARISON_CLASS_P (op0)
6745 || UNARY_CLASS_P (op0)
6746 || BINARY_CLASS_P (op0)
6747 || VL_EXP_CLASS_P (op0)
6748 || EXPRESSION_CLASS_P (op0))
6749 /* ... and has wrapping overflow, and its type is smaller
6750 than ctype, then we cannot pass through as widening. */
6751 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6752 && (TYPE_PRECISION (ctype)
6753 > TYPE_PRECISION (TREE_TYPE (op0))))
6754 /* ... or this is a truncation (t is narrower than op0),
6755 then we cannot pass through this narrowing. */
6756 || (TYPE_PRECISION (type)
6757 < TYPE_PRECISION (TREE_TYPE (op0)))
6758 /* ... or signedness changes for division or modulus,
6759 then we cannot pass through this conversion. */
6760 || (code != MULT_EXPR
6761 && (TYPE_UNSIGNED (ctype)
6762 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6763 /* ... or has undefined overflow while the converted to
6764 type has not, we cannot do the operation in the inner type
6765 as that would introduce undefined overflow. */
6766 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6767 && !TYPE_OVERFLOW_UNDEFINED (type))))
6768 break;
6770 /* Pass the constant down and see if we can make a simplification. If
6771 we can, replace this expression with the inner simplification for
6772 possible later conversion to our or some other type. */
6773 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6774 && TREE_CODE (t2) == INTEGER_CST
6775 && !TREE_OVERFLOW (t2)
6776 && (t1 = extract_muldiv (op0, t2, code,
6777 code == MULT_EXPR ? ctype : NULL_TREE,
6778 strict_overflow_p)) != 0)
6779 return t1;
6780 break;
6782 case ABS_EXPR:
6783 /* If widening the type changes it from signed to unsigned, then we
6784 must avoid building ABS_EXPR itself as unsigned. */
6785 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6787 tree cstype = (*signed_type_for) (ctype);
6788 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6789 != 0)
6791 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6792 return fold_convert (ctype, t1);
6794 break;
6796 /* If the constant is negative, we cannot simplify this. */
6797 if (tree_int_cst_sgn (c) == -1)
6798 break;
6799 /* FALLTHROUGH */
6800 case NEGATE_EXPR:
6801 /* For division and modulus, type can't be unsigned, as e.g.
6802 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6803 For signed types, even with wrapping overflow, this is fine. */
6804 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6805 break;
6806 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6807 != 0)
6808 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6809 break;
6811 case MIN_EXPR: case MAX_EXPR:
6812 /* If widening the type changes the signedness, then we can't perform
6813 this optimization as that changes the result. */
6814 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6815 break;
6817 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6818 sub_strict_overflow_p = false;
6819 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6820 &sub_strict_overflow_p)) != 0
6821 && (t2 = extract_muldiv (op1, c, code, wide_type,
6822 &sub_strict_overflow_p)) != 0)
6824 if (tree_int_cst_sgn (c) < 0)
6825 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6826 if (sub_strict_overflow_p)
6827 *strict_overflow_p = true;
6828 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6829 fold_convert (ctype, t2));
6831 break;
6833 case LSHIFT_EXPR: case RSHIFT_EXPR:
6834 /* If the second operand is constant, this is a multiplication
6835 or floor division by a power of two, so we can treat it that
6836 way unless the multiplier or divisor overflows. Signed
6837 left-shift overflow is implementation-defined rather than
6838 undefined in C90, so do not convert signed left shift into
6839 multiplication. */
6840 if (TREE_CODE (op1) == INTEGER_CST
6841 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6842 /* const_binop may not detect overflow correctly,
6843 so check for it explicitly here. */
6844 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6845 wi::to_wide (op1))
6846 && (t1 = fold_convert (ctype,
6847 const_binop (LSHIFT_EXPR, size_one_node,
6848 op1))) != 0
6849 && !TREE_OVERFLOW (t1))
6850 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6851 ? MULT_EXPR : FLOOR_DIV_EXPR,
6852 ctype,
6853 fold_convert (ctype, op0),
6854 t1),
6855 c, code, wide_type, strict_overflow_p);
6856 break;
6858 case PLUS_EXPR: case MINUS_EXPR:
6859 /* See if we can eliminate the operation on both sides. If we can, we
6860 can return a new PLUS or MINUS. If we can't, the only remaining
6861 cases where we can do anything are if the second operand is a
6862 constant. */
6863 sub_strict_overflow_p = false;
6864 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6865 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6866 if (t1 != 0 && t2 != 0
6867 && TYPE_OVERFLOW_WRAPS (ctype)
6868 && (code == MULT_EXPR
6869 /* If not multiplication, we can only do this if both operands
6870 are divisible by c. */
6871 || (multiple_of_p (ctype, op0, c)
6872 && multiple_of_p (ctype, op1, c))))
6874 if (sub_strict_overflow_p)
6875 *strict_overflow_p = true;
6876 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6877 fold_convert (ctype, t2));
6880 /* If this was a subtraction, negate OP1 and set it to be an addition.
6881 This simplifies the logic below. */
6882 if (tcode == MINUS_EXPR)
6884 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6885 /* If OP1 was not easily negatable, the constant may be OP0. */
6886 if (TREE_CODE (op0) == INTEGER_CST)
6888 std::swap (op0, op1);
6889 std::swap (t1, t2);
6893 if (TREE_CODE (op1) != INTEGER_CST)
6894 break;
6896 /* If either OP1 or C is negative, this optimization is not safe for
6897 some of the division and remainder types while for others we need
6898 to change the code. */
6899 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6901 if (code == CEIL_DIV_EXPR)
6902 code = FLOOR_DIV_EXPR;
6903 else if (code == FLOOR_DIV_EXPR)
6904 code = CEIL_DIV_EXPR;
6905 else if (code != MULT_EXPR
6906 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6907 break;
6910 /* If it's a multiply or a division/modulus operation of a multiple
6911 of our constant, do the operation and verify it doesn't overflow. */
6912 if (code == MULT_EXPR
6913 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6914 TYPE_SIGN (type)))
6916 op1 = const_binop (code, fold_convert (ctype, op1),
6917 fold_convert (ctype, c));
6918 /* We allow the constant to overflow with wrapping semantics. */
6919 if (op1 == 0
6920 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6921 break;
6923 else
6924 break;
6926 /* If we have an unsigned type, we cannot widen the operation since it
6927 will change the result if the original computation overflowed. */
6928 if (TYPE_UNSIGNED (ctype) && ctype != type)
6929 break;
6931 /* The last case is if we are a multiply. In that case, we can
6932 apply the distributive law to commute the multiply and addition
6933 if the multiplication of the constants doesn't overflow
6934 and overflow is defined. With undefined overflow
6935 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6936 But fold_plusminus_mult_expr would factor back any power-of-two
6937 value so do not distribute in the first place in this case. */
6938 if (code == MULT_EXPR
6939 && TYPE_OVERFLOW_WRAPS (ctype)
6940 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6941 return fold_build2 (tcode, ctype,
6942 fold_build2 (code, ctype,
6943 fold_convert (ctype, op0),
6944 fold_convert (ctype, c)),
6945 op1);
6947 break;
6949 case MULT_EXPR:
6950 /* We have a special case here if we are doing something like
6951 (C * 8) % 4 since we know that's zero. */
6952 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6953 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6954 /* If the multiplication can overflow we cannot optimize this. */
6955 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6956 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6957 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6958 TYPE_SIGN (type)))
6960 *strict_overflow_p = true;
6961 return omit_one_operand (type, integer_zero_node, op0);
6964 /* ... fall through ... */
6966 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6967 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6968 /* If we can extract our operation from the LHS, do so and return a
6969 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6970 do something only if the second operand is a constant. */
6971 if (same_p
6972 && TYPE_OVERFLOW_WRAPS (ctype)
6973 && (t1 = extract_muldiv (op0, c, code, wide_type,
6974 strict_overflow_p)) != 0)
6975 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6976 fold_convert (ctype, op1));
6977 else if (tcode == MULT_EXPR && code == MULT_EXPR
6978 && TYPE_OVERFLOW_WRAPS (ctype)
6979 && (t1 = extract_muldiv (op1, c, code, wide_type,
6980 strict_overflow_p)) != 0)
6981 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6982 fold_convert (ctype, t1));
6983 else if (TREE_CODE (op1) != INTEGER_CST)
6984 return 0;
6986 /* If these are the same operation types, we can associate them
6987 assuming no overflow. */
6988 if (tcode == code)
6990 bool overflow_p = false;
6991 wi::overflow_type overflow_mul;
6992 signop sign = TYPE_SIGN (ctype);
6993 unsigned prec = TYPE_PRECISION (ctype);
6994 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6995 wi::to_wide (c, prec),
6996 sign, &overflow_mul);
6997 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6998 if (overflow_mul
6999 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7000 overflow_p = true;
7001 if (!overflow_p)
7002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7003 wide_int_to_tree (ctype, mul));
7006 /* If these operations "cancel" each other, we have the main
7007 optimizations of this pass, which occur when either constant is a
7008 multiple of the other, in which case we replace this with an
7009 operation of either CODE or TCODE.
7011 If we have an unsigned type, we cannot do this since it will change
7012 the result if the original computation overflowed. */
7013 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7014 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7015 || (tcode == MULT_EXPR
7016 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7017 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7018 && code != MULT_EXPR)))
7020 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7021 TYPE_SIGN (type)))
7023 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7024 *strict_overflow_p = true;
7025 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7026 fold_convert (ctype,
7027 const_binop (TRUNC_DIV_EXPR,
7028 op1, c)));
7030 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7031 TYPE_SIGN (type)))
7033 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7034 *strict_overflow_p = true;
7035 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7036 fold_convert (ctype,
7037 const_binop (TRUNC_DIV_EXPR,
7038 c, op1)));
7041 break;
7043 default:
7044 break;
7047 return 0;
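/* Worked example (illustrative, not from the original source):
   extract_muldiv on T = (X * 8) + (Y * 16) with C = 4 and a division
   CODE recurses through the PLUS_EXPR arm above; each MULT_EXPR side
   divides exactly by 4, so, under the overflow conditions checked
   above, the result is (X * 2) + (Y * 4), matching the example in the
   comment before extract_muldiv.  */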
7050 /* Return a node which has the indicated constant VALUE (either 0 or
7051 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7052 and is of the indicated TYPE. */
7054 tree
7055 constant_boolean_node (bool value, tree type)
7057 if (type == integer_type_node)
7058 return value ? integer_one_node : integer_zero_node;
7059 else if (type == boolean_type_node)
7060 return value ? boolean_true_node : boolean_false_node;
7061 else if (TREE_CODE (type) == VECTOR_TYPE)
7062 return build_vector_from_val (type,
7063 build_int_cst (TREE_TYPE (type),
7064 value ? -1 : 0));
7065 else
7066 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7070 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7071 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7072 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7073 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7074 COND is the first argument to CODE; otherwise (as in the example
7075 given here), it is the second argument. TYPE is the type of the
7076 original expression. Return NULL_TREE if no simplification is
7077 possible. */
7079 static tree
7080 fold_binary_op_with_conditional_arg (location_t loc,
7081 enum tree_code code,
7082 tree type, tree op0, tree op1,
7083 tree cond, tree arg, int cond_first_p)
7085 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7086 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7087 tree test, true_value, false_value;
7088 tree lhs = NULL_TREE;
7089 tree rhs = NULL_TREE;
7090 enum tree_code cond_code = COND_EXPR;
7092 /* Do not move possibly trapping operations into the conditional as this
7093 pessimizes code and causes gimplification issues when applied late. */
7094 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7095 ANY_INTEGRAL_TYPE_P (type)
7096 && TYPE_OVERFLOW_TRAPS (type), op1))
7097 return NULL_TREE;
7099 if (TREE_CODE (cond) == COND_EXPR
7100 || TREE_CODE (cond) == VEC_COND_EXPR)
7102 test = TREE_OPERAND (cond, 0);
7103 true_value = TREE_OPERAND (cond, 1);
7104 false_value = TREE_OPERAND (cond, 2);
7105 /* If this operand is a throw expression (its type is void), it does
7106 not make sense to try to perform a logical or arithmetic operation
7107 involving it. */
7108 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7109 lhs = true_value;
7110 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7111 rhs = false_value;
7113 else if (!(TREE_CODE (type) != VECTOR_TYPE
7114 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7116 tree testtype = TREE_TYPE (cond);
7117 test = cond;
7118 true_value = constant_boolean_node (true, testtype);
7119 false_value = constant_boolean_node (false, testtype);
7121 else
7122 /* Detect the case of mixing vector and scalar types - bail out. */
7123 return NULL_TREE;
7125 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7126 cond_code = VEC_COND_EXPR;
7128 /* This transformation is only worthwhile if we don't have to wrap ARG
7129 in a SAVE_EXPR and the operation can be simplified without recursing
7130 on at least one of the branches once it's pushed inside the COND_EXPR. */
7131 if (!TREE_CONSTANT (arg)
7132 && (TREE_SIDE_EFFECTS (arg)
7133 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7134 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7135 return NULL_TREE;
7137 arg = fold_convert_loc (loc, arg_type, arg);
7138 if (lhs == 0)
7140 true_value = fold_convert_loc (loc, cond_type, true_value);
7141 if (cond_first_p)
7142 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7143 else
7144 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7146 if (rhs == 0)
7148 false_value = fold_convert_loc (loc, cond_type, false_value);
7149 if (cond_first_p)
7150 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7151 else
7152 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7155 /* Check that we have simplified at least one of the branches. */
7156 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7157 return NULL_TREE;
7159 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
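/* Example (illustrative, not from the original source): with
   CODE = PLUS_EXPR, ARG = a and COND = (x < y), the comparison is
   wrapped as (x < y) ? 1 : 0 and the addition is pushed into both arms:
     a + (x < y)  -->  (x < y) ? a + 1 : a + 0
   and the result is kept only if ARG or one of the new arms is
   TREE_CONSTANT, per the checks above.  */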
7163 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7165 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7166 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7167 if ARG - ZERO_ARG is the same as ARG.
7169 If ARG is NULL, check for any value of type TYPE.
7171 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7172 and finite. The problematic cases are when X is zero, and its mode
7173 has signed zeros. In the case of rounding towards -infinity,
7174 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7175 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7177 bool
7178 fold_real_zero_addition_p (const_tree type, const_tree arg,
7179 const_tree zero_arg, int negate)
7181 if (!real_zerop (zero_arg))
7182 return false;
7184 /* Don't allow the fold with -fsignaling-nans. */
7185 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7186 return false;
7188 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7189 if (!HONOR_SIGNED_ZEROS (type))
7190 return true;
7192 /* There is no case that is safe for all rounding modes. */
7193 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7194 return false;
7196 /* In a vector or complex, we would need to check the sign of all zeros. */
7197 if (TREE_CODE (zero_arg) == VECTOR_CST)
7198 zero_arg = uniform_vector_p (zero_arg);
7199 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7200 return false;
7202 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7203 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7204 negate = !negate;
7206 /* The mode has signed zeros, and we have to honor their sign.
7207 In this situation, there are only two cases we can return true for.
7208 (i) X - 0 is the same as X with default rounding.
7209 (ii) X + 0 is X when X can't possibly be -0.0. */
7210 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
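/* Concrete instances (illustrative, not from the original source),
   with signed zeros honored and default rounding:
     ARG - 0.0  ->  ARG   is allowed (case (i) above), but
     ARG + 0.0  ->  ARG   is not, since (-0.0) + 0.0 is +0.0,
   and neither fold is done when ARG may be a signaling NaN.  */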
7213 /* Subroutine of match.pd that optimizes comparisons of a division by
7214 a nonzero integer constant against an integer constant, i.e.
7215 X/C1 op C2.
7217 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7218 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7220 enum tree_code
7221 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7222 tree *hi, bool *neg_overflow)
7224 tree prod, tmp, type = TREE_TYPE (c1);
7225 signop sign = TYPE_SIGN (type);
7226 wi::overflow_type overflow;
7228 /* We have to do this the hard way to detect unsigned overflow.
7229 prod = int_const_binop (MULT_EXPR, c1, c2); */
7230 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7231 prod = force_fit_type (type, val, -1, overflow);
7232 *neg_overflow = false;
7234 if (sign == UNSIGNED)
7236 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7237 *lo = prod;
7239 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7240 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7241 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7243 else if (tree_int_cst_sgn (c1) >= 0)
7245 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7246 switch (tree_int_cst_sgn (c2))
7248 case -1:
7249 *neg_overflow = true;
7250 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7251 *hi = prod;
7252 break;
7254 case 0:
7255 *lo = fold_negate_const (tmp, type);
7256 *hi = tmp;
7257 break;
7259 case 1:
7260 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7261 *lo = prod;
7262 break;
7264 default:
7265 gcc_unreachable ();
7268 else
7270 /* A negative divisor reverses the relational operators. */
7271 code = swap_tree_comparison (code);
7273 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7274 switch (tree_int_cst_sgn (c2))
7276 case -1:
7277 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7278 *lo = prod;
7279 break;
7281 case 0:
7282 *hi = fold_negate_const (tmp, type);
7283 *lo = tmp;
7284 break;
7286 case 1:
7287 *neg_overflow = true;
7288 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7289 *hi = prod;
7290 break;
7292 default:
7293 gcc_unreachable ();
7297 if (code != EQ_EXPR && code != NE_EXPR)
7298 return code;
7300 if (TREE_OVERFLOW (*lo)
7301 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7302 *lo = NULL_TREE;
7303 if (TREE_OVERFLOW (*hi)
7304 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7305 *hi = NULL_TREE;
7307 return code;
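/* Illustrative sketch, not part of GCC: the bounds computed above for a
   positive divisor and positive quotient, in plain C.  For signed
   X / 6 == 2 we get prod = 12, tmp = 5, so the test becomes
   12 <= X && X <= 17.  The helper name is hypothetical.  */
#include <stdio.h>

static void
div_compare_bounds (long c1, long c2, long *lo, long *hi)
{
  long prod = c1 * c2;   /* Assumed not to overflow in this demo.  */
  long tmp = c1 - 1;
  *lo = prod;            /* Matches the sign(c2) == 1 case above.  */
  *hi = prod + tmp;
}

int
main (void)
{
  long lo, hi;
  div_compare_bounds (6, 2, &lo, &hi);
  for (long x = lo - 1; x <= hi + 1; x++)
    if ((x / 6 == 2) != (x >= lo && x <= hi))
      return 1;
  printf ("x / 6 == 2  <=>  %ld <= x && x <= %ld\n", lo, hi);
  return 0;
}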
7311 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7312 equality/inequality test, then return a simplified form of the test
7313 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
7314 result type. */
7316 static tree
7317 fold_single_bit_test_into_sign_test (location_t loc,
7318 enum tree_code code, tree arg0, tree arg1,
7319 tree result_type)
7321 /* If this is testing a single bit, we can optimize the test. */
7322 if ((code == NE_EXPR || code == EQ_EXPR)
7323 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7324 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7326 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7327 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7328 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7330 if (arg00 != NULL_TREE
7331 /* This is only a win if casting to a signed type is cheap,
7332 i.e. when arg00's type is not a partial mode. */
7333 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7335 tree stype = signed_type_for (TREE_TYPE (arg00));
7336 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7337 result_type,
7338 fold_convert_loc (loc, stype, arg00),
7339 build_int_cst (stype, 0));
7343 return NULL_TREE;
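/* Illustrative sketch, not part of GCC: the sign-test rewrite above on a
   concrete 32-bit value.  When C is the sign bit of A's type,
   (A & C) != 0 is exactly the signed comparison (int32_t) A < 0.  */
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t a = 0x90000000u;
  int bit_test = (a & 0x80000000u) != 0;
  int sign_test = (int32_t) a < 0;
  printf ("%d %d\n", bit_test, sign_test);   /* Prints "1 1".  */
  return 0;
}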
7346 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7347 equality/inequality test, then return a simplified form of
7348 the test using shifts and logical operations. Otherwise return
7349 NULL. RESULT_TYPE is the desired result type. */
7351 tree
7352 fold_single_bit_test (location_t loc, enum tree_code code,
7353 tree arg0, tree arg1, tree result_type)
7355 /* If this is testing a single bit, we can optimize the test. */
7356 if ((code == NE_EXPR || code == EQ_EXPR)
7357 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7358 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7360 tree inner = TREE_OPERAND (arg0, 0);
7361 tree type = TREE_TYPE (arg0);
7362 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7363 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7364 int ops_unsigned;
7365 tree signed_type, unsigned_type, intermediate_type;
7366 tree tem, one;
7368 /* First, see if we can fold the single bit test into a sign-bit
7369 test. */
7370 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7371 result_type);
7372 if (tem)
7373 return tem;
7375 /* Otherwise we have (A & C) != 0 where C is a single bit,
7376 convert that into ((A >> C2) & 1), where C2 = log2(C).
7377 Similarly for (A & C) == 0. */
7379 /* If INNER is a right shift of a constant and it plus BITNUM does
7380 not overflow, adjust BITNUM and INNER. */
7381 if (TREE_CODE (inner) == RSHIFT_EXPR
7382 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7383 && bitnum < TYPE_PRECISION (type)
7384 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7385 TYPE_PRECISION (type) - bitnum))
7387 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7388 inner = TREE_OPERAND (inner, 0);
7391 /* If we are going to be able to omit the AND below, we must do our
7392 operations as unsigned. If we must use the AND, we have a choice.
7393 Normally unsigned is faster, but for some machines signed is. */
7394 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7395 && !flag_syntax_only) ? 0 : 1;
7397 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7398 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7399 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7400 inner = fold_convert_loc (loc, intermediate_type, inner);
7402 if (bitnum != 0)
7403 inner = build2 (RSHIFT_EXPR, intermediate_type,
7404 inner, size_int (bitnum));
7406 one = build_int_cst (intermediate_type, 1);
7408 if (code == EQ_EXPR)
7409 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7411 /* Put the AND last so it can combine with more things. */
7412 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7414 /* Make sure to return the proper type. */
7415 inner = fold_convert_loc (loc, result_type, inner);
7417 return inner;
7419 return NULL_TREE;
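/* Illustrative sketch, not part of GCC: the shift form produced above for
   C = 8, hence C2 = log2 (C) = 3.  The EQ_EXPR variant XORs with 1 before
   the final AND, exactly as in the code above.  */
#include <stdio.h>

int
main (void)
{
  unsigned a = 0x1c;                  /* Bit 3 is set.  */
  unsigned ne = (a >> 3) & 1;         /* (a & 8) != 0  */
  unsigned eq = ((a >> 3) ^ 1) & 1;   /* (a & 8) == 0  */
  printf ("%u %u\n", ne, eq);         /* Prints "1 0".  */
  return 0;
}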
7422 /* Test whether it is preferable to swap two operands, ARG0 and
7423 ARG1, for example because ARG0 is an integer constant and ARG1
7424 isn't. */
7426 bool
7427 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7429 if (CONSTANT_CLASS_P (arg1))
7430 return 0;
7431 if (CONSTANT_CLASS_P (arg0))
7432 return 1;
7434 STRIP_NOPS (arg0);
7435 STRIP_NOPS (arg1);
7437 if (TREE_CONSTANT (arg1))
7438 return 0;
7439 if (TREE_CONSTANT (arg0))
7440 return 1;
7442 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7443 for commutative and comparison operators. Ensuring a canonical
7444 form allows the optimizers to find additional redundancies without
7445 having to explicitly check for both orderings. */
7446 if (TREE_CODE (arg0) == SSA_NAME
7447 && TREE_CODE (arg1) == SSA_NAME
7448 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7449 return 1;
7451 /* Put SSA_NAMEs last. */
7452 if (TREE_CODE (arg1) == SSA_NAME)
7453 return 0;
7454 if (TREE_CODE (arg0) == SSA_NAME)
7455 return 1;
7457 /* Put variables last. */
7458 if (DECL_P (arg1))
7459 return 0;
7460 if (DECL_P (arg0))
7461 return 1;
7463 return 0;
7467 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7468 means A >= Y && A != MAX, but in this case we know that
7469 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7471 static tree
7472 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7474 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7476 if (TREE_CODE (bound) == LT_EXPR)
7477 a = TREE_OPERAND (bound, 0);
7478 else if (TREE_CODE (bound) == GT_EXPR)
7479 a = TREE_OPERAND (bound, 1);
7480 else
7481 return NULL_TREE;
7483 typea = TREE_TYPE (a);
7484 if (!INTEGRAL_TYPE_P (typea)
7485 && !POINTER_TYPE_P (typea))
7486 return NULL_TREE;
7488 if (TREE_CODE (ineq) == LT_EXPR)
7490 a1 = TREE_OPERAND (ineq, 1);
7491 y = TREE_OPERAND (ineq, 0);
7493 else if (TREE_CODE (ineq) == GT_EXPR)
7495 a1 = TREE_OPERAND (ineq, 0);
7496 y = TREE_OPERAND (ineq, 1);
7498 else
7499 return NULL_TREE;
7501 if (TREE_TYPE (a1) != typea)
7502 return NULL_TREE;
7504 if (POINTER_TYPE_P (typea))
7506 /* Convert the pointers to integers before taking the difference. */
7507 tree ta = fold_convert_loc (loc, ssizetype, a);
7508 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7509 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7511 else
7512 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7514 if (!diff || !integer_onep (diff))
7515 return NULL_TREE;
7517 return fold_build2_loc (loc, GE_EXPR, type, a, y);
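/* Illustrative sketch, not part of GCC: why A + 1 > Y can be weakened to
   A >= Y under the bound A < X.  Since A < X <= MAX, A + 1 never wraps,
   so the two tests agree; checked exhaustively over a small range.  */
#include <stdio.h>

int
main (void)
{
  int x = 50, y = 7;
  for (int a = -100; a < x; a++)      /* BOUND: A < X holds.  */
    if ((a + 1 > y) != (a >= y))
      return 1;
  printf ("a + 1 > y  <=>  a >= y  for all a < x\n");
  return 0;
}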
7520 /* Fold a sum or difference of at least one multiplication.
7521 Returns the folded tree or NULL if no simplification could be made. */
7523 static tree
7524 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7525 tree arg0, tree arg1)
7527 tree arg00, arg01, arg10, arg11;
7528 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7530 /* (A * C) +- (B * C) -> (A+-B) * C.
7531 (A * C) +- A -> A * (C+-1).
7532 We are most concerned about the case where C is a constant,
7533 but other combinations show up during loop reduction. Since
7534 it is not difficult, try all four possibilities. */
7536 if (TREE_CODE (arg0) == MULT_EXPR)
7538 arg00 = TREE_OPERAND (arg0, 0);
7539 arg01 = TREE_OPERAND (arg0, 1);
7541 else if (TREE_CODE (arg0) == INTEGER_CST)
7543 arg00 = build_one_cst (type);
7544 arg01 = arg0;
7546 else
7548 /* We cannot generate constant 1 for fract. */
7549 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7550 return NULL_TREE;
7551 arg00 = arg0;
7552 arg01 = build_one_cst (type);
7554 if (TREE_CODE (arg1) == MULT_EXPR)
7556 arg10 = TREE_OPERAND (arg1, 0);
7557 arg11 = TREE_OPERAND (arg1, 1);
7559 else if (TREE_CODE (arg1) == INTEGER_CST)
7561 arg10 = build_one_cst (type);
7562 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7563 the purpose of this canonicalization. */
7564 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7565 && negate_expr_p (arg1)
7566 && code == PLUS_EXPR)
7568 arg11 = negate_expr (arg1);
7569 code = MINUS_EXPR;
7571 else
7572 arg11 = arg1;
7574 else
7576 /* We cannot generate constant 1 for fract. */
7577 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7578 return NULL_TREE;
7579 arg10 = arg1;
7580 arg11 = build_one_cst (type);
7582 same = NULL_TREE;
7584 /* Prefer factoring a common non-constant. */
7585 if (operand_equal_p (arg00, arg10, 0))
7586 same = arg00, alt0 = arg01, alt1 = arg11;
7587 else if (operand_equal_p (arg01, arg11, 0))
7588 same = arg01, alt0 = arg00, alt1 = arg10;
7589 else if (operand_equal_p (arg00, arg11, 0))
7590 same = arg00, alt0 = arg01, alt1 = arg10;
7591 else if (operand_equal_p (arg01, arg10, 0))
7592 same = arg01, alt0 = arg00, alt1 = arg11;
7594 /* No identical multiplicands; see if we can find a common
7595 power-of-two factor in non-power-of-two multiplies. This
7596 can help in multi-dimensional array access. */
7597 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7599 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7600 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7601 HOST_WIDE_INT tmp;
7602 bool swap = false;
7603 tree maybe_same;
7605 /* Move min of absolute values to int11. */
7606 if (absu_hwi (int01) < absu_hwi (int11))
7608 tmp = int01, int01 = int11, int11 = tmp;
7609 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7610 maybe_same = arg01;
7611 swap = true;
7613 else
7614 maybe_same = arg11;
7616 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7617 if (factor > 1
7618 && pow2p_hwi (factor)
7619 && (int01 & (factor - 1)) == 0
7620 /* The remainder should not be a constant, otherwise we
7621 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7622 increased the number of multiplications necessary. */
7623 && TREE_CODE (arg10) != INTEGER_CST)
7625 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7626 build_int_cst (TREE_TYPE (arg00),
7627 int01 / int11));
7628 alt1 = arg10;
7629 same = maybe_same;
7630 if (swap)
7631 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7635 if (!same)
7636 return NULL_TREE;
7638 if (! ANY_INTEGRAL_TYPE_P (type)
7639 || TYPE_OVERFLOW_WRAPS (type)
7640 /* We are neither factoring zero nor minus one. */
7641 || TREE_CODE (same) == INTEGER_CST)
7642 return fold_build2_loc (loc, MULT_EXPR, type,
7643 fold_build2_loc (loc, code, type,
7644 fold_convert_loc (loc, type, alt0),
7645 fold_convert_loc (loc, type, alt1)),
7646 fold_convert_loc (loc, type, same));
7648 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7649 same may be minus one and thus the multiplication may overflow. Perform
7650 the sum operation in an unsigned type. */
7651 tree utype = unsigned_type_for (type);
7652 tree tem = fold_build2_loc (loc, code, utype,
7653 fold_convert_loc (loc, utype, alt0),
7654 fold_convert_loc (loc, utype, alt1));
7655 /* If the sum evaluated to a constant that is not the most negative
7656 value of the type, the multiplication cannot overflow. */
7657 if (TREE_CODE (tem) == INTEGER_CST
7658 && (wi::to_wide (tem)
7659 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7660 return fold_build2_loc (loc, MULT_EXPR, type,
7661 fold_convert (type, tem), same);
7663 /* Do not resort to unsigned multiplication because
7664 we lose the no-overflow property of the expression. */
7665 return NULL_TREE;
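/* Illustrative sketch, not part of GCC: the unsigned fallback above.  For
   signed types without wrapping overflow, A + B may overflow even though
   A*C + B*C does not (e.g. when C is 0), so the sum is formed in the
   unsigned type, where wrap-around is well defined, and multiplied there.  */
#include <limits.h>
#include <stdio.h>

static int
factored (int a, int b, int c)
{
  /* a*c + b*c rewritten as ((unsigned) a + (unsigned) b) * c.  */
  unsigned sum = (unsigned) a + (unsigned) b;
  return (int) (sum * (unsigned) c);
}

int
main (void)
{
  /* a + b would overflow int, yet a*c + b*c is a well-defined 0.  */
  printf ("%d\n", factored (INT_MAX, 1, 0));   /* Prints "0".  */
  return 0;
}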
7668 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7669 specified by EXPR into the buffer PTR of length LEN bytes.
7670 Return the number of bytes placed in the buffer, or zero
7671 upon failure. */
7673 static int
7674 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7676 tree type = TREE_TYPE (expr);
7677 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7678 int byte, offset, word, words;
7679 unsigned char value;
7681 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7682 return 0;
7683 if (off == -1)
7684 off = 0;
7686 if (ptr == NULL)
7687 /* Dry run. */
7688 return MIN (len, total_bytes - off);
7690 words = total_bytes / UNITS_PER_WORD;
7692 for (byte = 0; byte < total_bytes; byte++)
7694 int bitpos = byte * BITS_PER_UNIT;
7695 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7696 number of bytes. */
7697 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7699 if (total_bytes > UNITS_PER_WORD)
7701 word = byte / UNITS_PER_WORD;
7702 if (WORDS_BIG_ENDIAN)
7703 word = (words - 1) - word;
7704 offset = word * UNITS_PER_WORD;
7705 if (BYTES_BIG_ENDIAN)
7706 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7707 else
7708 offset += byte % UNITS_PER_WORD;
7710 else
7711 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7712 if (offset >= off && offset - off < len)
7713 ptr[offset - off] = value;
7715 return MIN (len, total_bytes - off);
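/* Illustrative sketch, not part of GCC: the byte-extraction loop above for
   the common case of a little-endian host and target with 8-bit bytes,
   where byte I of the value lands at buffer offset I.  */
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t expr = 0x11223344u;
  unsigned char ptr[4];
  for (int byte = 0; byte < 4; byte++)
    ptr[byte] = (unsigned char) (expr >> (byte * 8));   /* 8 bits each  */
  printf ("%02x %02x %02x %02x\n", ptr[0], ptr[1], ptr[2], ptr[3]);
  /* Prints "44 33 22 11".  */
  return 0;
}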
7719 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7720 specified by EXPR into the buffer PTR of length LEN bytes.
7721 Return the number of bytes placed in the buffer, or zero
7722 upon failure. */
7724 static int
7725 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7727 tree type = TREE_TYPE (expr);
7728 scalar_mode mode = SCALAR_TYPE_MODE (type);
7729 int total_bytes = GET_MODE_SIZE (mode);
7730 FIXED_VALUE_TYPE value;
7731 tree i_value, i_type;
7733 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7734 return 0;
7736 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7738 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7739 return 0;
7741 value = TREE_FIXED_CST (expr);
7742 i_value = double_int_to_tree (i_type, value.data);
7744 return native_encode_int (i_value, ptr, len, off);
7748 /* Subroutine of native_encode_expr. Encode the REAL_CST
7749 specified by EXPR into the buffer PTR of length LEN bytes.
7750 Return the number of bytes placed in the buffer, or zero
7751 upon failure. */
7753 static int
7754 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7756 tree type = TREE_TYPE (expr);
7757 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7758 int byte, offset, word, words, bitpos;
7759 unsigned char value;
7761 /* There are always 32 bits in each long, no matter the size of
7762 the host's long. We handle floating point representations with
7763 up to 192 bits. */
7764 long tmp[6];
7766 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7767 return 0;
7768 if (off == -1)
7769 off = 0;
7771 if (ptr == NULL)
7772 /* Dry run. */
7773 return MIN (len, total_bytes - off);
7775 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7777 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7779 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7780 bitpos += BITS_PER_UNIT)
7782 byte = (bitpos / BITS_PER_UNIT) & 3;
7783 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7785 if (UNITS_PER_WORD < 4)
7787 word = byte / UNITS_PER_WORD;
7788 if (WORDS_BIG_ENDIAN)
7789 word = (words - 1) - word;
7790 offset = word * UNITS_PER_WORD;
7791 if (BYTES_BIG_ENDIAN)
7792 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7793 else
7794 offset += byte % UNITS_PER_WORD;
7796 else
7798 offset = byte;
7799 if (BYTES_BIG_ENDIAN)
7801 /* Reverse bytes within each long, or within the entire float
7802 if it's smaller than a long (for HFmode). */
7803 offset = MIN (3, total_bytes - 1) - offset;
7804 gcc_assert (offset >= 0);
7807 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7808 if (offset >= off
7809 && offset - off < len)
7810 ptr[offset - off] = value;
7812 return MIN (len, total_bytes - off);
7815 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7816 specified by EXPR into the buffer PTR of length LEN bytes.
7817 Return the number of bytes placed in the buffer, or zero
7818 upon failure. */
7820 static int
7821 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7823 int rsize, isize;
7824 tree part;
7826 part = TREE_REALPART (expr);
7827 rsize = native_encode_expr (part, ptr, len, off);
7828 if (off == -1 && rsize == 0)
7829 return 0;
7830 part = TREE_IMAGPART (expr);
7831 if (off != -1)
7832 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7833 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7834 len - rsize, off);
7835 if (off == -1 && isize != rsize)
7836 return 0;
7837 return rsize + isize;
7840 /* Like native_encode_vector, but only encode the first COUNT elements.
7841 The other arguments are as for native_encode_vector. */
7843 static int
7844 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7845 int off, unsigned HOST_WIDE_INT count)
7847 tree itype = TREE_TYPE (TREE_TYPE (expr));
7848 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7849 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7851 /* This is the only case in which elements can be smaller than a byte.
7852 Element 0 is always in the lsb of the containing byte. */
7853 unsigned int elt_bits = TYPE_PRECISION (itype);
7854 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7855 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7856 return 0;
7858 if (off == -1)
7859 off = 0;
7861 /* Zero the buffer and then set bits later where necessary. */
7862 int extract_bytes = MIN (len, total_bytes - off);
7863 if (ptr)
7864 memset (ptr, 0, extract_bytes);
7866 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7867 unsigned int first_elt = off * elts_per_byte;
7868 unsigned int extract_elts = extract_bytes * elts_per_byte;
7869 for (unsigned int i = 0; i < extract_elts; ++i)
7871 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7872 if (TREE_CODE (elt) != INTEGER_CST)
7873 return 0;
7875 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7877 unsigned int bit = i * elt_bits;
7878 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7881 return extract_bytes;
7884 int offset = 0;
7885 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7886 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7888 if (off >= size)
7890 off -= size;
7891 continue;
7893 tree elem = VECTOR_CST_ELT (expr, i);
7894 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7895 len - offset, off);
7896 if ((off == -1 && res != size) || res == 0)
7897 return 0;
7898 offset += res;
7899 if (offset >= len)
7900 return (off == -1 && i < count - 1) ? 0 : offset;
7901 if (off != -1)
7902 off = 0;
7904 return offset;
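/* Illustrative sketch, not part of GCC: the sub-byte packing used above
   for boolean vectors with 1-bit elements.  Element 0 goes in the least
   significant bit of byte 0, element 8 in the lsb of byte 1, etc.  */
#include <stdio.h>

int
main (void)
{
  const unsigned char elts[12] = { 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0 };
  unsigned char buf[2] = { 0, 0 };
  for (unsigned i = 0; i < 12; i++)
    if (elts[i])
      buf[i / 8] |= (unsigned char) (1u << (i % 8));
  printf ("%02x %02x\n", buf[0], buf[1]);   /* Prints "8d 03".  */
  return 0;
}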
7907 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7908 specified by EXPR into the buffer PTR of length LEN bytes.
7909 Return the number of bytes placed in the buffer, or zero
7910 upon failure. */
7912 static int
7913 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7915 unsigned HOST_WIDE_INT count;
7916 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7917 return 0;
7918 return native_encode_vector_part (expr, ptr, len, off, count);
7922 /* Subroutine of native_encode_expr. Encode the STRING_CST
7923 specified by EXPR into the buffer PTR of length LEN bytes.
7924 Return the number of bytes placed in the buffer, or zero
7925 upon failure. */
7927 static int
7928 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7930 tree type = TREE_TYPE (expr);
7932 /* Wide-char strings are encoded in target byte-order, so natively
7933 encoding them is trivial. */
7934 if (BITS_PER_UNIT != CHAR_BIT
7935 || TREE_CODE (type) != ARRAY_TYPE
7936 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7937 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7938 return 0;
7940 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7941 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7942 return 0;
7943 if (off == -1)
7944 off = 0;
7945 len = MIN (total_bytes - off, len);
7946 if (ptr == NULL)
7947 /* Dry run. */;
7948 else
7950 int written = 0;
7951 if (off < TREE_STRING_LENGTH (expr))
7953 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7954 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7956 memset (ptr + written, 0, len - written);
7958 return len;
7962 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7963 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7964 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7965 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7966 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7967 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7968 Return the number of bytes placed in the buffer, or zero upon failure. */
7970 int
7971 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7973 /* We don't support starting at a negative offset, and -1 is special. */
7974 if (off < -1)
7975 return 0;
7977 switch (TREE_CODE (expr))
7979 case INTEGER_CST:
7980 return native_encode_int (expr, ptr, len, off);
7982 case REAL_CST:
7983 return native_encode_real (expr, ptr, len, off);
7985 case FIXED_CST:
7986 return native_encode_fixed (expr, ptr, len, off);
7988 case COMPLEX_CST:
7989 return native_encode_complex (expr, ptr, len, off);
7991 case VECTOR_CST:
7992 return native_encode_vector (expr, ptr, len, off);
7994 case STRING_CST:
7995 return native_encode_string (expr, ptr, len, off);
7997 default:
7998 return 0;
8002 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
8003 and larger than or equal to FIELDSIZE bytes, with underlying mode
8004 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
8005 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
8007 tree
8008 find_bitfield_repr_type (int fieldsize, int len)
8010 machine_mode mode;
8011 for (int pass = 0; pass < 2; pass++)
8013 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8014 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8015 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8016 && known_eq (GET_MODE_PRECISION (mode),
8017 GET_MODE_BITSIZE (mode))
8018 && known_le (GET_MODE_SIZE (mode), len))
8020 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8021 if (ret && TYPE_MODE (ret) == mode)
8022 return ret;
8026 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8027 if (int_n_enabled_p[i]
8028 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8029 && int_n_trees[i].unsigned_type)
8031 tree ret = int_n_trees[i].unsigned_type;
8032 mode = TYPE_MODE (ret);
8033 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8034 && known_eq (GET_MODE_PRECISION (mode),
8035 GET_MODE_BITSIZE (mode))
8036 && known_le (GET_MODE_SIZE (mode), len))
8037 return ret;
8040 return NULL_TREE;
8043 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8044 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8045 has to be non-NULL and OFF zero), then in addition to filling the
8046 bytes pointed to by PTR with the value, also clear any bits pointed
8047 to by MASK that are known to be initialized; leave them set for
8048 e.g. uninitialized padding bits or uninitialized fields. */
8050 int
8051 native_encode_initializer (tree init, unsigned char *ptr, int len,
8052 int off, unsigned char *mask)
8054 int r;
8056 /* We don't support starting at a negative offset, and -1 is special. */
8057 if (off < -1 || init == NULL_TREE)
8058 return 0;
8060 gcc_assert (mask == NULL || (off == 0 && ptr));
8062 STRIP_NOPS (init);
8063 switch (TREE_CODE (init))
8065 case VIEW_CONVERT_EXPR:
8066 case NON_LVALUE_EXPR:
8067 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8068 mask);
8069 default:
8070 r = native_encode_expr (init, ptr, len, off);
8071 if (mask)
8072 memset (mask, 0, r);
8073 return r;
8074 case CONSTRUCTOR:
8075 tree type = TREE_TYPE (init);
8076 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8077 if (total_bytes < 0)
8078 return 0;
8079 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8080 return 0;
8081 int o = off == -1 ? 0 : off;
8082 if (TREE_CODE (type) == ARRAY_TYPE)
8084 tree min_index;
8085 unsigned HOST_WIDE_INT cnt;
8086 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8087 constructor_elt *ce;
8089 if (!TYPE_DOMAIN (type)
8090 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8091 return 0;
8093 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8094 if (fieldsize <= 0)
8095 return 0;
8097 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8098 if (ptr)
8099 memset (ptr, '\0', MIN (total_bytes - off, len));
8101 for (cnt = 0; ; cnt++)
8103 tree val = NULL_TREE, index = NULL_TREE;
8104 HOST_WIDE_INT pos = curpos, count = 0;
8105 bool full = false;
8106 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8108 val = ce->value;
8109 index = ce->index;
8111 else if (mask == NULL
8112 || CONSTRUCTOR_NO_CLEARING (init)
8113 || curpos >= total_bytes)
8114 break;
8115 else
8116 pos = total_bytes;
8118 if (index && TREE_CODE (index) == RANGE_EXPR)
8120 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8121 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8122 return 0;
8123 offset_int wpos
8124 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8125 - wi::to_offset (min_index),
8126 TYPE_PRECISION (sizetype));
8127 wpos *= fieldsize;
8128 if (!wi::fits_shwi_p (wpos))
8129 return 0;
8130 pos = wpos.to_shwi ();
8131 offset_int wcount
8132 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8133 - wi::to_offset (TREE_OPERAND (index, 0)),
8134 TYPE_PRECISION (sizetype));
8135 if (!wi::fits_shwi_p (wcount))
8136 return 0;
8137 count = wcount.to_shwi ();
8139 else if (index)
8141 if (TREE_CODE (index) != INTEGER_CST)
8142 return 0;
8143 offset_int wpos
8144 = wi::sext (wi::to_offset (index)
8145 - wi::to_offset (min_index),
8146 TYPE_PRECISION (sizetype));
8147 wpos *= fieldsize;
8148 if (!wi::fits_shwi_p (wpos))
8149 return 0;
8150 pos = wpos.to_shwi ();
8153 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8155 if (valueinit == -1)
8157 tree zero = build_zero_cst (TREE_TYPE (type));
8158 r = native_encode_initializer (zero, ptr + curpos,
8159 fieldsize, 0,
8160 mask + curpos);
8161 if (TREE_CODE (zero) == CONSTRUCTOR)
8162 ggc_free (zero);
8163 if (!r)
8164 return 0;
8165 valueinit = curpos;
8166 curpos += fieldsize;
8168 while (curpos != pos)
8170 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8171 memcpy (mask + curpos, mask + valueinit, fieldsize);
8172 curpos += fieldsize;
8176 curpos = pos;
8177 if (val)
8180 if (off == -1
8181 || (curpos >= off
8182 && (curpos + fieldsize
8183 <= (HOST_WIDE_INT) off + len)))
8185 if (full)
8187 if (ptr)
8188 memcpy (ptr + (curpos - o), ptr + (pos - o),
8189 fieldsize);
8190 if (mask)
8191 memcpy (mask + curpos, mask + pos, fieldsize);
8193 else if (!native_encode_initializer (val,
8194 ptr
8195 ? ptr + curpos - o
8196 : NULL,
8197 fieldsize,
8198 off == -1 ? -1
8199 : 0,
8200 mask
8201 ? mask + curpos
8202 : NULL))
8203 return 0;
8204 else
8206 full = true;
8207 pos = curpos;
8210 else if (curpos + fieldsize > off
8211 && curpos < (HOST_WIDE_INT) off + len)
8213 /* Partial overlap. */
8214 unsigned char *p = NULL;
8215 int no = 0;
8216 int l;
8217 gcc_assert (mask == NULL);
8218 if (curpos >= off)
8220 if (ptr)
8221 p = ptr + curpos - off;
8222 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8223 fieldsize);
8225 else
8227 p = ptr;
8228 no = off - curpos;
8229 l = len;
8231 if (!native_encode_initializer (val, p, l, no, NULL))
8232 return 0;
8234 curpos += fieldsize;
8236 while (count-- != 0);
8238 return MIN (total_bytes - off, len);
8240 else if (TREE_CODE (type) == RECORD_TYPE
8241 || TREE_CODE (type) == UNION_TYPE)
8243 unsigned HOST_WIDE_INT cnt;
8244 constructor_elt *ce;
8245 tree fld_base = TYPE_FIELDS (type);
8246 tree to_free = NULL_TREE;
8248 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8249 if (ptr != NULL)
8250 memset (ptr, '\0', MIN (total_bytes - o, len));
8251 for (cnt = 0; ; cnt++)
8253 tree val = NULL_TREE, field = NULL_TREE;
8254 HOST_WIDE_INT pos = 0, fieldsize;
8255 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8257 if (to_free)
8259 ggc_free (to_free);
8260 to_free = NULL_TREE;
8263 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8265 val = ce->value;
8266 field = ce->index;
8267 if (field == NULL_TREE)
8268 return 0;
8270 pos = int_byte_position (field);
8271 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8272 continue;
8274 else if (mask == NULL
8275 || CONSTRUCTOR_NO_CLEARING (init))
8276 break;
8277 else
8278 pos = total_bytes;
8280 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8282 tree fld;
8283 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8285 if (TREE_CODE (fld) != FIELD_DECL)
8286 continue;
8287 if (fld == field)
8288 break;
8289 if (DECL_PADDING_P (fld))
8290 continue;
8291 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8292 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8293 return 0;
8294 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8295 continue;
8296 break;
8298 if (fld == NULL_TREE)
8300 if (ce == NULL)
8301 break;
8302 return 0;
8304 fld_base = DECL_CHAIN (fld);
8305 if (fld != field)
8307 cnt--;
8308 field = fld;
8309 pos = int_byte_position (field);
8310 val = build_zero_cst (TREE_TYPE (fld));
8311 if (TREE_CODE (val) == CONSTRUCTOR)
8312 to_free = val;
8316 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8317 && TYPE_DOMAIN (TREE_TYPE (field))
8318 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8320 if (mask || off != -1)
8321 return 0;
8322 if (val == NULL_TREE)
8323 continue;
8324 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8325 return 0;
8326 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8327 if (fieldsize < 0
8328 || (int) fieldsize != fieldsize
8329 || (pos + fieldsize) > INT_MAX)
8330 return 0;
8331 if (pos + fieldsize > total_bytes)
8333 if (ptr != NULL && total_bytes < len)
8334 memset (ptr + total_bytes, '\0',
8335 MIN (pos + fieldsize, len) - total_bytes);
8336 total_bytes = pos + fieldsize;
8339 else
8341 if (DECL_SIZE_UNIT (field) == NULL_TREE
8342 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8343 return 0;
8344 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8346 if (fieldsize == 0)
8347 continue;
8349 if (DECL_BIT_FIELD (field))
8351 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8352 return 0;
8353 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8354 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8355 bpos %= BITS_PER_UNIT;
8359 fieldsize += bpos;
8360 epos = fieldsize % BITS_PER_UNIT;
8361 fieldsize += BITS_PER_UNIT - 1;
8362 fieldsize /= BITS_PER_UNIT;
8365 if (off != -1 && pos + fieldsize <= off)
8366 continue;
8368 if (val == NULL_TREE)
8369 continue;
8371 if (DECL_BIT_FIELD (field))
8373 /* FIXME: Handle PDP endian. */
8374 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8375 return 0;
8377 if (TREE_CODE (val) != INTEGER_CST)
8378 return 0;
8380 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8381 tree repr_type = NULL_TREE;
8382 HOST_WIDE_INT rpos = 0;
8383 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8385 rpos = int_byte_position (repr);
8386 repr_type = TREE_TYPE (repr);
8388 else
8390 repr_type = find_bitfield_repr_type (fieldsize, len);
8391 if (repr_type == NULL_TREE)
8392 return 0;
8393 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8394 gcc_assert (repr_size > 0 && repr_size <= len);
8395 if (pos + repr_size <= o + len)
8396 rpos = pos;
8397 else
8399 rpos = o + len - repr_size;
8400 gcc_assert (rpos <= pos);
8404 if (rpos > pos)
8405 return 0;
8406 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8407 int diff = (TYPE_PRECISION (repr_type)
8408 - TYPE_PRECISION (TREE_TYPE (field)));
8409 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8410 if (!BYTES_BIG_ENDIAN)
8411 w = wi::lshift (w, bitoff);
8412 else
8413 w = wi::lshift (w, diff - bitoff);
8414 val = wide_int_to_tree (repr_type, w);
8416 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8417 / BITS_PER_UNIT + 1];
8418 int l = native_encode_int (val, buf, sizeof buf, 0);
8419 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8420 return 0;
8422 if (ptr == NULL)
8423 continue;
8425 /* If the bitfield does not start at byte boundary, handle
8426 the partial byte at the start. */
8427 if (bpos
8428 && (off == -1 || (pos >= off && len >= 1)))
8430 if (!BYTES_BIG_ENDIAN)
8432 int msk = (1 << bpos) - 1;
8433 buf[pos - rpos] &= ~msk;
8434 buf[pos - rpos] |= ptr[pos - o] & msk;
8435 if (mask)
8437 if (fieldsize > 1 || epos == 0)
8438 mask[pos] &= msk;
8439 else
8440 mask[pos] &= (msk | ~((1 << epos) - 1));
8443 else
8445 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8446 buf[pos - rpos] &= msk;
8447 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8448 if (mask)
8450 if (fieldsize > 1 || epos == 0)
8451 mask[pos] &= ~msk;
8452 else
8453 mask[pos] &= (~msk
8454 | ((1 << (BITS_PER_UNIT - epos))
8455 - 1));
8459 /* If the bitfield does not end at byte boundary, handle
8460 the partial byte at the end. */
8461 if (epos
8462 && (off == -1
8463 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8465 if (!BYTES_BIG_ENDIAN)
8467 int msk = (1 << epos) - 1;
8468 buf[pos - rpos + fieldsize - 1] &= msk;
8469 buf[pos - rpos + fieldsize - 1]
8470 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8471 if (mask && (fieldsize > 1 || bpos == 0))
8472 mask[pos + fieldsize - 1] &= ~msk;
8474 else
8476 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8477 buf[pos - rpos + fieldsize - 1] &= ~msk;
8478 buf[pos - rpos + fieldsize - 1]
8479 |= ptr[pos + fieldsize - 1 - o] & msk;
8480 if (mask && (fieldsize > 1 || bpos == 0))
8481 mask[pos + fieldsize - 1] &= msk;
8484 if (off == -1
8485 || (pos >= off
8486 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8488 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8489 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8490 memset (mask + pos + (bpos != 0), 0,
8491 fieldsize - (bpos != 0) - (epos != 0));
8493 else
8495 /* Partial overlap. */
8496 HOST_WIDE_INT fsz = fieldsize;
8497 gcc_assert (mask == NULL);
8498 if (pos < off)
8500 fsz -= (off - pos);
8501 pos = off;
8503 if (pos + fsz > (HOST_WIDE_INT) off + len)
8504 fsz = (HOST_WIDE_INT) off + len - pos;
8505 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8507 continue;
8510 if (off == -1
8511 || (pos >= off
8512 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8514 int fldsize = fieldsize;
8515 if (off == -1)
8517 tree fld = DECL_CHAIN (field);
8518 while (fld)
8520 if (TREE_CODE (fld) == FIELD_DECL)
8521 break;
8522 fld = DECL_CHAIN (fld);
8524 if (fld == NULL_TREE)
8525 fldsize = len - pos;
8527 r = native_encode_initializer (val, ptr ? ptr + pos - o
8528 : NULL,
8529 fldsize,
8530 off == -1 ? -1 : 0,
8531 mask ? mask + pos : NULL);
8532 if (!r)
8533 return 0;
8534 if (off == -1
8535 && fldsize != fieldsize
8536 && r > fieldsize
8537 && pos + r > total_bytes)
8538 total_bytes = pos + r;
8540 else
8542 /* Partial overlap. */
8543 unsigned char *p = NULL;
8544 int no = 0;
8545 int l;
8546 gcc_assert (mask == NULL);
8547 if (pos >= off)
8549 if (ptr)
8550 p = ptr + pos - off;
8551 l = MIN ((HOST_WIDE_INT) off + len - pos,
8552 fieldsize);
8554 else
8556 p = ptr;
8557 no = off - pos;
8558 l = len;
8560 if (!native_encode_initializer (val, p, l, no, NULL))
8561 return 0;
8564 return MIN (total_bytes - off, len);
8566 return 0;
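/* Illustrative sketch, not part of GCC: the bpos/epos bookkeeping above
   for a bit-field whose DECL_FIELD_BIT_OFFSET is 11 and whose precision
   is 7 bits, assuming 8-bit bytes.  fieldsize ends up as the number of
   bytes touched and epos as the live bits in the last partial byte.  */
#include <stdio.h>

int
main (void)
{
  unsigned bit_offset = 11;                /* DECL_FIELD_BIT_OFFSET  */
  unsigned precision = 7;                  /* TYPE_PRECISION of the field  */
  unsigned bpos = bit_offset % 8;          /* 3 bits into the first byte  */
  unsigned fieldsize = precision + bpos;   /* 10 bits spanned  */
  unsigned epos = fieldsize % 8;           /* 2 live bits in the last byte  */
  fieldsize = (fieldsize + 8 - 1) / 8;     /* 2 bytes touched  */
  printf ("bpos=%u epos=%u bytes=%u\n", bpos, epos, fieldsize);
  return 0;
}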
8571 /* Subroutine of native_interpret_expr. Interpret the contents of
8572 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8573 If the buffer cannot be interpreted, return NULL_TREE. */
8575 static tree
8576 native_interpret_int (tree type, const unsigned char *ptr, int len)
8578 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8580 if (total_bytes > len
8581 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8582 return NULL_TREE;
8584 wide_int result = wi::from_buffer (ptr, total_bytes);
8586 return wide_int_to_tree (type, result);
8590 /* Subroutine of native_interpret_expr. Interpret the contents of
8591 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8592 If the buffer cannot be interpreted, return NULL_TREE. */
8594 static tree
8595 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8597 scalar_mode mode = SCALAR_TYPE_MODE (type);
8598 int total_bytes = GET_MODE_SIZE (mode);
8599 double_int result;
8600 FIXED_VALUE_TYPE fixed_value;
8602 if (total_bytes > len
8603 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8604 return NULL_TREE;
8606 result = double_int::from_buffer (ptr, total_bytes);
8607 fixed_value = fixed_from_double_int (result, mode);
8609 return build_fixed (type, fixed_value);
8613 /* Subroutine of native_interpret_expr. Interpret the contents of
8614 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8615 If the buffer cannot be interpreted, return NULL_TREE. */
8617 static tree
8618 native_interpret_real (tree type, const unsigned char *ptr, int len)
8620 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8621 int total_bytes = GET_MODE_SIZE (mode);
8622 unsigned char value;
8623 /* There are always 32 bits in each long, no matter the size of
8624 the host's long. We handle floating point representations with
8625 up to 192 bits. */
8626 REAL_VALUE_TYPE r;
8627 long tmp[6];
8629 if (total_bytes > len || total_bytes > 24)
8630 return NULL_TREE;
8631 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8633 memset (tmp, 0, sizeof (tmp));
8634 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8635 bitpos += BITS_PER_UNIT)
8637 /* Both OFFSET and BYTE index within a long;
8638 bitpos indexes the whole float. */
8639 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8640 if (UNITS_PER_WORD < 4)
8642 int word = byte / UNITS_PER_WORD;
8643 if (WORDS_BIG_ENDIAN)
8644 word = (words - 1) - word;
8645 offset = word * UNITS_PER_WORD;
8646 if (BYTES_BIG_ENDIAN)
8647 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8648 else
8649 offset += byte % UNITS_PER_WORD;
8651 else
8653 offset = byte;
8654 if (BYTES_BIG_ENDIAN)
8656 /* Reverse bytes within each long, or within the entire float
8657 if it's smaller than a long (for HFmode). */
8658 offset = MIN (3, total_bytes - 1) - offset;
8659 gcc_assert (offset >= 0);
8662 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8664 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8667 real_from_target (&r, tmp, mode);
8668 tree ret = build_real (type, r);
8669 if (MODE_COMPOSITE_P (mode))
8671 /* For floating point values in composite modes, punt if this folding
8672 doesn't preserve bit representation. As the mode doesn't have fixed
8673 precision while GCC pretends it does, there could be valid values that
8674 GCC can't really represent accurately. See PR95450. */
8675 unsigned char buf[24];
8676 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8677 || memcmp (ptr, buf, total_bytes) != 0)
8678 ret = NULL_TREE;
8680 return ret;
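/* Illustrative sketch, not part of GCC: the round-trip criterion used above
   for composite modes, in miniature.  Decode bytes into a value, re-encode,
   and trust the result only if memcmp says the bytes survived.  Assumes an
   8-byte IEEE double on the host.  */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned char ptr[8] = { 0x18, 0x2d, 0x44, 0x54, 0xfb, 0x21, 0x09, 0x40 };
  unsigned char buf[8];
  double r;
  memcpy (&r, ptr, sizeof r);    /* interpret  */
  memcpy (buf, &r, sizeof buf);  /* re-encode  */
  printf ("round-trip %s\n",
          memcmp (ptr, buf, sizeof buf) == 0 ? "preserved" : "lost");
  return 0;
}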
8684 /* Subroutine of native_interpret_expr. Interpret the contents of
8685 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8686 If the buffer cannot be interpreted, return NULL_TREE. */
8688 static tree
8689 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8691 tree etype, rpart, ipart;
8692 int size;
8694 etype = TREE_TYPE (type);
8695 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8696 if (size * 2 > len)
8697 return NULL_TREE;
8698 rpart = native_interpret_expr (etype, ptr, size);
8699 if (!rpart)
8700 return NULL_TREE;
8701 ipart = native_interpret_expr (etype, ptr+size, size);
8702 if (!ipart)
8703 return NULL_TREE;
8704 return build_complex (type, rpart, ipart);
8707 /* Read a vector of type TYPE from the target memory image given by BYTES,
8708 which contains LEN bytes. The vector is known to be encodable using
8709 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8711 Return the vector on success, otherwise return null. */
8713 static tree
8714 native_interpret_vector_part (tree type, const unsigned char *bytes,
8715 unsigned int len, unsigned int npatterns,
8716 unsigned int nelts_per_pattern)
8718 tree elt_type = TREE_TYPE (type);
8719 if (VECTOR_BOOLEAN_TYPE_P (type)
8720 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8722 /* This is the only case in which elements can be smaller than a byte.
8723 Element 0 is always in the lsb of the containing byte. */
8724 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8725 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8726 return NULL_TREE;
8728 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8729 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8731 unsigned int bit_index = i * elt_bits;
8732 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8733 unsigned int lsb = bit_index % BITS_PER_UNIT;
8734 builder.quick_push (bytes[byte_index] & (1 << lsb)
8735 ? build_all_ones_cst (elt_type)
8736 : build_zero_cst (elt_type));
8738 return builder.build ();
8741 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8742 if (elt_bytes * npatterns * nelts_per_pattern > len)
8743 return NULL_TREE;
8745 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8746 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8748 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8749 if (!elt)
8750 return NULL_TREE;
8751 builder.quick_push (elt);
8752 bytes += elt_bytes;
8754 return builder.build ();
8757 /* Subroutine of native_interpret_expr. Interpret the contents of
8758 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8759 If the buffer cannot be interpreted, return NULL_TREE. */
8761 static tree
8762 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8764 tree etype;
8765 unsigned int size;
8766 unsigned HOST_WIDE_INT count;
8768 etype = TREE_TYPE (type);
8769 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8770 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8771 || size * count > len)
8772 return NULL_TREE;
8774 return native_interpret_vector_part (type, ptr, len, count, 1);
8778 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8779 the buffer PTR of length LEN as a constant of type TYPE. For
8780 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8781 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8782 return NULL_TREE. */
8784 tree
8785 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8787 switch (TREE_CODE (type))
8789 case INTEGER_TYPE:
8790 case ENUMERAL_TYPE:
8791 case BOOLEAN_TYPE:
8792 case POINTER_TYPE:
8793 case REFERENCE_TYPE:
8794 case OFFSET_TYPE:
8795 return native_interpret_int (type, ptr, len);
8797 case REAL_TYPE:
8798 return native_interpret_real (type, ptr, len);
8800 case FIXED_POINT_TYPE:
8801 return native_interpret_fixed (type, ptr, len);
8803 case COMPLEX_TYPE:
8804 return native_interpret_complex (type, ptr, len);
8806 case VECTOR_TYPE:
8807 return native_interpret_vector (type, ptr, len);
8809 default:
8810 return NULL_TREE;
8814 /* Returns true if we can interpret the contents of a native encoding
8815 as TYPE. */
8817 bool
8818 can_native_interpret_type_p (tree type)
8820 switch (TREE_CODE (type))
8822 case INTEGER_TYPE:
8823 case ENUMERAL_TYPE:
8824 case BOOLEAN_TYPE:
8825 case POINTER_TYPE:
8826 case REFERENCE_TYPE:
8827 case FIXED_POINT_TYPE:
8828 case REAL_TYPE:
8829 case COMPLEX_TYPE:
8830 case VECTOR_TYPE:
8831 case OFFSET_TYPE:
8832 return true;
8833 default:
8834 return false;
8838 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8839 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8841 tree
8842 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8843 int len)
8845 vec<constructor_elt, va_gc> *elts = NULL;
8846 if (TREE_CODE (type) == ARRAY_TYPE)
8848 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8849 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8850 return NULL_TREE;
8852 HOST_WIDE_INT cnt = 0;
8853 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8855 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8856 return NULL_TREE;
8857 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8859 if (eltsz == 0)
8860 cnt = 0;
8861 HOST_WIDE_INT pos = 0;
8862 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8864 tree v = NULL_TREE;
8865 if (pos >= len || pos + eltsz > len)
8866 return NULL_TREE;
8867 if (can_native_interpret_type_p (TREE_TYPE (type)))
8869 v = native_interpret_expr (TREE_TYPE (type),
8870 ptr + off + pos, eltsz);
8871 if (v == NULL_TREE)
8872 return NULL_TREE;
8874 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8875 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8876 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8877 eltsz);
8878 if (v == NULL_TREE)
8879 return NULL_TREE;
8880 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8882 return build_constructor (type, elts);
8884 if (TREE_CODE (type) != RECORD_TYPE)
8885 return NULL_TREE;
8886 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8888 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8889 continue;
8890 tree fld = field;
8891 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8892 int diff = 0;
8893 tree v = NULL_TREE;
8894 if (DECL_BIT_FIELD (field))
8896 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8897 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8899 poly_int64 bitoffset;
8900 poly_uint64 field_offset, fld_offset;
8901 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8902 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8903 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8904 else
8905 bitoffset = 0;
8906 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8907 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8908 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8909 - TYPE_PRECISION (TREE_TYPE (field)));
8910 if (!bitoffset.is_constant (&bitoff)
8911 || bitoff < 0
8912 || bitoff > diff)
8913 return NULL_TREE;
8915 else
8917 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8918 return NULL_TREE;
8919 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8920 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8921 bpos %= BITS_PER_UNIT;
8922 fieldsize += bpos;
8923 fieldsize += BITS_PER_UNIT - 1;
8924 fieldsize /= BITS_PER_UNIT;
8925 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8926 if (repr_type == NULL_TREE)
8927 return NULL_TREE;
8928 sz = int_size_in_bytes (repr_type);
8929 if (sz < 0 || sz > len)
8930 return NULL_TREE;
8931 pos = int_byte_position (field);
8932 if (pos < 0 || pos > len || pos + fieldsize > len)
8933 return NULL_TREE;
8934 HOST_WIDE_INT rpos;
8935 if (pos + sz <= len)
8936 rpos = pos;
8937 else
8939 rpos = len - sz;
8940 gcc_assert (rpos <= pos);
8942 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8943 pos = rpos;
8944 diff = (TYPE_PRECISION (repr_type)
8945 - TYPE_PRECISION (TREE_TYPE (field)));
8946 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8947 if (v == NULL_TREE)
8948 return NULL_TREE;
8949 fld = NULL_TREE;
8953 if (fld)
8955 sz = int_size_in_bytes (TREE_TYPE (fld));
8956 if (sz < 0 || sz > len)
8957 return NULL_TREE;
8958 tree byte_pos = byte_position (fld);
8959 if (!tree_fits_shwi_p (byte_pos))
8960 return NULL_TREE;
8961 pos = tree_to_shwi (byte_pos);
8962 if (pos < 0 || pos > len || pos + sz > len)
8963 return NULL_TREE;
8965 if (fld == NULL_TREE)
8966 /* Already handled above. */;
8967 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8969 v = native_interpret_expr (TREE_TYPE (fld),
8970 ptr + off + pos, sz);
8971 if (v == NULL_TREE)
8972 return NULL_TREE;
8974 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8975 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8976 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8977 if (v == NULL_TREE)
8978 return NULL_TREE;
8979 if (fld != field)
8981 if (TREE_CODE (v) != INTEGER_CST)
8982 return NULL_TREE;
8984 /* FIXME: Figure out how to handle PDP endian bitfields. */
8985 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8986 return NULL_TREE;
8987 if (!BYTES_BIG_ENDIAN)
8988 v = wide_int_to_tree (TREE_TYPE (field),
8989 wi::lrshift (wi::to_wide (v), bitoff));
8990 else
8991 v = wide_int_to_tree (TREE_TYPE (field),
8992 wi::lrshift (wi::to_wide (v),
8993 diff - bitoff));
8995 CONSTRUCTOR_APPEND_ELT (elts, field, v);
8997 return build_constructor (type, elts);
9000 /* Routines for manipulating native_encode_expr encoded data when the encoded
9001 or extracted constant positions and/or sizes aren't byte aligned. */
9003 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9004 bits between adjacent elements. AMNT should be within
9005 [0, BITS_PER_UNIT).
9006 Example, AMNT = 2:
9007 00011111|11100000 << 2 = 01111111|10000000
9008 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9010 void
9011 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9012 unsigned int amnt)
9014 if (amnt == 0)
9015 return;
9017 unsigned char carry_over = 0U;
9018 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9019 unsigned char clear_mask = (~0U) << amnt;
9021 for (unsigned int i = 0; i < sz; i++)
9023 unsigned prev_carry_over = carry_over;
9024 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9026 ptr[i] <<= amnt;
9027 if (i != 0)
9029 ptr[i] &= clear_mask;
9030 ptr[i] |= prev_carry_over;
9035 /* Like shift_bytes_in_array_left but for big-endian.
9036 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9037 bits between adjacent elements. AMNT should be within
9038 [0, BITS_PER_UNIT).
9039 Example, AMNT = 2:
9040 00011111|11100000 >> 2 = 00000111|11111000
9041 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9043 void
9044 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9045 unsigned int amnt)
9047 if (amnt == 0)
9048 return;
9050 unsigned char carry_over = 0U;
9051 unsigned char carry_mask = ~(~0U << amnt);
9053 for (unsigned int i = 0; i < sz; i++)
9055 unsigned prev_carry_over = carry_over;
9056 carry_over = ptr[i] & carry_mask;
9058 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9059 ptr[i] >>= amnt;
9060 ptr[i] |= prev_carry_over;
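/* Illustrative sketch, not part of GCC: the little-endian left shift above,
   reimplemented standalone (assuming 8-bit bytes and 0 < AMNT < 8) and
   applied to the example from its comment: PTR[0] = 0xe0, PTR[1] = 0x1f
   shifted left by 2 gives PTR[0] = 0x80, PTR[1] = 0x7f.  */
#include <stdio.h>

static void
shl_bytes (unsigned char *ptr, unsigned sz, unsigned amnt)
{
  unsigned char carry = 0;
  for (unsigned i = 0; i < sz; i++)
    {
      unsigned char next_carry = (unsigned char) (ptr[i] >> (8 - amnt));
      ptr[i] = (unsigned char) ((ptr[i] << amnt) | carry);
      carry = next_carry;
    }
}

int
main (void)
{
  unsigned char ptr[2] = { 0xe0, 0x1f };
  shl_bytes (ptr, 2, 2);
  printf ("%02x %02x\n", ptr[0], ptr[1]);   /* Prints "80 7f".  */
  return 0;
}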
9064 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9065 directly on the VECTOR_CST encoding, in a way that works for variable-
9066 length vectors. Return the resulting VECTOR_CST on success or null
9067 on failure. */
9069 static tree
9070 fold_view_convert_vector_encoding (tree type, tree expr)
9072 tree expr_type = TREE_TYPE (expr);
9073 poly_uint64 type_bits, expr_bits;
9074 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9075 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9076 return NULL_TREE;
9078 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9079 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9080 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9081 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9083 /* We can only preserve the semantics of a stepped pattern if the new
9084 vector element is an integer of the same size. */
9085 if (VECTOR_CST_STEPPED_P (expr)
9086 && (!INTEGRAL_TYPE_P (TREE_TYPE (type)) || type_elt_bits != expr_elt_bits))
9087 return NULL_TREE;
9089 /* The number of bits needed to encode one element from every pattern
9090 of the original vector. */
9091 unsigned int expr_sequence_bits
9092 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9094 /* The number of bits needed to encode one element from every pattern
9095 of the result. */
9096 unsigned int type_sequence_bits
9097 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9099 /* Don't try to read more bytes than are available, which can happen
9100 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9101 The general VIEW_CONVERT handling can cope with that case, so there's
9102 no point complicating things here. */
9103 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9104 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9105 BITS_PER_UNIT);
9106 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9107 if (known_gt (buffer_bits, expr_bits))
9108 return NULL_TREE;
9110 /* Get enough bytes of EXPR to form the new encoding. */
9111 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9112 buffer.quick_grow (buffer_bytes);
9113 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9114 buffer_bits / expr_elt_bits)
9115 != (int) buffer_bytes)
9116 return NULL_TREE;
9118 /* Reencode the bytes as TYPE. */
9119 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9120 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9121 type_npatterns, nelts_per_pattern);
9124 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9125 TYPE at compile-time. If we're unable to perform the conversion
9126 return NULL_TREE. */
9128 static tree
9129 fold_view_convert_expr (tree type, tree expr)
9131 /* We support up to 512-bit values (for V8DFmode). */
9132 unsigned char buffer[64];
9133 int len;
9135 /* Check that the host and target are sane. */
9136 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9137 return NULL_TREE;
9139 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9140 if (tree res = fold_view_convert_vector_encoding (type, expr))
9141 return res;
9143 len = native_encode_expr (expr, buffer, sizeof (buffer));
9144 if (len == 0)
9145 return NULL_TREE;
9147 return native_interpret_expr (type, buffer, len);
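/* Illustrative sketch, not part of GCC: what fold_view_convert_expr does at
   compile time, performed at run time with memcpy: reinterpret the bytes of
   one constant as another type without any value conversion.  Assumes a
   32-bit IEEE float on the host.  */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  unsigned char buffer[sizeof (float)];   /* native_encode_expr, in effect  */
  uint32_t u;
  memcpy (buffer, &f, sizeof buffer);
  memcpy (&u, buffer, sizeof u);          /* native_interpret_expr  */
  printf ("0x%08x\n", u);                 /* Prints "0x3f800000".  */
  return 0;
}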
9150 /* Build an expression for the address of T. Folds away INDIRECT_REF
9151 to avoid confusing the gimplify process. */
9153 tree
9154 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9156 /* The size of the object is not relevant when talking about its address. */
9157 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9158 t = TREE_OPERAND (t, 0);
9160 if (TREE_CODE (t) == INDIRECT_REF)
9162 t = TREE_OPERAND (t, 0);
9164 if (TREE_TYPE (t) != ptrtype)
9165 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9167 else if (TREE_CODE (t) == MEM_REF
9168 && integer_zerop (TREE_OPERAND (t, 1)))
9170 t = TREE_OPERAND (t, 0);
9172 if (TREE_TYPE (t) != ptrtype)
9173 t = fold_convert_loc (loc, ptrtype, t);
9175 else if (TREE_CODE (t) == MEM_REF
9176 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9177 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9178 TREE_OPERAND (t, 0),
9179 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9180 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9182 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9184 if (TREE_TYPE (t) != ptrtype)
9185 t = fold_convert_loc (loc, ptrtype, t);
9187 else
9188 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9190 return t;
9193 /* Build an expression for the address of T. */
9195 tree
9196 build_fold_addr_expr_loc (location_t loc, tree t)
9198 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9200 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9203 /* Fold a unary expression of code CODE and type TYPE with operand
9204 OP0. Return the folded expression if folding is successful.
9205 Otherwise, return NULL_TREE. */
9207 tree
9208 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9210 tree tem;
9211 tree arg0;
9212 enum tree_code_class kind = TREE_CODE_CLASS (code);
9214 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9215 && TREE_CODE_LENGTH (code) == 1);
9217 arg0 = op0;
9218 if (arg0)
9220 if (CONVERT_EXPR_CODE_P (code)
9221 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9223 /* Don't use STRIP_NOPS, because signedness of argument type
9224 matters. */
9225 STRIP_SIGN_NOPS (arg0);
9227 else
9229 /* Strip any conversions that don't change the mode. This
9230 is safe for every expression, except for a comparison
9231 expression because its signedness is derived from its
9232 operands.
9234 Note that this is done as an internal manipulation within
9235 the constant folder, in order to find the simplest
9236 representation of the arguments so that their form can be
9237 studied. In any case, the appropriate type conversions
9238 should be put back in the tree that will get out of the
9239 constant folder. */
9240 STRIP_NOPS (arg0);
9243 if (CONSTANT_CLASS_P (arg0))
9245 tree tem = const_unop (code, type, arg0);
9246 if (tem)
9248 if (TREE_TYPE (tem) != type)
9249 tem = fold_convert_loc (loc, type, tem);
9250 return tem;
9255 tem = generic_simplify (loc, code, type, op0);
9256 if (tem)
9257 return tem;
9259 if (TREE_CODE_CLASS (code) == tcc_unary)
9261 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9262 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9263 fold_build1_loc (loc, code, type,
9264 fold_convert_loc (loc, TREE_TYPE (op0),
9265 TREE_OPERAND (arg0, 1))));
9266 else if (TREE_CODE (arg0) == COND_EXPR)
9268 tree arg01 = TREE_OPERAND (arg0, 1);
9269 tree arg02 = TREE_OPERAND (arg0, 2);
9270 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9271 arg01 = fold_build1_loc (loc, code, type,
9272 fold_convert_loc (loc,
9273 TREE_TYPE (op0), arg01));
9274 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9275 arg02 = fold_build1_loc (loc, code, type,
9276 fold_convert_loc (loc,
9277 TREE_TYPE (op0), arg02));
9278 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9279 arg01, arg02);
9281 /* If this was a conversion, and all we did was to move it
9282 inside the COND_EXPR, bring it back out. But leave it if
9283 it is a conversion from integer to integer and the
9284 result precision is no wider than a word since such a
9285 conversion is cheap and may be optimized away by combine,
9286 while it couldn't if it were outside the COND_EXPR. Then return
9287 so we don't get into an infinite recursion loop taking the
9288 conversion out and then back in. */
9290 if ((CONVERT_EXPR_CODE_P (code)
9291 || code == NON_LVALUE_EXPR)
9292 && TREE_CODE (tem) == COND_EXPR
9293 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9294 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9295 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9296 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9297 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9298 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9299 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9300 && (INTEGRAL_TYPE_P
9301 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9302 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9303 || flag_syntax_only))
9304 tem = build1_loc (loc, code, type,
9305 build3 (COND_EXPR,
9306 TREE_TYPE (TREE_OPERAND
9307 (TREE_OPERAND (tem, 1), 0)),
9308 TREE_OPERAND (tem, 0),
9309 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9310 TREE_OPERAND (TREE_OPERAND (tem, 2),
9311 0)));
9312 return tem;
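/* Worked example of the fold above (illustrative sketch, not part of
   the source): for (long) (c ? a : b) the conversion is first pushed
   into both arms, giving c ? (long) a : (long) b, so each arm can
   fold; if both arms remain plain conversions from the same integer
   type no wider than a word, the conversion is hoisted back out
   again, which is exactly what the test above guards. */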
9316 switch (code)
9318 case NON_LVALUE_EXPR:
9319 if (!maybe_lvalue_p (op0))
9320 return fold_convert_loc (loc, type, op0);
9321 return NULL_TREE;
9323 CASE_CONVERT:
9324 case FLOAT_EXPR:
9325 case FIX_TRUNC_EXPR:
9326 if (COMPARISON_CLASS_P (op0))
9328 /* If we have (type) (a CMP b) and type is an integral type, return
9329 new expression involving the new type. Canonicalize
9330 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9331 non-integral type.
9332 Do not fold the result, as that would not simplify further;
9333 folding it again would only recurse. */
9334 if (TREE_CODE (type) == BOOLEAN_TYPE)
9335 return build2_loc (loc, TREE_CODE (op0), type,
9336 TREE_OPERAND (op0, 0),
9337 TREE_OPERAND (op0, 1));
9338 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9339 && TREE_CODE (type) != VECTOR_TYPE)
9340 return build3_loc (loc, COND_EXPR, type, op0,
9341 constant_boolean_node (true, type),
9342 constant_boolean_node (false, type));
9345 /* Handle (T *)&A.B.C for A being of type T and B and C
9346 living at offset zero. This occurs frequently in
9347 C++ upcasting and then accessing the base. */
9348 if (TREE_CODE (op0) == ADDR_EXPR
9349 && POINTER_TYPE_P (type)
9350 && handled_component_p (TREE_OPERAND (op0, 0)))
9352 poly_int64 bitsize, bitpos;
9353 tree offset;
9354 machine_mode mode;
9355 int unsignedp, reversep, volatilep;
9356 tree base
9357 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9358 &offset, &mode, &unsignedp, &reversep,
9359 &volatilep);
9360 /* If the reference was to a (constant) zero offset, we can use
9361 the address of the base if it has the same base type
9362 as the result type and the pointer type is unqualified. */
9363 if (!offset
9364 && known_eq (bitpos, 0)
9365 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9366 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9367 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9368 return fold_convert_loc (loc, type,
9369 build_fold_addr_expr_loc (loc, base));
9372 if (TREE_CODE (op0) == MODIFY_EXPR
9373 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9374 /* Detect assigning a bitfield. */
9375 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9376 && DECL_BIT_FIELD
9377 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9379 /* Don't leave an assignment inside a conversion
9380 unless assigning a bitfield. */
9381 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9382 /* First do the assignment, then return converted constant. */
9383 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9384 suppress_warning (tem /* What warning? */);
9385 TREE_USED (tem) = 1;
9386 return tem;
9389 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9390 constant (if x has signed type, the sign bit cannot be set
9391 in c). This folds extension into the BIT_AND_EXPR.
9392 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9393 very likely don't have maximal range for their precision and this
9394 transformation effectively doesn't preserve non-maximal ranges. */
9395 if (TREE_CODE (type) == INTEGER_TYPE
9396 && TREE_CODE (op0) == BIT_AND_EXPR
9397 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9399 tree and_expr = op0;
9400 tree and0 = TREE_OPERAND (and_expr, 0);
9401 tree and1 = TREE_OPERAND (and_expr, 1);
9402 int change = 0;
9404 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9405 || (TYPE_PRECISION (type)
9406 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9407 change = 1;
9408 else if (TYPE_PRECISION (TREE_TYPE (and1))
9409 <= HOST_BITS_PER_WIDE_INT
9410 && tree_fits_uhwi_p (and1))
9412 unsigned HOST_WIDE_INT cst;
9414 cst = tree_to_uhwi (and1);
9415 cst &= HOST_WIDE_INT_M1U
9416 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9417 change = (cst == 0);
9418 if (change
9419 && !flag_syntax_only
9420 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9421 == ZERO_EXTEND))
9423 tree uns = unsigned_type_for (TREE_TYPE (and0));
9424 and0 = fold_convert_loc (loc, uns, and0);
9425 and1 = fold_convert_loc (loc, uns, and1);
9428 if (change)
9430 tem = force_fit_type (type, wi::to_widest (and1), 0,
9431 TREE_OVERFLOW (and1));
9432 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9433 fold_convert_loc (loc, type, and0), tem);
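/* Worked example (illustrative sketch): with 32-bit int and a 16-bit
   unsigned short operand s seen by the folder,
     (int) (s & 0x7f) is rewritten as (int) s & 0x7f
   since the mask sets no bit at or above the sign position of the
   narrower type, so zero-extension commutes with the masking. */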
9437 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9438 cast (T1)X will fold away. We assume that this happens when X itself
9439 is a cast. */
9440 if (POINTER_TYPE_P (type)
9441 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9442 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9444 tree arg00 = TREE_OPERAND (arg0, 0);
9445 tree arg01 = TREE_OPERAND (arg0, 1);
9447 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9448 when the pointed type needs higher alignment than
9449 the p+ first operand's pointed type. */
9450 if (!in_gimple_form
9451 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9452 && (min_align_of_type (TREE_TYPE (type))
9453 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9454 return NULL_TREE;
9456 arg00 = fold_convert_loc (loc, type, arg00);
9457 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9460 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9461 of the same precision, and X is an integer type not narrower than
9462 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9463 if (INTEGRAL_TYPE_P (type)
9464 && TREE_CODE (op0) == BIT_NOT_EXPR
9465 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9466 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9467 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9469 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9470 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9471 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9472 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9473 fold_convert_loc (loc, type, tem));
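/* Worked example (illustrative sketch): with int and unsigned of
   equal precision,
     (int) ~(unsigned) x folds to ~x
   because the inner cast is not a widening, so taking the complement
   commutes with the conversions. */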
9476 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9477 type of X and Y (integer types only). */
9478 if (INTEGRAL_TYPE_P (type)
9479 && TREE_CODE (op0) == MULT_EXPR
9480 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9481 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9483 /* Be careful not to introduce new overflows. */
9484 tree mult_type;
9485 if (TYPE_OVERFLOW_WRAPS (type))
9486 mult_type = type;
9487 else
9488 mult_type = unsigned_type_for (type);
9490 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9492 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9493 fold_convert_loc (loc, mult_type,
9494 TREE_OPERAND (op0, 0)),
9495 fold_convert_loc (loc, mult_type,
9496 TREE_OPERAND (op0, 1)));
9497 return fold_convert_loc (loc, type, tem);
9501 return NULL_TREE;
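/* Worked example for the narrowing-multiplication fold above
   (illustrative sketch): with 64-bit long x and y and 32-bit int,
     (int) (x * y) becomes (int) ((unsigned) x * (unsigned) y)
   performing the product in the unsigned narrower type so the
   rewrite cannot introduce a new signed overflow. */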
9503 case VIEW_CONVERT_EXPR:
9504 if (TREE_CODE (op0) == MEM_REF)
9506 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9507 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9508 tem = fold_build2_loc (loc, MEM_REF, type,
9509 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9510 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9511 return tem;
9514 return NULL_TREE;
9516 case NEGATE_EXPR:
9517 tem = fold_negate_expr (loc, arg0);
9518 if (tem)
9519 return fold_convert_loc (loc, type, tem);
9520 return NULL_TREE;
9522 case ABS_EXPR:
9523 /* Convert fabs((double)float) into (double)fabsf(float). */
9524 if (TREE_CODE (arg0) == NOP_EXPR
9525 && TREE_CODE (type) == REAL_TYPE)
9527 tree targ0 = strip_float_extensions (arg0);
9528 if (targ0 != arg0)
9529 return fold_convert_loc (loc, type,
9530 fold_build1_loc (loc, ABS_EXPR,
9531 TREE_TYPE (targ0),
9532 targ0));
9534 return NULL_TREE;
9536 case BIT_NOT_EXPR:
9537 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9538 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9539 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9540 fold_convert_loc (loc, type,
9541 TREE_OPERAND (arg0, 0)))))
9542 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9543 fold_convert_loc (loc, type,
9544 TREE_OPERAND (arg0, 1)));
9545 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9546 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9547 fold_convert_loc (loc, type,
9548 TREE_OPERAND (arg0, 1)))))
9549 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9550 fold_convert_loc (loc, type,
9551 TREE_OPERAND (arg0, 0)), tem);
9553 return NULL_TREE;
9555 case TRUTH_NOT_EXPR:
9556 /* Note that the operand of this must be an int
9557 and its values must be 0 or 1.
9558 ("true" is a fixed value perhaps depending on the language,
9559 but we don't handle values other than 1 correctly yet.) */
9560 tem = fold_truth_not_expr (loc, arg0);
9561 if (!tem)
9562 return NULL_TREE;
9563 return fold_convert_loc (loc, type, tem);
9565 case INDIRECT_REF:
9566 /* Fold *&X to X if X is an lvalue. */
9567 if (TREE_CODE (op0) == ADDR_EXPR)
9569 tree op00 = TREE_OPERAND (op0, 0);
9570 if ((VAR_P (op00)
9571 || TREE_CODE (op00) == PARM_DECL
9572 || TREE_CODE (op00) == RESULT_DECL)
9573 && !TREE_READONLY (op00))
9574 return op00;
9576 return NULL_TREE;
9578 default:
9579 return NULL_TREE;
9580 } /* switch (code) */
9584 /* If the operation was a conversion do _not_ mark a resulting constant
9585 with TREE_OVERFLOW if the original constant was not. These conversions
9586 have implementation defined behavior and retaining the TREE_OVERFLOW
9587 flag here would confuse later passes such as VRP. */
9588 tree
9589 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9590 tree type, tree op0)
9592 tree res = fold_unary_loc (loc, code, type, op0);
9593 if (res
9594 && TREE_CODE (res) == INTEGER_CST
9595 && TREE_CODE (op0) == INTEGER_CST
9596 && CONVERT_EXPR_CODE_P (code))
9597 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9599 return res;
9602 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9603 operands OP0 and OP1. LOC is the location of the resulting expression.
9604 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
9605 Return the folded expression if folding is successful. Otherwise,
9606 return NULL_TREE. */
9607 static tree
9608 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9609 tree arg0, tree arg1, tree op0, tree op1)
9611 tree tem;
9613 /* We only do these simplifications if we are optimizing. */
9614 if (!optimize)
9615 return NULL_TREE;
9617 /* Check for things like (A || B) && (A || C). We can convert this
9618 to A || (B && C). Note that either operator can be any of the four
9619 truth and/or operations and the transformation will still be
9620 valid. Also note that we only care about order for the
9621 ANDIF and ORIF operators. If B contains side effects, this
9622 might change the truth-value of A. */
9623 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9624 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9625 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9626 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9627 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9628 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9630 tree a00 = TREE_OPERAND (arg0, 0);
9631 tree a01 = TREE_OPERAND (arg0, 1);
9632 tree a10 = TREE_OPERAND (arg1, 0);
9633 tree a11 = TREE_OPERAND (arg1, 1);
9634 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9635 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9636 && (code == TRUTH_AND_EXPR
9637 || code == TRUTH_OR_EXPR));
9639 if (operand_equal_p (a00, a10, 0))
9640 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9641 fold_build2_loc (loc, code, type, a01, a11));
9642 else if (commutative && operand_equal_p (a00, a11, 0))
9643 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9644 fold_build2_loc (loc, code, type, a01, a10));
9645 else if (commutative && operand_equal_p (a01, a10, 0))
9646 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9647 fold_build2_loc (loc, code, type, a00, a11));
9649 /* This case is tricky because we must either have commutative
9650 operators or else A10 must not have side-effects. */
9652 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9653 && operand_equal_p (a01, a11, 0))
9654 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9655 fold_build2_loc (loc, code, type, a00, a10),
9656 a01);
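/* Worked example (illustrative sketch): the distribution above turns
     (a || b) && (a || c) into a || (b && c)
   and is valid for any mix of the four truth operations; the
   TREE_SIDE_EFFECTS check ensures B may safely be evaluated a
   different number of times than before. */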
9659 /* See if we can build a range comparison. */
9660 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9661 return tem;
9663 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9664 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9666 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9667 if (tem)
9668 return fold_build2_loc (loc, code, type, tem, arg1);
9671 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9672 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9674 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9675 if (tem)
9676 return fold_build2_loc (loc, code, type, arg0, tem);
9679 /* Check for the possibility of merging component references. If our
9680 lhs is another similar operation, try to merge its rhs with our
9681 rhs. Then try to merge our lhs and rhs. */
9682 if (TREE_CODE (arg0) == code
9683 && (tem = fold_truth_andor_1 (loc, code, type,
9684 TREE_OPERAND (arg0, 1), arg1)) != 0)
9685 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9687 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9688 return tem;
9690 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9691 if (param_logical_op_non_short_circuit != -1)
9692 logical_op_non_short_circuit
9693 = param_logical_op_non_short_circuit;
9694 if (logical_op_non_short_circuit
9695 && !sanitize_coverage_p ()
9696 && (code == TRUTH_AND_EXPR
9697 || code == TRUTH_ANDIF_EXPR
9698 || code == TRUTH_OR_EXPR
9699 || code == TRUTH_ORIF_EXPR))
9701 enum tree_code ncode, icode;
9703 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9704 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9705 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9707 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9708 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9709 We don't want to pack more than two leaves into a non-IF AND/OR
9710 expression.
9711 If the tree code of the left-hand operand isn't an AND/OR-IF code
9712 and isn't equal to IF-CODE, then we don't want to add the
9713 right-hand operand. If the inner right-hand side of the left-hand
9714 operand has side effects, or isn't simple, then we can't add to it,
9715 as otherwise we might destroy the if-sequence. */
9716 if (TREE_CODE (arg0) == icode
9717 && simple_operand_p_2 (arg1)
9718 /* Needed for sequence points to handle trappings, and
9719 side-effects. */
9720 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9722 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9723 arg1);
9724 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9725 tem);
9727 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9728 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9729 else if (TREE_CODE (arg1) == icode
9730 && simple_operand_p_2 (arg0)
9731 /* Needed for sequence points to handle trappings, and
9732 side-effects. */
9733 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9735 tem = fold_build2_loc (loc, ncode, type,
9736 arg0, TREE_OPERAND (arg1, 0));
9737 return fold_build2_loc (loc, icode, type, tem,
9738 TREE_OPERAND (arg1, 1));
9740 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9741 into (A OR B).
9742 For sequence-point consistency, we need to check for trapping,
9743 and side-effects. */
9744 else if (code == icode && simple_operand_p_2 (arg0)
9745 && simple_operand_p_2 (arg1))
9746 return fold_build2_loc (loc, ncode, type, arg0, arg1);
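/* Worked example (illustrative sketch): when branch cost makes
   non-short-circuit evaluation preferable and both operands are
   simple and cannot trap,
     a != 0 && b != 0 becomes (a != 0) & (b != 0)
   trading the short-circuit jump for straight-line code. */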
9749 return NULL_TREE;
9752 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9753 by changing CODE to reduce the magnitude of constants involved in
9754 ARG0 of the comparison.
9755 Returns a canonicalized comparison tree if a simplification was
9756 possible, otherwise returns NULL_TREE.
9757 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9758 valid if signed overflow is undefined. */
9760 static tree
9761 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9762 tree arg0, tree arg1,
9763 bool *strict_overflow_p)
9765 enum tree_code code0 = TREE_CODE (arg0);
9766 tree t, cst0 = NULL_TREE;
9767 int sgn0;
9769 /* Match A +- CST code arg1. We can change this only if overflow
9770 is undefined. */
9771 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9772 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9773 /* In principle pointers also have undefined overflow behavior,
9774 but that causes problems elsewhere. */
9775 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9776 && (code0 == MINUS_EXPR
9777 || code0 == PLUS_EXPR)
9778 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9779 return NULL_TREE;
9781 /* Identify the constant in arg0 and its sign. */
9782 cst0 = TREE_OPERAND (arg0, 1);
9783 sgn0 = tree_int_cst_sgn (cst0);
9785 /* Overflowed constants and zero will cause problems. */
9786 if (integer_zerop (cst0)
9787 || TREE_OVERFLOW (cst0))
9788 return NULL_TREE;
9790 /* See if we can reduce the magnitude of the constant in
9791 arg0 by changing the comparison code. */
9792 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9793 if (code == LT_EXPR
9794 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9795 code = LE_EXPR;
9796 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9797 else if (code == GT_EXPR
9798 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9799 code = GE_EXPR;
9800 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9801 else if (code == LE_EXPR
9802 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9803 code = LT_EXPR;
9804 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9805 else if (code == GE_EXPR
9806 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9807 code = GT_EXPR;
9808 else
9809 return NULL_TREE;
9810 *strict_overflow_p = true;
9812 /* Now build the constant reduced in magnitude. But not if that
9813 would produce one outside of its type's range. */
9814 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9815 && ((sgn0 == 1
9816 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9817 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9818 || (sgn0 == -1
9819 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9820 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9821 return NULL_TREE;
9823 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9824 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9825 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9826 t = fold_convert (TREE_TYPE (arg1), t);
9828 return fold_build2_loc (loc, code, type, t, arg1);
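
/* The equivalence the canonicalization relies on, as standalone C
   (illustrative sketch with hypothetical names; valid only because
   signed overflow is undefined, so a - 8 is assumed not to wrap):  */
static int lt_form (int a, int b) { return a - 8 < b; }
static int le_form (int a, int b) { return a - 7 <= b; }
/* Whenever neither subtraction overflows, lt_form and le_form agree,
   so the folder may prefer the form with the smaller constant.  */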
9831 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9832 overflow further. Try to decrease the magnitude of constants involved
9833 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9834 and put sole constants at the second argument position.
9835 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9837 static tree
9838 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9839 tree arg0, tree arg1)
9841 tree t;
9842 bool strict_overflow_p;
9843 const char * const warnmsg = G_("assuming signed overflow does not occur "
9844 "when reducing constant in comparison");
9846 /* Try canonicalization by simplifying arg0. */
9847 strict_overflow_p = false;
9848 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9849 &strict_overflow_p);
9850 if (t)
9852 if (strict_overflow_p)
9853 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9854 return t;
9857 /* Try canonicalization by simplifying arg1 using the swapped
9858 comparison. */
9859 code = swap_tree_comparison (code);
9860 strict_overflow_p = false;
9861 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9862 &strict_overflow_p);
9863 if (t && strict_overflow_p)
9864 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9865 return t;
9868 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9869 space. This is used to avoid issuing overflow warnings for
9870 expressions like &p->x which cannot wrap. */
9872 static bool
9873 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9875 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9876 return true;
9878 if (maybe_lt (bitpos, 0))
9879 return true;
9881 poly_wide_int wi_offset;
9882 int precision = TYPE_PRECISION (TREE_TYPE (base));
9883 if (offset == NULL_TREE)
9884 wi_offset = wi::zero (precision);
9885 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9886 return true;
9887 else
9888 wi_offset = wi::to_poly_wide (offset);
9890 wi::overflow_type overflow;
9891 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9892 precision);
9893 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9894 if (overflow)
9895 return true;
9897 poly_uint64 total_hwi, size;
9898 if (!total.to_uhwi (&total_hwi)
9899 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9900 &size)
9901 || known_eq (size, 0U))
9902 return true;
9904 if (known_le (total_hwi, size))
9905 return false;
9907 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9908 array. */
9909 if (TREE_CODE (base) == ADDR_EXPR
9910 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9911 &size)
9912 && maybe_ne (size, 0U)
9913 && known_le (total_hwi, size))
9914 return false;
9916 return true;
9919 /* Return a positive integer when the symbol DECL is known to have
9920 a nonzero address, zero when it's known not to (e.g., it's a weak
9921 symbol), and a negative integer when the symbol is not yet in the
9922 symbol table and so whether or not its address is zero is unknown.
9923 For function-local objects, always return a positive integer. */
9924 static int
9925 maybe_nonzero_address (tree decl)
9927 /* Normally, don't do anything for variables and functions before symtab is
9928 built; it is quite possible that DECL will be declared weak later.
9929 But if folding_initializer, we need a constant answer now, so create
9930 the symtab entry and prevent later weak declaration. */
9931 if (DECL_P (decl) && decl_in_symtab_p (decl))
9932 if (struct symtab_node *symbol
9933 = (folding_initializer
9934 ? symtab_node::get_create (decl)
9935 : symtab_node::get (decl)))
9936 return symbol->nonzero_address ();
9938 /* Function local objects are never NULL. */
9939 if (DECL_P (decl)
9940 && (DECL_CONTEXT (decl)
9941 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9942 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9943 return 1;
9945 return -1;
9948 /* Subroutine of fold_binary. This routine performs all of the
9949 transformations that are common to the equality/inequality
9950 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9951 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Code other than
9952 fold_binary itself should go through fold_binary. Fold a comparison with
9953 tree code CODE and type TYPE with operands OP0 and OP1. Return
9954 the folded comparison or NULL_TREE. */
9956 static tree
9957 fold_comparison (location_t loc, enum tree_code code, tree type,
9958 tree op0, tree op1)
9960 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9961 tree arg0, arg1, tem;
9963 arg0 = op0;
9964 arg1 = op1;
9966 STRIP_SIGN_NOPS (arg0);
9967 STRIP_SIGN_NOPS (arg1);
9969 /* For comparisons of pointers we can decompose it to a compile time
9970 comparison of the base objects and the offsets into the object.
9971 This requires at least one operand being an ADDR_EXPR or a
9972 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9973 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9974 && (TREE_CODE (arg0) == ADDR_EXPR
9975 || TREE_CODE (arg1) == ADDR_EXPR
9976 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9977 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9979 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9980 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9981 machine_mode mode;
9982 int volatilep, reversep, unsignedp;
9983 bool indirect_base0 = false, indirect_base1 = false;
9985 /* Get base and offset for the access. Strip ADDR_EXPR for
9986 get_inner_reference, but put it back by stripping INDIRECT_REF
9987 off the base object if possible. indirect_baseN will be true
9988 if baseN is not an address but refers to the object itself. */
9989 base0 = arg0;
9990 if (TREE_CODE (arg0) == ADDR_EXPR)
9992 base0
9993 = get_inner_reference (TREE_OPERAND (arg0, 0),
9994 &bitsize, &bitpos0, &offset0, &mode,
9995 &unsignedp, &reversep, &volatilep);
9996 if (TREE_CODE (base0) == INDIRECT_REF)
9997 base0 = TREE_OPERAND (base0, 0);
9998 else
9999 indirect_base0 = true;
10001 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10003 base0 = TREE_OPERAND (arg0, 0);
10004 STRIP_SIGN_NOPS (base0);
10005 if (TREE_CODE (base0) == ADDR_EXPR)
10007 base0
10008 = get_inner_reference (TREE_OPERAND (base0, 0),
10009 &bitsize, &bitpos0, &offset0, &mode,
10010 &unsignedp, &reversep, &volatilep);
10011 if (TREE_CODE (base0) == INDIRECT_REF)
10012 base0 = TREE_OPERAND (base0, 0);
10013 else
10014 indirect_base0 = true;
10016 if (offset0 == NULL_TREE || integer_zerop (offset0))
10017 offset0 = TREE_OPERAND (arg0, 1);
10018 else
10019 offset0 = size_binop (PLUS_EXPR, offset0,
10020 TREE_OPERAND (arg0, 1));
10021 if (poly_int_tree_p (offset0))
10023 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10024 TYPE_PRECISION (sizetype));
10025 tem <<= LOG2_BITS_PER_UNIT;
10026 tem += bitpos0;
10027 if (tem.to_shwi (&bitpos0))
10028 offset0 = NULL_TREE;
10032 base1 = arg1;
10033 if (TREE_CODE (arg1) == ADDR_EXPR)
10035 base1
10036 = get_inner_reference (TREE_OPERAND (arg1, 0),
10037 &bitsize, &bitpos1, &offset1, &mode,
10038 &unsignedp, &reversep, &volatilep);
10039 if (TREE_CODE (base1) == INDIRECT_REF)
10040 base1 = TREE_OPERAND (base1, 0);
10041 else
10042 indirect_base1 = true;
10044 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10046 base1 = TREE_OPERAND (arg1, 0);
10047 STRIP_SIGN_NOPS (base1);
10048 if (TREE_CODE (base1) == ADDR_EXPR)
10050 base1
10051 = get_inner_reference (TREE_OPERAND (base1, 0),
10052 &bitsize, &bitpos1, &offset1, &mode,
10053 &unsignedp, &reversep, &volatilep);
10054 if (TREE_CODE (base1) == INDIRECT_REF)
10055 base1 = TREE_OPERAND (base1, 0);
10056 else
10057 indirect_base1 = true;
10059 if (offset1 == NULL_TREE || integer_zerop (offset1))
10060 offset1 = TREE_OPERAND (arg1, 1);
10061 else
10062 offset1 = size_binop (PLUS_EXPR, offset1,
10063 TREE_OPERAND (arg1, 1));
10064 if (poly_int_tree_p (offset1))
10066 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10067 TYPE_PRECISION (sizetype));
10068 tem <<= LOG2_BITS_PER_UNIT;
10069 tem += bitpos1;
10070 if (tem.to_shwi (&bitpos1))
10071 offset1 = NULL_TREE;
10075 /* If we have equivalent bases we might be able to simplify. */
10076 if (indirect_base0 == indirect_base1
10077 && operand_equal_p (base0, base1,
10078 indirect_base0 ? OEP_ADDRESS_OF : 0))
10080 /* We can fold this expression to a constant if the non-constant
10081 offset parts are equal. */
10082 if ((offset0 == offset1
10083 || (offset0 && offset1
10084 && operand_equal_p (offset0, offset1, 0)))
10085 && (equality_code
10086 || (indirect_base0
10087 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10088 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10090 if (!equality_code
10091 && maybe_ne (bitpos0, bitpos1)
10092 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10093 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10094 fold_overflow_warning (("assuming pointer wraparound does not "
10095 "occur when comparing P +- C1 with "
10096 "P +- C2"),
10097 WARN_STRICT_OVERFLOW_CONDITIONAL);
10099 switch (code)
10101 case EQ_EXPR:
10102 if (known_eq (bitpos0, bitpos1))
10103 return constant_boolean_node (true, type);
10104 if (known_ne (bitpos0, bitpos1))
10105 return constant_boolean_node (false, type);
10106 break;
10107 case NE_EXPR:
10108 if (known_ne (bitpos0, bitpos1))
10109 return constant_boolean_node (true, type);
10110 if (known_eq (bitpos0, bitpos1))
10111 return constant_boolean_node (false, type);
10112 break;
10113 case LT_EXPR:
10114 if (known_lt (bitpos0, bitpos1))
10115 return constant_boolean_node (true, type);
10116 if (known_ge (bitpos0, bitpos1))
10117 return constant_boolean_node (false, type);
10118 break;
10119 case LE_EXPR:
10120 if (known_le (bitpos0, bitpos1))
10121 return constant_boolean_node (true, type);
10122 if (known_gt (bitpos0, bitpos1))
10123 return constant_boolean_node (false, type);
10124 break;
10125 case GE_EXPR:
10126 if (known_ge (bitpos0, bitpos1))
10127 return constant_boolean_node (true, type);
10128 if (known_lt (bitpos0, bitpos1))
10129 return constant_boolean_node (false, type);
10130 break;
10131 case GT_EXPR:
10132 if (known_gt (bitpos0, bitpos1))
10133 return constant_boolean_node (true, type);
10134 if (known_le (bitpos0, bitpos1))
10135 return constant_boolean_node (false, type);
10136 break;
10137 default:;
10140 /* We can simplify the comparison to a comparison of the variable
10141 offset parts if the constant offset parts are equal.
10142 Be careful to use signed sizetype here because otherwise we
10143 mess with array offsets in the wrong way. This is possible
10144 because pointer arithmetic is restricted to remain within an
10145 object and overflow on pointer differences is undefined as of
10146 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10147 else if (known_eq (bitpos0, bitpos1)
10148 && (equality_code
10149 || (indirect_base0
10150 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10151 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10153 /* By converting to signed sizetype we cover middle-end pointer
10154 arithmetic which operates on unsigned pointer types of size
10155 type size and ARRAY_REF offsets which are properly sign or
10156 zero extended from their type in case it is narrower than
10157 sizetype. */
10158 if (offset0 == NULL_TREE)
10159 offset0 = build_int_cst (ssizetype, 0);
10160 else
10161 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10162 if (offset1 == NULL_TREE)
10163 offset1 = build_int_cst (ssizetype, 0);
10164 else
10165 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10167 if (!equality_code
10168 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10169 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10170 fold_overflow_warning (("assuming pointer wraparound does not "
10171 "occur when comparing P +- C1 with "
10172 "P +- C2"),
10173 WARN_STRICT_OVERFLOW_COMPARISON);
10175 return fold_build2_loc (loc, code, type, offset0, offset1);
10178 /* For equal offsets we can simplify to a comparison of the
10179 base addresses. */
10180 else if (known_eq (bitpos0, bitpos1)
10181 && (indirect_base0
10182 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10183 && (indirect_base1
10184 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10185 && ((offset0 == offset1)
10186 || (offset0 && offset1
10187 && operand_equal_p (offset0, offset1, 0))))
10189 if (indirect_base0)
10190 base0 = build_fold_addr_expr_loc (loc, base0);
10191 if (indirect_base1)
10192 base1 = build_fold_addr_expr_loc (loc, base1);
10193 return fold_build2_loc (loc, code, type, base0, base1);
10195 /* Comparison between an ordinary (non-weak) symbol and a null
10196 pointer can be eliminated since such symbols must have a non
10197 null address. In C, relational expressions between pointers
10198 to objects and null pointers are undefined. The results
10199 below follow the C++ rules with the additional property that
10200 every object pointer compares greater than a null pointer. */
10202 else if (((DECL_P (base0)
10203 && maybe_nonzero_address (base0) > 0
10204 /* Avoid folding references to struct members at offset 0 to
10205 prevent tests like '&ptr->firstmember == 0' from getting
10206 eliminated. When ptr is null, although the -> expression
10207 is strictly speaking invalid, GCC retains it as a matter
10208 of QoI. See PR c/44555. */
10209 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10210 || CONSTANT_CLASS_P (base0))
10211 && indirect_base0
10212 /* The caller guarantees that when one of the arguments is
10213 constant (i.e., null in this case) it is second. */
10214 && integer_zerop (arg1))
10216 switch (code)
10218 case EQ_EXPR:
10219 case LE_EXPR:
10220 case LT_EXPR:
10221 return constant_boolean_node (false, type);
10222 case GE_EXPR:
10223 case GT_EXPR:
10224 case NE_EXPR:
10225 return constant_boolean_node (true, type);
10226 default:
10227 gcc_unreachable ();
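/* Worked example (illustrative sketch): for a declared object x with
   member b at a nonzero offset, '&x.b == 0' folds to false here,
   while '&p->firstmember == 0' at offset zero is deliberately left
   alone as described above (PR c/44555). */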
10232 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10233 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10234 the resulting offset is smaller in absolute value than the
10235 original one and has the same sign. */
10236 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10237 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10238 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10239 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10240 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10241 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10242 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10243 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10245 tree const1 = TREE_OPERAND (arg0, 1);
10246 tree const2 = TREE_OPERAND (arg1, 1);
10247 tree variable1 = TREE_OPERAND (arg0, 0);
10248 tree variable2 = TREE_OPERAND (arg1, 0);
10249 tree cst;
10250 const char * const warnmsg = G_("assuming signed overflow does not "
10251 "occur when combining constants around "
10252 "a comparison");
10254 /* Put the constant on the side where it doesn't overflow and is
10255 of lower absolute value and of the same sign as before. */
10256 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10257 ? MINUS_EXPR : PLUS_EXPR,
10258 const2, const1);
10259 if (!TREE_OVERFLOW (cst)
10260 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10261 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10263 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10264 return fold_build2_loc (loc, code, type,
10265 variable1,
10266 fold_build2_loc (loc, TREE_CODE (arg1),
10267 TREE_TYPE (arg1),
10268 variable2, cst));
10271 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10272 ? MINUS_EXPR : PLUS_EXPR,
10273 const1, const2);
10274 if (!TREE_OVERFLOW (cst)
10275 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10276 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10278 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10279 return fold_build2_loc (loc, code, type,
10280 fold_build2_loc (loc, TREE_CODE (arg0),
10281 TREE_TYPE (arg0),
10282 variable1, cst),
10283 variable2);
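/* Worked example (illustrative sketch): assuming signed overflow is
   undefined,
     x + 2 < y + 5 becomes x < y + 3
   where the combined constant 3 is smaller in magnitude than, and of
   the same sign as, the constant it replaces. */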
10287 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10288 if (tem)
10289 return tem;
10291 /* If we are comparing an expression that just has comparisons
10292 of two integer values, arithmetic expressions of those comparisons,
10293 and constants, we can simplify it. There are only three cases
10294 to check: the two values can either be equal, the first can be
10295 greater, or the second can be greater. Fold the expression for
10296 those three values. Since each value must be 0 or 1, we have
10297 eight possibilities, each of which corresponds to the constant 0
10298 or 1 or one of the six possible comparisons.
10300 This handles common cases like (a > b) == 0 but also handles
10301 expressions like ((x > y) - (y > x)) > 0, which supposedly
10302 occur in macroized code. */
10304 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10306 tree cval1 = 0, cval2 = 0;
10308 if (twoval_comparison_p (arg0, &cval1, &cval2)
10309 /* Don't handle degenerate cases here; they should already
10310 have been handled anyway. */
10311 && cval1 != 0 && cval2 != 0
10312 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10313 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10314 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10315 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10316 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10317 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10318 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10320 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10321 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10323 /* We can't just pass T to eval_subst in case cval1 or cval2
10324 was the same as ARG1. */
10326 tree high_result
10327 = fold_build2_loc (loc, code, type,
10328 eval_subst (loc, arg0, cval1, maxval,
10329 cval2, minval),
10330 arg1);
10331 tree equal_result
10332 = fold_build2_loc (loc, code, type,
10333 eval_subst (loc, arg0, cval1, maxval,
10334 cval2, maxval),
10335 arg1);
10336 tree low_result
10337 = fold_build2_loc (loc, code, type,
10338 eval_subst (loc, arg0, cval1, minval,
10339 cval2, maxval),
10340 arg1);
10342 /* All three of these results should be 0 or 1. Confirm they are.
10343 Then use those values to select the proper code to use. */
10345 if (TREE_CODE (high_result) == INTEGER_CST
10346 && TREE_CODE (equal_result) == INTEGER_CST
10347 && TREE_CODE (low_result) == INTEGER_CST)
10349 /* Make a 3-bit mask with the high-order bit being the
10350 value for `>', the next for '=', and the low for '<'. */
10351 switch ((integer_onep (high_result) * 4)
10352 + (integer_onep (equal_result) * 2)
10353 + integer_onep (low_result))
10355 case 0:
10356 /* Always false. */
10357 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10358 case 1:
10359 code = LT_EXPR;
10360 break;
10361 case 2:
10362 code = EQ_EXPR;
10363 break;
10364 case 3:
10365 code = LE_EXPR;
10366 break;
10367 case 4:
10368 code = GT_EXPR;
10369 break;
10370 case 5:
10371 code = NE_EXPR;
10372 break;
10373 case 6:
10374 code = GE_EXPR;
10375 break;
10376 case 7:
10377 /* Always true. */
10378 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10381 return fold_build2_loc (loc, code, type, cval1, cval2);
10386 return NULL_TREE;
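
/* Standalone illustration of the three-value evaluation above
   (illustrative sketch with a hypothetical name, not folder code):  */
static int sign3 (int x, int y) { return (x > y) - (y > x); }
/* Evaluating 'sign3 (x, y) > 0' for x < y, x == y and x > y yields
   0, 0 and 1, i.e. the 3-bit mask 4, so the comparison folds to
   x > y.  */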
10390 /* Subroutine of fold_binary. Optimize complex multiplications of the
10391 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10392 argument EXPR represents the expression "z" of type TYPE. */
10394 static tree
10395 fold_mult_zconjz (location_t loc, tree type, tree expr)
10397 tree itype = TREE_TYPE (type);
10398 tree rpart, ipart, tem;
10400 if (TREE_CODE (expr) == COMPLEX_EXPR)
10402 rpart = TREE_OPERAND (expr, 0);
10403 ipart = TREE_OPERAND (expr, 1);
10405 else if (TREE_CODE (expr) == COMPLEX_CST)
10407 rpart = TREE_REALPART (expr);
10408 ipart = TREE_IMAGPART (expr);
10410 else
10412 expr = save_expr (expr);
10413 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10414 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10417 rpart = save_expr (rpart);
10418 ipart = save_expr (ipart);
10419 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10420 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10421 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10422 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10423 build_zero_cst (itype));
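
/* The identity fold_mult_zconjz implements, checked as standalone C
   (illustrative sketch; assumes C99 <complex.h>):  */
#include <complex.h>

static double
zconjz_demo (double complex z)
{
  /* z * conj (z) equals the sum of the squares of the real and
     imaginary parts, with a zero imaginary part;
     e.g. (3 + 4i) * (3 - 4i) == 25.  */
  return creal (z) * creal (z) + cimag (z) * cimag (z);
}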
10427 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10428 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10429 true if successful. */
10431 static bool
10432 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10434 unsigned HOST_WIDE_INT i, nunits;
10436 if (TREE_CODE (arg) == VECTOR_CST
10437 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10439 for (i = 0; i < nunits; ++i)
10440 elts[i] = VECTOR_CST_ELT (arg, i);
10442 else if (TREE_CODE (arg) == CONSTRUCTOR)
10444 constructor_elt *elt;
10446 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10447 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10448 return false;
10449 else
10450 elts[i] = elt->value;
10452 else
10453 return false;
10454 for (; i < nelts; i++)
10455 elts[i]
10456 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10457 return true;
10460 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10461 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10462 NULL_TREE otherwise. */
10464 tree
10465 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10467 unsigned int i;
10468 unsigned HOST_WIDE_INT nelts;
10469 bool need_ctor = false;
10471 if (!sel.length ().is_constant (&nelts))
10472 return NULL_TREE;
10473 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10474 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10475 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10476 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10477 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10478 return NULL_TREE;
10480 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10481 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10482 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10483 return NULL_TREE;
10485 tree_vector_builder out_elts (type, nelts, 1);
10486 for (i = 0; i < nelts; i++)
10488 HOST_WIDE_INT index;
10489 if (!sel[i].is_constant (&index))
10490 return NULL_TREE;
10491 if (!CONSTANT_CLASS_P (in_elts[index]))
10492 need_ctor = true;
10493 out_elts.quick_push (unshare_expr (in_elts[index]));
10496 if (need_ctor)
10498 vec<constructor_elt, va_gc> *v;
10499 vec_alloc (v, nelts);
10500 for (i = 0; i < nelts; i++)
10501 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10502 return build_constructor (type, v);
10504 else
10505 return out_elts.build ();
10508 /* Try to fold a pointer difference of type TYPE between two address
10509 expressions of array references AREF0 and AREF1 using location LOC. Return a
10510 simplified expression for the difference or NULL_TREE. */
10512 static tree
10513 fold_addr_of_array_ref_difference (location_t loc, tree type,
10514 tree aref0, tree aref1,
10515 bool use_pointer_diff)
10517 tree base0 = TREE_OPERAND (aref0, 0);
10518 tree base1 = TREE_OPERAND (aref1, 0);
10519 tree base_offset = build_int_cst (type, 0);
10521 /* If the bases are array references as well, recurse. If the bases
10522 are pointer indirections compute the difference of the pointers.
10523 If the bases are equal, we are set. */
10524 if ((TREE_CODE (base0) == ARRAY_REF
10525 && TREE_CODE (base1) == ARRAY_REF
10526 && (base_offset
10527 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10528 use_pointer_diff)))
10529 || (INDIRECT_REF_P (base0)
10530 && INDIRECT_REF_P (base1)
10531 && (base_offset
10532 = use_pointer_diff
10533 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10534 TREE_OPERAND (base0, 0),
10535 TREE_OPERAND (base1, 0))
10536 : fold_binary_loc (loc, MINUS_EXPR, type,
10537 fold_convert (type,
10538 TREE_OPERAND (base0, 0)),
10539 fold_convert (type,
10540 TREE_OPERAND (base1, 0)))))
10541 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10543 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10544 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10545 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10546 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10547 return fold_build2_loc (loc, PLUS_EXPR, type,
10548 base_offset,
10549 fold_build2_loc (loc, MULT_EXPR, type,
10550 diff, esz));
10552 return NULL_TREE;
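
/* Source-level shape of the fold above (illustrative sketch):  */
#include <stddef.h>

static ptrdiff_t
aref_diff_demo (double a[], ptrdiff_t i, ptrdiff_t j)
{
  return &a[i] - &a[j];  /* folds to i - j; no addresses are computed */
}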
10555 /* If the real or vector real constant CST of type TYPE has an exact
10556 inverse, return it, else return NULL. */
10558 tree
10559 exact_inverse (tree type, tree cst)
10561 REAL_VALUE_TYPE r;
10562 tree unit_type;
10563 machine_mode mode;
10565 switch (TREE_CODE (cst))
10567 case REAL_CST:
10568 r = TREE_REAL_CST (cst);
10570 if (exact_real_inverse (TYPE_MODE (type), &r))
10571 return build_real (type, r);
10573 return NULL_TREE;
10575 case VECTOR_CST:
10577 unit_type = TREE_TYPE (type);
10578 mode = TYPE_MODE (unit_type);
10580 tree_vector_builder elts;
10581 if (!elts.new_unary_operation (type, cst, false))
10582 return NULL_TREE;
10583 unsigned int count = elts.encoded_nelts ();
10584 for (unsigned int i = 0; i < count; ++i)
10586 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10587 if (!exact_real_inverse (mode, &r))
10588 return NULL_TREE;
10589 elts.quick_push (build_real (unit_type, r));
10592 return elts.build ();
10595 default:
10596 return NULL_TREE;
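
/* What "exact inverse" means here (illustrative sketch): essentially
   only powers of two qualify.  In C99 hex-float notation:
     1.0 / 0.25 == 0x1p+2                exact, so x / 0.25 may become x * 4.0
     1.0 / 3.0  ~= 0x1.5555555555555p-2  rounded, so x / 3.0 is kept.  */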
10600 /* Mask out the tz least significant bits of X of type TYPE where
10601 tz is the number of trailing zeroes in Y. */
10602 static wide_int
10603 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10605 int tz = wi::ctz (y);
10606 if (tz > 0)
10607 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10608 return x;
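
/* mask_with_tz at the scalar level (illustrative sketch): in a
   context like (x & y), bits of x below the lowest set bit of y can
   never survive.  */
static unsigned int
mask_with_tz_demo (unsigned int x, unsigned int y)
{
  int tz = __builtin_ctz (y);  /* undefined for y == 0, unlike wi::ctz */
  return tz > 0 ? x & ~((1u << tz) - 1) : x;
}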
10611 /* Return true when T is an address and is known to be nonzero.
10612 For floating point we further ensure that T is not denormal.
10613 Similar logic is present in nonzero_address in rtlanal.h.
10615 If the return value is based on the assumption that signed overflow
10616 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10617 change *STRICT_OVERFLOW_P. */
10619 static bool
10620 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10622 tree type = TREE_TYPE (t);
10623 enum tree_code code;
10625 /* Doing something useful for floating point would need more work. */
10626 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10627 return false;
10629 code = TREE_CODE (t);
10630 switch (TREE_CODE_CLASS (code))
10632 case tcc_unary:
10633 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10634 strict_overflow_p);
10635 case tcc_binary:
10636 case tcc_comparison:
10637 return tree_binary_nonzero_warnv_p (code, type,
10638 TREE_OPERAND (t, 0),
10639 TREE_OPERAND (t, 1),
10640 strict_overflow_p);
10641 case tcc_constant:
10642 case tcc_declaration:
10643 case tcc_reference:
10644 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10646 default:
10647 break;
10650 switch (code)
10652 case TRUTH_NOT_EXPR:
10653 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10654 strict_overflow_p);
10656 case TRUTH_AND_EXPR:
10657 case TRUTH_OR_EXPR:
10658 case TRUTH_XOR_EXPR:
10659 return tree_binary_nonzero_warnv_p (code, type,
10660 TREE_OPERAND (t, 0),
10661 TREE_OPERAND (t, 1),
10662 strict_overflow_p);
10664 case COND_EXPR:
10665 case CONSTRUCTOR:
10666 case OBJ_TYPE_REF:
10667 case ASSERT_EXPR:
10668 case ADDR_EXPR:
10669 case WITH_SIZE_EXPR:
10670 case SSA_NAME:
10671 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10673 case COMPOUND_EXPR:
10674 case MODIFY_EXPR:
10675 case BIND_EXPR:
10676 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10677 strict_overflow_p);
10679 case SAVE_EXPR:
10680 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10681 strict_overflow_p);
10683 case CALL_EXPR:
10685 tree fndecl = get_callee_fndecl (t);
10686 if (!fndecl) return false;
10687 if (flag_delete_null_pointer_checks && !flag_check_new
10688 && DECL_IS_OPERATOR_NEW_P (fndecl)
10689 && !TREE_NOTHROW (fndecl))
10690 return true;
10691 if (flag_delete_null_pointer_checks
10692 && lookup_attribute ("returns_nonnull",
10693 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10694 return true;
10695 return alloca_call_p (t);
10698 default:
10699 break;
10701 return false;
10704 /* Return true when T is an address and is known to be nonzero.
10705 Handle warnings about undefined signed overflow. */
10707 bool
10708 tree_expr_nonzero_p (tree t)
10710 bool ret, strict_overflow_p;
10712 strict_overflow_p = false;
10713 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10714 if (strict_overflow_p)
10715 fold_overflow_warning (("assuming signed overflow does not occur when "
10716 "determining that expression is always "
10717 "non-zero"),
10718 WARN_STRICT_OVERFLOW_MISC);
10719 return ret;
10722 /* Return true if T is known not to be equal to an integer W. */
10724 bool
10725 expr_not_equal_to (tree t, const wide_int &w)
10727 value_range vr;
10728 switch (TREE_CODE (t))
10730 case INTEGER_CST:
10731 return wi::to_wide (t) != w;
10733 case SSA_NAME:
10734 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10735 return false;
10737 if (cfun)
10738 get_range_query (cfun)->range_of_expr (vr, t);
10739 else
10740 get_global_range_query ()->range_of_expr (vr, t);
10742 if (!vr.undefined_p ()
10743 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10744 return true;
10745 /* If T has some known zero bits and W has any of those bits set,
10746 then T is known not to be equal to W. */
10747 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10748 TYPE_PRECISION (TREE_TYPE (t))), 0))
10749 return true;
10750 return false;
10752 default:
10753 return false;
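/* Worked example of the nonzero-bits test above (illustrative
   sketch): for an SSA name t defined as x << 1, bit 0 of t is known
   to be zero, so expr_not_equal_to (t, 5) is true -- 5 has bit 0 set,
   making (5 & ~nonzero_bits) nonzero -- and 't == 5' can fold to
   false. */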
10757 /* Fold a binary expression of code CODE and type TYPE with operands
10758 OP0 and OP1. LOC is the location of the resulting expression.
10759 Return the folded expression if folding is successful. Otherwise,
10760 return NULL_TREE. */
10762 tree
10763 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10764 tree op0, tree op1)
10766 enum tree_code_class kind = TREE_CODE_CLASS (code);
10767 tree arg0, arg1, tem;
10768 tree t1 = NULL_TREE;
10769 bool strict_overflow_p;
10770 unsigned int prec;
10772 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10773 && TREE_CODE_LENGTH (code) == 2
10774 && op0 != NULL_TREE
10775 && op1 != NULL_TREE);
10777 arg0 = op0;
10778 arg1 = op1;
10780 /* Strip any conversions that don't change the mode. This is
10781 safe for every expression, except for a comparison expression
10782 because its signedness is derived from its operands. So, in
10783 the latter case, only strip conversions that don't change the
10784 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10785 preserved.
10787 Note that this is done as an internal manipulation within the
10788 constant folder, in order to find the simplest representation
10789 of the arguments so that their form can be studied. In any
10790 case, the appropriate type conversions should be put back in
10791 the tree that will get out of the constant folder. */
10793 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10795 STRIP_SIGN_NOPS (arg0);
10796 STRIP_SIGN_NOPS (arg1);
10798 else
10800 STRIP_NOPS (arg0);
10801 STRIP_NOPS (arg1);
10804 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10805 constant but we can't do arithmetic on them. */
10806 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10808 tem = const_binop (code, type, arg0, arg1);
10809 if (tem != NULL_TREE)
10811 if (TREE_TYPE (tem) != type)
10812 tem = fold_convert_loc (loc, type, tem);
10813 return tem;
10817 /* If this is a commutative operation, and ARG0 is a constant, move it
10818 to ARG1 to reduce the number of tests below. */
10819 if (commutative_tree_code (code)
10820 && tree_swap_operands_p (arg0, arg1))
10821 return fold_build2_loc (loc, code, type, op1, op0);
10823 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10824 to ARG1 to reduce the number of tests below. */
10825 if (kind == tcc_comparison
10826 && tree_swap_operands_p (arg0, arg1))
10827 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10829 tem = generic_simplify (loc, code, type, op0, op1);
10830 if (tem)
10831 return tem;
10833 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10835 First check for cases where an arithmetic operation is applied to a
10836 compound, conditional, or comparison operation. Push the arithmetic
10837 operation inside the compound or conditional to see if any folding
10838 can then be done. Convert comparison to conditional for this purpose.
10839 This also optimizes non-constant cases that used to be done in
10840 expand_expr.
10842 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10843 one of the operands is a comparison and the other is a comparison, a
10844 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10845 code below would make the expression more complex. Change it to a
10846 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10847 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10849 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10850 || code == EQ_EXPR || code == NE_EXPR)
10851 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10852 && ((truth_value_p (TREE_CODE (arg0))
10853 && (truth_value_p (TREE_CODE (arg1))
10854 || (TREE_CODE (arg1) == BIT_AND_EXPR
10855 && integer_onep (TREE_OPERAND (arg1, 1)))))
10856 || (truth_value_p (TREE_CODE (arg1))
10857 && (truth_value_p (TREE_CODE (arg0))
10858 || (TREE_CODE (arg0) == BIT_AND_EXPR
10859 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10861 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10862 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10863 : TRUTH_XOR_EXPR,
10864 boolean_type_node,
10865 fold_convert_loc (loc, boolean_type_node, arg0),
10866 fold_convert_loc (loc, boolean_type_node, arg1));
10868 if (code == EQ_EXPR)
10869 tem = invert_truthvalue_loc (loc, tem);
10871 return fold_convert_loc (loc, type, tem);
10874 if (TREE_CODE_CLASS (code) == tcc_binary
10875 || TREE_CODE_CLASS (code) == tcc_comparison)
10877 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10879 tem = fold_build2_loc (loc, code, type,
10880 fold_convert_loc (loc, TREE_TYPE (op0),
10881 TREE_OPERAND (arg0, 1)), op1);
10882 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10883 tem);
10885 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10887 tem = fold_build2_loc (loc, code, type, op0,
10888 fold_convert_loc (loc, TREE_TYPE (op1),
10889 TREE_OPERAND (arg1, 1)));
10890 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10891 tem);
10894 if (TREE_CODE (arg0) == COND_EXPR
10895 || TREE_CODE (arg0) == VEC_COND_EXPR
10896 || COMPARISON_CLASS_P (arg0))
10898 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10899 arg0, arg1,
10900 /*cond_first_p=*/1);
10901 if (tem != NULL_TREE)
10902 return tem;
10905 if (TREE_CODE (arg1) == COND_EXPR
10906 || TREE_CODE (arg1) == VEC_COND_EXPR
10907 || COMPARISON_CLASS_P (arg1))
10909 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10910 arg1, arg0,
10911 /*cond_first_p=*/0);
10912 if (tem != NULL_TREE)
10913 return tem;
10917 switch (code)
10919 case MEM_REF:
10920 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10921 if (TREE_CODE (arg0) == ADDR_EXPR
10922 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10924 tree iref = TREE_OPERAND (arg0, 0);
10925 return fold_build2 (MEM_REF, type,
10926 TREE_OPERAND (iref, 0),
10927 int_const_binop (PLUS_EXPR, arg1,
10928 TREE_OPERAND (iref, 1)));
10931 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10932 if (TREE_CODE (arg0) == ADDR_EXPR
10933 && handled_component_p (TREE_OPERAND (arg0, 0)))
10935 tree base;
10936 poly_int64 coffset;
10937 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10938 &coffset);
10939 if (!base)
10940 return NULL_TREE;
10941 return fold_build2 (MEM_REF, type,
10942 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10943 int_const_binop (PLUS_EXPR, arg1,
10944 size_int (coffset)));
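        /* Illustrative example with assumed layout, not from the original
           source: given "struct S { int a; int b; } s;" with 4-byte int,
           MEM[&s.b, 4] is rewritten via get_addr_base_and_unit_offset to
           MEM[&s, 8], folding the field offset into the constant.  */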
10947 return NULL_TREE;
10949 case POINTER_PLUS_EXPR:
10950 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10951 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10952 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10953 return fold_convert_loc (loc, type,
10954 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10955 fold_convert_loc (loc, sizetype,
10956 arg1),
10957 fold_convert_loc (loc, sizetype,
10958 arg0)));
10960 return NULL_TREE;
10962 case PLUS_EXPR:
10963 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10965 /* X + (X / CST) * -CST is X % CST. */
10966 if (TREE_CODE (arg1) == MULT_EXPR
10967 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10968 && operand_equal_p (arg0,
10969 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10971 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10972 tree cst1 = TREE_OPERAND (arg1, 1);
10973 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10974 cst1, cst0);
10975 if (sum && integer_zerop (sum))
10976 return fold_convert_loc (loc, type,
10977 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10978 TREE_TYPE (arg0), arg0,
10979 cst0));
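          /* Illustrative example, not from the original source: for
             unsigned x, "x + (x / 16) * -16" has cst0 == 16 and
             cst1 == -16, whose sum folds to zero, so the whole
             expression is rewritten as "x % 16".  */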
10983 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10984 one. Make sure the type is not saturating and has the signedness of
10985 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10986 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10987 if ((TREE_CODE (arg0) == MULT_EXPR
10988 || TREE_CODE (arg1) == MULT_EXPR)
10989 && !TYPE_SATURATING (type)
10990 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10991 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10992 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10994 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10995 if (tem)
10996 return tem;
10999 if (! FLOAT_TYPE_P (type))
11001 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11002 (plus (plus (mult) (mult)) (foo)) so that we can
11003 take advantage of the factoring cases below. */
11004 if (ANY_INTEGRAL_TYPE_P (type)
11005 && TYPE_OVERFLOW_WRAPS (type)
11006 && (((TREE_CODE (arg0) == PLUS_EXPR
11007 || TREE_CODE (arg0) == MINUS_EXPR)
11008 && TREE_CODE (arg1) == MULT_EXPR)
11009 || ((TREE_CODE (arg1) == PLUS_EXPR
11010 || TREE_CODE (arg1) == MINUS_EXPR)
11011 && TREE_CODE (arg0) == MULT_EXPR)))
11013 tree parg0, parg1, parg, marg;
11014 enum tree_code pcode;
11016 if (TREE_CODE (arg1) == MULT_EXPR)
11017 parg = arg0, marg = arg1;
11018 else
11019 parg = arg1, marg = arg0;
11020 pcode = TREE_CODE (parg);
11021 parg0 = TREE_OPERAND (parg, 0);
11022 parg1 = TREE_OPERAND (parg, 1);
11023 STRIP_NOPS (parg0);
11024 STRIP_NOPS (parg1);
11026 if (TREE_CODE (parg0) == MULT_EXPR
11027 && TREE_CODE (parg1) != MULT_EXPR)
11028 return fold_build2_loc (loc, pcode, type,
11029 fold_build2_loc (loc, PLUS_EXPR, type,
11030 fold_convert_loc (loc, type,
11031 parg0),
11032 fold_convert_loc (loc, type,
11033 marg)),
11034 fold_convert_loc (loc, type, parg1));
11035 if (TREE_CODE (parg0) != MULT_EXPR
11036 && TREE_CODE (parg1) == MULT_EXPR)
11037 return
11038 fold_build2_loc (loc, PLUS_EXPR, type,
11039 fold_convert_loc (loc, type, parg0),
11040 fold_build2_loc (loc, pcode, type,
11041 fold_convert_loc (loc, type, marg),
11042 fold_convert_loc (loc, type,
11043 parg1)));
11046 else
11048 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11049 to __complex__ ( x, y ). This is not the same for SNaNs or
11050 if signed zeros are involved. */
11051 if (!HONOR_SNANS (arg0)
11052 && !HONOR_SIGNED_ZEROS (arg0)
11053 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11055 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11056 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11057 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11058 bool arg0rz = false, arg0iz = false;
11059 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11060 || (arg0i && (arg0iz = real_zerop (arg0i))))
11062 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11063 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11064 if (arg0rz && arg1i && real_zerop (arg1i))
11066 tree rp = arg1r ? arg1r
11067 : build1 (REALPART_EXPR, rtype, arg1);
11068 tree ip = arg0i ? arg0i
11069 : build1 (IMAGPART_EXPR, rtype, arg0);
11070 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11072 else if (arg0iz && arg1r && real_zerop (arg1r))
11074 tree rp = arg0r ? arg0r
11075 : build1 (REALPART_EXPR, rtype, arg0);
11076 tree ip = arg1i ? arg1i
11077 : build1 (IMAGPART_EXPR, rtype, arg1);
11078 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
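          /* Illustrative example, not from the original source: an
             addition whose operands fold to __complex__ (a, 0.0) and
             __complex__ (0.0, b) is combined into __complex__ (a, b),
             provided signaling NaNs and signed zeros need not be honored
             (e.g. with -fno-signed-zeros).  */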
11083 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11084 We associate floats only if the user has specified
11085 -fassociative-math. */
11086 if (flag_associative_math
11087 && TREE_CODE (arg1) == PLUS_EXPR
11088 && TREE_CODE (arg0) != MULT_EXPR)
11090 tree tree10 = TREE_OPERAND (arg1, 0);
11091 tree tree11 = TREE_OPERAND (arg1, 1);
11092 if (TREE_CODE (tree11) == MULT_EXPR
11093 && TREE_CODE (tree10) == MULT_EXPR)
11095 tree tree0;
11096 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11097 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11100 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11101 We associate floats only if the user has specified
11102 -fassociative-math. */
11103 if (flag_associative_math
11104 && TREE_CODE (arg0) == PLUS_EXPR
11105 && TREE_CODE (arg1) != MULT_EXPR)
11107 tree tree00 = TREE_OPERAND (arg0, 0);
11108 tree tree01 = TREE_OPERAND (arg0, 1);
11109 if (TREE_CODE (tree01) == MULT_EXPR
11110 && TREE_CODE (tree00) == MULT_EXPR)
11112 tree tree0;
11113 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11114 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11119 bit_rotate:
11120 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11121 is a rotate of A by C1 bits. */
11122 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11123 is a rotate of A by B bits.
11124 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11125 though in this case CODE must be | and not + or ^, otherwise
11126 it doesn't return A when B is 0. */
11128 enum tree_code code0, code1;
11129 tree rtype;
11130 code0 = TREE_CODE (arg0);
11131 code1 = TREE_CODE (arg1);
11132 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11133 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11134 && operand_equal_p (TREE_OPERAND (arg0, 0),
11135 TREE_OPERAND (arg1, 0), 0)
11136 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11137 TYPE_UNSIGNED (rtype))
11138 /* Only create rotates in complete modes. Other cases are not
11139 expanded properly. */
11140 && (element_precision (rtype)
11141 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11143 tree tree01, tree11;
11144 tree orig_tree01, orig_tree11;
11145 enum tree_code code01, code11;
11147 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11148 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11149 STRIP_NOPS (tree01);
11150 STRIP_NOPS (tree11);
11151 code01 = TREE_CODE (tree01);
11152 code11 = TREE_CODE (tree11);
11153 if (code11 != MINUS_EXPR
11154 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11156 std::swap (code0, code1);
11157 std::swap (code01, code11);
11158 std::swap (tree01, tree11);
11159 std::swap (orig_tree01, orig_tree11);
11161 if (code01 == INTEGER_CST
11162 && code11 == INTEGER_CST
11163 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11164 == element_precision (rtype)))
11166 tem = build2_loc (loc, LROTATE_EXPR,
11167 rtype, TREE_OPERAND (arg0, 0),
11168 code0 == LSHIFT_EXPR
11169 ? orig_tree01 : orig_tree11);
11170 return fold_convert_loc (loc, type, tem);
11172 else if (code11 == MINUS_EXPR)
11174 tree tree110, tree111;
11175 tree110 = TREE_OPERAND (tree11, 0);
11176 tree111 = TREE_OPERAND (tree11, 1);
11177 STRIP_NOPS (tree110);
11178 STRIP_NOPS (tree111);
11179 if (TREE_CODE (tree110) == INTEGER_CST
11180 && compare_tree_int (tree110,
11181 element_precision (rtype)) == 0
11182 && operand_equal_p (tree01, tree111, 0))
11184 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11185 ? LROTATE_EXPR : RROTATE_EXPR),
11186 rtype, TREE_OPERAND (arg0, 0),
11187 orig_tree01);
11188 return fold_convert_loc (loc, type, tem);
11191 else if (code == BIT_IOR_EXPR
11192 && code11 == BIT_AND_EXPR
11193 && pow2p_hwi (element_precision (rtype)))
11195 tree tree110, tree111;
11196 tree110 = TREE_OPERAND (tree11, 0);
11197 tree111 = TREE_OPERAND (tree11, 1);
11198 STRIP_NOPS (tree110);
11199 STRIP_NOPS (tree111);
11200 if (TREE_CODE (tree110) == NEGATE_EXPR
11201 && TREE_CODE (tree111) == INTEGER_CST
11202 && compare_tree_int (tree111,
11203 element_precision (rtype) - 1) == 0
11204 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11206 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11207 ? LROTATE_EXPR : RROTATE_EXPR),
11208 rtype, TREE_OPERAND (arg0, 0),
11209 orig_tree01);
11210 return fold_convert_loc (loc, type, tem);
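        /* Illustrative examples assuming a 32-bit unsigned int, not from
           the original source: each of

             unsigned rot1 (unsigned x) { return (x << 3) | (x >> 29); }
             unsigned rot2 (unsigned x, unsigned b)
             { return (x << b) | (x >> (32 - b)); }
             unsigned rot3 (unsigned x, unsigned b)
             { return (x << b) | (x >> (-b & 31)); }

           is recognized above as a single LROTATE_EXPR of x.  The rot3
           form is only accepted for |, since with + or ^ it would not
           yield x when b is 0.  */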
11216 associate:
11217 /* In most languages, we can't associate operations on floats through
11218 parentheses. Rather than remember where the parentheses were, we
11219 don't associate floats at all, unless the user has specified
11220 -fassociative-math.
11221 And, we need to make sure type is not saturating. */
11223 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11224 && !TYPE_SATURATING (type))
11226 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11227 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11228 tree atype = type;
11229 bool ok = true;
11231 /* Split both trees into variables, constants, and literals. Then
11232 associate each group together, the constants with literals,
11233 then the result with variables. This increases the chances of
11234 literals being recombined later and of generating relocatable
11235 expressions for the sum of a constant and literal. */
11236 var0 = split_tree (arg0, type, code,
11237 &minus_var0, &con0, &minus_con0,
11238 &lit0, &minus_lit0, 0);
11239 var1 = split_tree (arg1, type, code,
11240 &minus_var1, &con1, &minus_con1,
11241 &lit1, &minus_lit1, code == MINUS_EXPR);
11243 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11244 if (code == MINUS_EXPR)
11245 code = PLUS_EXPR;
11247 /* With undefined overflow prefer doing association in a type
11248 which wraps on overflow, if that is one of the operand types. */
11249 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11250 && !TYPE_OVERFLOW_WRAPS (type))
11252 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11253 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11254 atype = TREE_TYPE (arg0);
11255 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11256 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11257 atype = TREE_TYPE (arg1);
11258 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11261 /* With undefined overflow we can only associate constants with one
11262 variable, and constants whose association doesn't overflow. */
11263 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11264 && !TYPE_OVERFLOW_WRAPS (atype))
11266 if ((var0 && var1) || (minus_var0 && minus_var1))
11268 /* ??? If split_tree would handle NEGATE_EXPR we could
11269 simply reject these cases and the allowed cases would
11270 be the var0/minus_var1 ones. */
11271 tree tmp0 = var0 ? var0 : minus_var0;
11272 tree tmp1 = var1 ? var1 : minus_var1;
11273 bool one_neg = false;
11275 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11277 tmp0 = TREE_OPERAND (tmp0, 0);
11278 one_neg = !one_neg;
11280 if (CONVERT_EXPR_P (tmp0)
11281 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11282 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11283 <= TYPE_PRECISION (atype)))
11284 tmp0 = TREE_OPERAND (tmp0, 0);
11285 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11287 tmp1 = TREE_OPERAND (tmp1, 0);
11288 one_neg = !one_neg;
11290 if (CONVERT_EXPR_P (tmp1)
11291 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11292 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11293 <= TYPE_PRECISION (atype)))
11294 tmp1 = TREE_OPERAND (tmp1, 0);
11295 /* The only case we can still associate with two variables
11296 is if they cancel out. */
11297 if (!one_neg
11298 || !operand_equal_p (tmp0, tmp1, 0))
11299 ok = false;
11301 else if ((var0 && minus_var1
11302 && ! operand_equal_p (var0, minus_var1, 0))
11303 || (minus_var0 && var1
11304 && ! operand_equal_p (minus_var0, var1, 0)))
11305 ok = false;
11308 /* Only do something if we found more than two objects. Otherwise,
11309 nothing has changed and we risk infinite recursion. */
11310 if (ok
11311 && ((var0 != 0) + (var1 != 0)
11312 + (minus_var0 != 0) + (minus_var1 != 0)
11313 + (con0 != 0) + (con1 != 0)
11314 + (minus_con0 != 0) + (minus_con1 != 0)
11315 + (lit0 != 0) + (lit1 != 0)
11316 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11318 var0 = associate_trees (loc, var0, var1, code, atype);
11319 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11320 code, atype);
11321 con0 = associate_trees (loc, con0, con1, code, atype);
11322 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11323 code, atype);
11324 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11325 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11326 code, atype);
11328 if (minus_var0 && var0)
11330 var0 = associate_trees (loc, var0, minus_var0,
11331 MINUS_EXPR, atype);
11332 minus_var0 = 0;
11334 if (minus_con0 && con0)
11336 con0 = associate_trees (loc, con0, minus_con0,
11337 MINUS_EXPR, atype);
11338 minus_con0 = 0;
11341 /* Preserve the MINUS_EXPR if the negative part of the literal is
11342 greater than the positive part. Otherwise, the multiplicative
11343 folding code (i.e. extract_muldiv) may be fooled in case
11344 unsigned constants are subtracted, like in the following
11345 example: ((X*2 + 4) - 8U)/2. */
11346 if (minus_lit0 && lit0)
11348 if (TREE_CODE (lit0) == INTEGER_CST
11349 && TREE_CODE (minus_lit0) == INTEGER_CST
11350 && tree_int_cst_lt (lit0, minus_lit0)
11351 /* But avoid ending up with only negated parts. */
11352 && (var0 || con0))
11354 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11355 MINUS_EXPR, atype);
11356 lit0 = 0;
11358 else
11360 lit0 = associate_trees (loc, lit0, minus_lit0,
11361 MINUS_EXPR, atype);
11362 minus_lit0 = 0;
11366 /* Don't introduce overflows through reassociation. */
11367 if ((lit0 && TREE_OVERFLOW_P (lit0))
11368 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11369 return NULL_TREE;
11371 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11372 con0 = associate_trees (loc, con0, lit0, code, atype);
11373 lit0 = 0;
11374 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11375 code, atype);
11376 minus_lit0 = 0;
11378 /* Eliminate minus_con0. */
11379 if (minus_con0)
11381 if (con0)
11382 con0 = associate_trees (loc, con0, minus_con0,
11383 MINUS_EXPR, atype);
11384 else if (var0)
11385 var0 = associate_trees (loc, var0, minus_con0,
11386 MINUS_EXPR, atype);
11387 else
11388 gcc_unreachable ();
11389 minus_con0 = 0;
11392 /* Eliminate minus_var0. */
11393 if (minus_var0)
11395 if (con0)
11396 con0 = associate_trees (loc, con0, minus_var0,
11397 MINUS_EXPR, atype);
11398 else
11399 gcc_unreachable ();
11400 minus_var0 = 0;
11403 return
11404 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11405 code, atype));
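          /* Illustrative example, not from the original source: for
             unsigned x and y, "(x + 1u) + (y + 2u)" splits into variables
             {x, y} and literals {1u, 2u}; the literals are associated
             first, yielding "(x + y) + 3u".  For types with undefined
             overflow, two distinct variables are only combined when they
             cancel, as checked above.  */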
11409 return NULL_TREE;
11411 case POINTER_DIFF_EXPR:
11412 case MINUS_EXPR:
11413 /* Fold &a[i] - &a[j] to i-j. */
11414 if (TREE_CODE (arg0) == ADDR_EXPR
11415 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11416 && TREE_CODE (arg1) == ADDR_EXPR
11417 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11419 tree tem = fold_addr_of_array_ref_difference (loc, type,
11420 TREE_OPERAND (arg0, 0),
11421 TREE_OPERAND (arg1, 0),
11422 code
11423 == POINTER_DIFF_EXPR);
11424 if (tem)
11425 return tem;
11428 /* Further transformations are not for pointers. */
11429 if (code == POINTER_DIFF_EXPR)
11430 return NULL_TREE;
11432 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11433 if (TREE_CODE (arg0) == NEGATE_EXPR
11434 && negate_expr_p (op1)
11435 /* If arg0 is e.g. unsigned int and type is int, then this could
11436 introduce UB, because if A is INT_MIN at runtime, the original
11437 expression can be well defined while the latter is not.
11438 See PR83269. */
11439 && !(ANY_INTEGRAL_TYPE_P (type)
11440 && TYPE_OVERFLOW_UNDEFINED (type)
11441 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11442 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11443 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11444 fold_convert_loc (loc, type,
11445 TREE_OPERAND (arg0, 0)));
11447 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11448 __complex__ ( x, -y ). This is not the same for SNaNs or if
11449 signed zeros are involved. */
11450 if (!HONOR_SNANS (arg0)
11451 && !HONOR_SIGNED_ZEROS (arg0)
11452 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11454 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11455 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11456 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11457 bool arg0rz = false, arg0iz = false;
11458 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11459 || (arg0i && (arg0iz = real_zerop (arg0i))))
11461 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11462 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11463 if (arg0rz && arg1i && real_zerop (arg1i))
11465 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11466 arg1r ? arg1r
11467 : build1 (REALPART_EXPR, rtype, arg1));
11468 tree ip = arg0i ? arg0i
11469 : build1 (IMAGPART_EXPR, rtype, arg0);
11470 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11472 else if (arg0iz && arg1r && real_zerop (arg1r))
11474 tree rp = arg0r ? arg0r
11475 : build1 (REALPART_EXPR, rtype, arg0);
11476 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11477 arg1i ? arg1i
11478 : build1 (IMAGPART_EXPR, rtype, arg1));
11479 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11484 /* A - B -> A + (-B) if B is easily negatable. */
11485 if (negate_expr_p (op1)
11486 && ! TYPE_OVERFLOW_SANITIZED (type)
11487 && ((FLOAT_TYPE_P (type)
11488 /* Avoid this transformation if B is a positive REAL_CST. */
11489 && (TREE_CODE (op1) != REAL_CST
11490 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11491 || INTEGRAL_TYPE_P (type)))
11492 return fold_build2_loc (loc, PLUS_EXPR, type,
11493 fold_convert_loc (loc, type, arg0),
11494 negate_expr (op1));
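      /* Illustrative example, not from the original source: for signed
         int, "x - 5" is canonicalized by the transform above to
         "x + (-5)", letting the association code treat additions and
         subtractions uniformly.  A positive REAL_CST is deliberately
         excluded, so "x - 5.0" is left alone.  */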
11496 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11497 one. Make sure the type is not saturating and has the signedness of
11498 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11499 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11500 if ((TREE_CODE (arg0) == MULT_EXPR
11501 || TREE_CODE (arg1) == MULT_EXPR)
11502 && !TYPE_SATURATING (type)
11503 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11504 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11505 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11507 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11508 if (tem)
11509 return tem;
11512 goto associate;
11514 case MULT_EXPR:
11515 if (! FLOAT_TYPE_P (type))
11517 /* Transform x * -C into -x * C if x is easily negatable. */
11518 if (TREE_CODE (op1) == INTEGER_CST
11519 && tree_int_cst_sgn (op1) == -1
11520 && negate_expr_p (op0)
11521 && negate_expr_p (op1)
11522 && (tem = negate_expr (op1)) != op1
11523 && ! TREE_OVERFLOW (tem))
11524 return fold_build2_loc (loc, MULT_EXPR, type,
11525 fold_convert_loc (loc, type,
11526 negate_expr (op0)), tem);
11528 strict_overflow_p = false;
11529 if (TREE_CODE (arg1) == INTEGER_CST
11530 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11531 &strict_overflow_p)) != 0)
11533 if (strict_overflow_p)
11534 fold_overflow_warning (("assuming signed overflow does not "
11535 "occur when simplifying "
11536 "multiplication"),
11537 WARN_STRICT_OVERFLOW_MISC);
11538 return fold_convert_loc (loc, type, tem);
11541 /* Optimize z * conj(z) for integer complex numbers. */
11542 if (TREE_CODE (arg0) == CONJ_EXPR
11543 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11544 return fold_mult_zconjz (loc, type, arg1);
11545 if (TREE_CODE (arg1) == CONJ_EXPR
11546 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11547 return fold_mult_zconjz (loc, type, arg0);
11549 else
11551 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11552 This is not the same for NaNs or if signed zeros are
11553 involved. */
11554 if (!HONOR_NANS (arg0)
11555 && !HONOR_SIGNED_ZEROS (arg0)
11556 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11557 && TREE_CODE (arg1) == COMPLEX_CST
11558 && real_zerop (TREE_REALPART (arg1)))
11560 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11561 if (real_onep (TREE_IMAGPART (arg1)))
11562 return
11563 fold_build2_loc (loc, COMPLEX_EXPR, type,
11564 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11565 rtype, arg0)),
11566 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11567 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11568 return
11569 fold_build2_loc (loc, COMPLEX_EXPR, type,
11570 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11571 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11572 rtype, arg0)));
11575 /* Optimize z * conj(z) for floating point complex numbers.
11576 Guarded by flag_unsafe_math_optimizations as non-finite
11577 imaginary components don't produce scalar results. */
11578 if (flag_unsafe_math_optimizations
11579 && TREE_CODE (arg0) == CONJ_EXPR
11580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11581 return fold_mult_zconjz (loc, type, arg1);
11582 if (flag_unsafe_math_optimizations
11583 && TREE_CODE (arg1) == CONJ_EXPR
11584 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11585 return fold_mult_zconjz (loc, type, arg0);
11587 goto associate;
11589 case BIT_IOR_EXPR:
11590 /* Canonicalize (X & C1) | C2. */
11591 if (TREE_CODE (arg0) == BIT_AND_EXPR
11592 && TREE_CODE (arg1) == INTEGER_CST
11593 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11595 int width = TYPE_PRECISION (type), w;
11596 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11597 wide_int c2 = wi::to_wide (arg1);
11599 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11600 if ((c1 & c2) == c1)
11601 return omit_one_operand_loc (loc, type, arg1,
11602 TREE_OPERAND (arg0, 0));
11604 wide_int msk = wi::mask (width, false,
11605 TYPE_PRECISION (TREE_TYPE (arg1)));
11607 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11608 if (wi::bit_and_not (msk, c1 | c2) == 0)
11610 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11611 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11614 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11615 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11616 mode which allows further optimizations. */
11617 c1 &= msk;
11618 c2 &= msk;
11619 wide_int c3 = wi::bit_and_not (c1, c2);
11620 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11622 wide_int mask = wi::mask (w, false,
11623 TYPE_PRECISION (type));
11624 if (((c1 | c2) & mask) == mask
11625 && wi::bit_and_not (c1, mask) == 0)
11627 c3 = mask;
11628 break;
11632 if (c3 != c1)
11634 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11635 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11636 wide_int_to_tree (type, c3));
11637 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
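          /* Illustrative examples for unsigned char, not from the
             original source:
               (x & 0x03) | 0x07  ->  0x07               ((C1 & C2) == C1)
               (x & 0xf3) | 0x0f  ->  x | 0x0f           ((C1 | C2) == ~0)
               (x & 0x1c) | 0x0f  ->  (x & 0x10) | 0x0f  (C1 &= ~C2)
             The loop above instead keeps C1 as a byte-sized mask when
             that form is likely to enable further simplification.  */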
11641 /* See if this can be simplified into a rotate first. If that
11642 is unsuccessful continue in the association code. */
11643 goto bit_rotate;
11645 case BIT_XOR_EXPR:
11646 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11647 if (TREE_CODE (arg0) == BIT_AND_EXPR
11648 && INTEGRAL_TYPE_P (type)
11649 && integer_onep (TREE_OPERAND (arg0, 1))
11650 && integer_onep (arg1))
11651 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11652 build_zero_cst (TREE_TYPE (arg0)));
11654 /* See if this can be simplified into a rotate first. If that
11655 is unsuccessful continue in the association code. */
11656 goto bit_rotate;
11658 case BIT_AND_EXPR:
11659 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11660 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11661 && INTEGRAL_TYPE_P (type)
11662 && integer_onep (TREE_OPERAND (arg0, 1))
11663 && integer_onep (arg1))
11665 tree tem2;
11666 tem = TREE_OPERAND (arg0, 0);
11667 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11668 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11669 tem, tem2);
11670 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11671 build_zero_cst (TREE_TYPE (tem)));
11673 /* Fold ~X & 1 as (X & 1) == 0. */
11674 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11675 && INTEGRAL_TYPE_P (type)
11676 && integer_onep (arg1))
11678 tree tem2;
11679 tem = TREE_OPERAND (arg0, 0);
11680 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11681 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11682 tem, tem2);
11683 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11684 build_zero_cst (TREE_TYPE (tem)));
11686 /* Fold !X & 1 as X == 0. */
11687 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11688 && integer_onep (arg1))
11690 tem = TREE_OPERAND (arg0, 0);
11691 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11692 build_zero_cst (TREE_TYPE (tem)));
11695 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11696 multiple of 1 << CST. */
11697 if (TREE_CODE (arg1) == INTEGER_CST)
11699 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11700 wide_int ncst1 = -cst1;
11701 if ((cst1 & ncst1) == ncst1
11702 && multiple_of_p (type, arg0,
11703 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11704 return fold_convert_loc (loc, type, arg0);
11707 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11708 bits from CST2. */
11709 if (TREE_CODE (arg1) == INTEGER_CST
11710 && TREE_CODE (arg0) == MULT_EXPR
11711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11713 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11714 wide_int masked
11715 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11717 if (masked == 0)
11718 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11719 arg0, arg1);
11720 else if (masked != warg1)
11722 /* Avoid the transform if arg1 is a mask of some
11723 mode which allows further optimizations. */
11724 int pop = wi::popcount (warg1);
11725 if (!(pop >= BITS_PER_UNIT
11726 && pow2p_hwi (pop)
11727 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11728 return fold_build2_loc (loc, code, type, op0,
11729 wide_int_to_tree (type, masked));
11733 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11734 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11735 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11737 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11739 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11740 if (mask == -1)
11741 return
11742 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11745 goto associate;
11747 case RDIV_EXPR:
11748 /* Don't touch a floating-point divide by zero unless the mode
11749 of the constant can represent infinity. */
11750 if (TREE_CODE (arg1) == REAL_CST
11751 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11752 && real_zerop (arg1))
11753 return NULL_TREE;
11755 /* (-A) / (-B) -> A / B */
11756 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11757 return fold_build2_loc (loc, RDIV_EXPR, type,
11758 TREE_OPERAND (arg0, 0),
11759 negate_expr (arg1));
11760 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11761 return fold_build2_loc (loc, RDIV_EXPR, type,
11762 negate_expr (arg0),
11763 TREE_OPERAND (arg1, 0));
11764 return NULL_TREE;
11766 case TRUNC_DIV_EXPR:
11767 /* Fall through */
11769 case FLOOR_DIV_EXPR:
11770 /* Simplify A / (B << N) where A and B are positive and B is
11771 a power of 2, to A >> (N + log2(B)). */
11772 strict_overflow_p = false;
11773 if (TREE_CODE (arg1) == LSHIFT_EXPR
11774 && (TYPE_UNSIGNED (type)
11775 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11777 tree sval = TREE_OPERAND (arg1, 0);
11778 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11780 tree sh_cnt = TREE_OPERAND (arg1, 1);
11781 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11782 wi::exact_log2 (wi::to_wide (sval)));
11784 if (strict_overflow_p)
11785 fold_overflow_warning (("assuming signed overflow does not "
11786 "occur when simplifying A / (B << N)"),
11787 WARN_STRICT_OVERFLOW_MISC);
11789 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11790 sh_cnt, pow2);
11791 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11792 fold_convert_loc (loc, type, arg0), sh_cnt);
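          /* Illustrative example, not from the original source: for
             unsigned a and n, "a / (4u << n)" divides by a shifted power
             of two and is rewritten as "a >> (n + 2)", since
             log2 (4) == 2.  */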
11796 /* Fall through */
11798 case ROUND_DIV_EXPR:
11799 case CEIL_DIV_EXPR:
11800 case EXACT_DIV_EXPR:
11801 if (integer_zerop (arg1))
11802 return NULL_TREE;
11804 /* Convert -A / -B to A / B when the type is signed and overflow is
11805 undefined. */
11806 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11807 && TREE_CODE (op0) == NEGATE_EXPR
11808 && negate_expr_p (op1))
11810 if (ANY_INTEGRAL_TYPE_P (type))
11811 fold_overflow_warning (("assuming signed overflow does not occur "
11812 "when distributing negation across "
11813 "division"),
11814 WARN_STRICT_OVERFLOW_MISC);
11815 return fold_build2_loc (loc, code, type,
11816 fold_convert_loc (loc, type,
11817 TREE_OPERAND (arg0, 0)),
11818 negate_expr (op1));
11820 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11821 && TREE_CODE (arg1) == NEGATE_EXPR
11822 && negate_expr_p (op0))
11824 if (ANY_INTEGRAL_TYPE_P (type))
11825 fold_overflow_warning (("assuming signed overflow does not occur "
11826 "when distributing negation across "
11827 "division"),
11828 WARN_STRICT_OVERFLOW_MISC);
11829 return fold_build2_loc (loc, code, type,
11830 negate_expr (op0),
11831 fold_convert_loc (loc, type,
11832 TREE_OPERAND (arg1, 0)));
11835 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11836 operation, EXACT_DIV_EXPR.
11838 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11839 At one time others generated faster code, but it's not clear if they do
11840 after the last round of changes to the DIV code in expmed.c. */
11841 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11842 && multiple_of_p (type, arg0, arg1))
11843 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11844 fold_convert (type, arg0),
11845 fold_convert (type, arg1));
11847 strict_overflow_p = false;
11848 if (TREE_CODE (arg1) == INTEGER_CST
11849 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11850 &strict_overflow_p)) != 0)
11852 if (strict_overflow_p)
11853 fold_overflow_warning (("assuming signed overflow does not occur "
11854 "when simplifying division"),
11855 WARN_STRICT_OVERFLOW_MISC);
11856 return fold_convert_loc (loc, type, tem);
11859 return NULL_TREE;
11861 case CEIL_MOD_EXPR:
11862 case FLOOR_MOD_EXPR:
11863 case ROUND_MOD_EXPR:
11864 case TRUNC_MOD_EXPR:
11865 strict_overflow_p = false;
11866 if (TREE_CODE (arg1) == INTEGER_CST
11867 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11868 &strict_overflow_p)) != 0)
11870 if (strict_overflow_p)
11871 fold_overflow_warning (("assuming signed overflow does not occur "
11872 "when simplifying modulus"),
11873 WARN_STRICT_OVERFLOW_MISC);
11874 return fold_convert_loc (loc, type, tem);
11877 return NULL_TREE;
11879 case LROTATE_EXPR:
11880 case RROTATE_EXPR:
11881 case RSHIFT_EXPR:
11882 case LSHIFT_EXPR:
11883 /* Since a negative shift count is not well-defined,
11884 don't try to compute it in the compiler. */
11885 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11886 return NULL_TREE;
11888 prec = element_precision (type);
11890 /* If we have a rotate of a bit operation with the rotate count and
11891 the second operand of the bit operation both constant,
11892 permute the two operations. */
11893 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11894 && (TREE_CODE (arg0) == BIT_AND_EXPR
11895 || TREE_CODE (arg0) == BIT_IOR_EXPR
11896 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11897 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11899 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11900 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11901 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11902 fold_build2_loc (loc, code, type,
11903 arg00, arg1),
11904 fold_build2_loc (loc, code, type,
11905 arg01, arg1));
11908 /* Two consecutive rotates adding up to some integer
11909 multiple of the precision of the type can be ignored. */
11910 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11911 && TREE_CODE (arg0) == RROTATE_EXPR
11912 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11913 && wi::umod_trunc (wi::to_wide (arg1)
11914 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11915 prec) == 0)
11916 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
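      /* Illustrative example assuming a 32-bit unsigned int, not from
         the original source: an RROTATE_EXPR by 27 nested inside an
         RROTATE_EXPR by 5 has counts summing to 32, a multiple of the
         precision, so the pair folds back to plain x.  */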
11918 return NULL_TREE;
11920 case MIN_EXPR:
11921 case MAX_EXPR:
11922 goto associate;
11924 case TRUTH_ANDIF_EXPR:
11925 /* Note that the operands of this must be ints
11926 and their values must be 0 or 1.
11927 ("true" is a fixed value perhaps depending on the language.) */
11928 /* If first arg is constant zero, return it. */
11929 if (integer_zerop (arg0))
11930 return fold_convert_loc (loc, type, arg0);
11931 /* FALLTHRU */
11932 case TRUTH_AND_EXPR:
11933 /* If either arg is constant true, drop it. */
11934 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11935 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11936 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11937 /* Preserve sequence points. */
11938 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11939 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11940 /* If second arg is constant zero, result is zero, but first arg
11941 must be evaluated. */
11942 if (integer_zerop (arg1))
11943 return omit_one_operand_loc (loc, type, arg1, arg0);
11944 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11945 case will be handled here. */
11946 if (integer_zerop (arg0))
11947 return omit_one_operand_loc (loc, type, arg0, arg1);
11949 /* !X && X is always false. */
11950 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11951 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11952 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11953 /* X && !X is always false. */
11954 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11955 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11956 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11958 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11959 means A >= Y && A != MAX, but in this case we know that
11960 A < X <= MAX. */
11962 if (!TREE_SIDE_EFFECTS (arg0)
11963 && !TREE_SIDE_EFFECTS (arg1))
11965 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11966 if (tem && !operand_equal_p (tem, arg0, 0))
11967 return fold_build2_loc (loc, code, type, tem, arg1);
11969 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11970 if (tem && !operand_equal_p (tem, arg1, 0))
11971 return fold_build2_loc (loc, code, type, arg0, tem);
11974 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11975 != NULL_TREE)
11976 return tem;
11978 return NULL_TREE;
11980 case TRUTH_ORIF_EXPR:
11981 /* Note that the operands of this must be ints
11982 and their values must be 0 or true.
11983 ("true" is a fixed value perhaps depending on the language.) */
11984 /* If first arg is constant true, return it. */
11985 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11986 return fold_convert_loc (loc, type, arg0);
11987 /* FALLTHRU */
11988 case TRUTH_OR_EXPR:
11989 /* If either arg is constant zero, drop it. */
11990 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11991 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11992 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11993 /* Preserve sequence points. */
11994 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11995 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11996 /* If second arg is constant true, result is true, but we must
11997 evaluate first arg. */
11998 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11999 return omit_one_operand_loc (loc, type, arg1, arg0);
12000 /* Likewise for first arg, but note this only occurs here for
12001 TRUTH_OR_EXPR. */
12002 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12003 return omit_one_operand_loc (loc, type, arg0, arg1);
12005 /* !X || X is always true. */
12006 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12007 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12008 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12009 /* X || !X is always true. */
12010 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12011 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12012 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12014 /* (X && !Y) || (!X && Y) is X ^ Y */
12015 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12016 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12018 tree a0, a1, l0, l1, n0, n1;
12020 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12021 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12023 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12024 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12026 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12027 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12029 if ((operand_equal_p (n0, a0, 0)
12030 && operand_equal_p (n1, a1, 0))
12031 || (operand_equal_p (n0, a1, 0)
12032 && operand_equal_p (n1, a0, 0)))
12033 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12036 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12037 != NULL_TREE)
12038 return tem;
12040 return NULL_TREE;
12042 case TRUTH_XOR_EXPR:
12043 /* If the second arg is constant zero, drop it. */
12044 if (integer_zerop (arg1))
12045 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12046 /* If the second arg is constant true, this is a logical inversion. */
12047 if (integer_onep (arg1))
12049 tem = invert_truthvalue_loc (loc, arg0);
12050 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12052 /* Identical arguments cancel to zero. */
12053 if (operand_equal_p (arg0, arg1, 0))
12054 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12056 /* !X ^ X is always true. */
12057 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12058 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12059 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12061 /* X ^ !X is always true. */
12062 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12063 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12064 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12066 return NULL_TREE;
12068 case EQ_EXPR:
12069 case NE_EXPR:
12070 STRIP_NOPS (arg0);
12071 STRIP_NOPS (arg1);
12073 tem = fold_comparison (loc, code, type, op0, op1);
12074 if (tem != NULL_TREE)
12075 return tem;
12077 /* bool_var != 1 becomes !bool_var. */
12078 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12079 && code == NE_EXPR)
12080 return fold_convert_loc (loc, type,
12081 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12082 TREE_TYPE (arg0), arg0));
12084 /* bool_var == 0 becomes !bool_var. */
12085 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12086 && code == EQ_EXPR)
12087 return fold_convert_loc (loc, type,
12088 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12089 TREE_TYPE (arg0), arg0));
12091 /* !exp != 0 becomes !exp */
12092 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12093 && code == NE_EXPR)
12094 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12096 /* If this is an EQ or NE comparison with zero and ARG0 is
12097 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12098 two operations, but the latter can be done in one less insn
12099 on machines that have only two-operand insns or on which a
12100 constant cannot be the first operand. */
12101 if (TREE_CODE (arg0) == BIT_AND_EXPR
12102 && integer_zerop (arg1))
12104 tree arg00 = TREE_OPERAND (arg0, 0);
12105 tree arg01 = TREE_OPERAND (arg0, 1);
12106 if (TREE_CODE (arg00) == LSHIFT_EXPR
12107 && integer_onep (TREE_OPERAND (arg00, 0)))
12109 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12110 arg01, TREE_OPERAND (arg00, 1));
12111 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12112 build_one_cst (TREE_TYPE (arg0)));
12113 return fold_build2_loc (loc, code, type,
12114 fold_convert_loc (loc, TREE_TYPE (arg1),
12115 tem), arg1);
12117 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12118 && integer_onep (TREE_OPERAND (arg01, 0)))
12120 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12121 arg00, TREE_OPERAND (arg01, 1));
12122 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12123 build_one_cst (TREE_TYPE (arg0)));
12124 return fold_build2_loc (loc, code, type,
12125 fold_convert_loc (loc, TREE_TYPE (arg1),
12126 tem), arg1);
12130 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12131 C1 is a valid shift constant, and C2 is a power of two, i.e.
12132 a single bit. */
12133 if (TREE_CODE (arg0) == BIT_AND_EXPR
12134 && integer_pow2p (TREE_OPERAND (arg0, 1))
12135 && integer_zerop (arg1))
12137 tree arg00 = TREE_OPERAND (arg0, 0);
12138 STRIP_NOPS (arg00);
12139 if (TREE_CODE (arg00) == RSHIFT_EXPR
12140 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12142 tree itype = TREE_TYPE (arg00);
12143 tree arg001 = TREE_OPERAND (arg00, 1);
12144 prec = TYPE_PRECISION (itype);
12146 /* Check for a valid shift count. */
12147 if (wi::ltu_p (wi::to_wide (arg001), prec))
12149 tree arg01 = TREE_OPERAND (arg0, 1);
12150 tree arg000 = TREE_OPERAND (arg00, 0);
12151 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12152 /* If (C2 << C1) doesn't overflow, then
12153 ((X >> C1) & C2) != 0 can be rewritten as
12154 (X & (C2 << C1)) != 0. */
12155 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12157 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12158 arg01, arg001);
12159 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12160 arg000, tem);
12161 return fold_build2_loc (loc, code, type, tem,
12162 fold_convert_loc (loc, itype, arg1));
12164 /* Otherwise, for signed (arithmetic) shifts,
12165 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12166 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12167 else if (!TYPE_UNSIGNED (itype))
12168 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12169 : LT_EXPR,
12170 type, arg000,
12171 build_int_cst (itype, 0));
12172 /* Otherwise, for unsigned (logical) shifts,
12173 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12174 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12175 else
12176 return omit_one_operand_loc (loc, type,
12177 code == EQ_EXPR ? integer_one_node
12178 : integer_zero_node,
12179 arg000);
12184 /* If this is a comparison of a field, we may be able to simplify it. */
12185 if ((TREE_CODE (arg0) == COMPONENT_REF
12186 || TREE_CODE (arg0) == BIT_FIELD_REF)
12187 /* Handle the constant case even without -O
12188 to make sure the warnings are given. */
12189 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12191 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12192 if (t1)
12193 return t1;
12196 /* Optimize comparisons of strlen vs zero to a compare of the
12197 first character of the string vs zero. To wit,
12198 strlen(ptr) == 0 => *ptr == 0
12199 strlen(ptr) != 0 => *ptr != 0
12200 Other cases should reduce to one of these two (or a constant)
12201 due to the return value of strlen being unsigned. */
12202 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12204 tree fndecl = get_callee_fndecl (arg0);
12206 if (fndecl
12207 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12208 && call_expr_nargs (arg0) == 1
12209 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12210 == POINTER_TYPE))
12212 tree ptrtype
12213 = build_pointer_type (build_qualified_type (char_type_node,
12214 TYPE_QUAL_CONST));
12215 tree ptr = fold_convert_loc (loc, ptrtype,
12216 CALL_EXPR_ARG (arg0, 0));
12217 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12218 return fold_build2_loc (loc, code, type, iref,
12219 build_int_cst (TREE_TYPE (iref), 0));
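          /* Illustrative example, not from the original source:
             "if (strlen (s) == 0)" is rewritten as
             "if (*(const char *) s == 0)", replacing the library call by
             a single load and comparison.  */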
12223 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12224 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12225 if (TREE_CODE (arg0) == RSHIFT_EXPR
12226 && integer_zerop (arg1)
12227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12229 tree arg00 = TREE_OPERAND (arg0, 0);
12230 tree arg01 = TREE_OPERAND (arg0, 1);
12231 tree itype = TREE_TYPE (arg00);
12232 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12234 if (TYPE_UNSIGNED (itype))
12236 itype = signed_type_for (itype);
12237 arg00 = fold_convert_loc (loc, itype, arg00);
12239 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12240 type, arg00, build_zero_cst (itype));
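      /* Illustrative example assuming a 32-bit int, not from the
         original source: "(x >> 31) != 0" tests exactly the sign bit and
         folds to "x < 0"; for an unsigned x the operand is first
         converted to the corresponding signed type.  */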
12244 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12245 (X & C) == 0 when C is a single bit. */
12246 if (TREE_CODE (arg0) == BIT_AND_EXPR
12247 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12248 && integer_zerop (arg1)
12249 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12251 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12252 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12253 TREE_OPERAND (arg0, 1));
12254 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12255 type, tem,
12256 fold_convert_loc (loc, TREE_TYPE (arg0),
12257 arg1));
12260 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12261 constant C is a power of two, i.e. a single bit. */
12262 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12263 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12264 && integer_zerop (arg1)
12265 && integer_pow2p (TREE_OPERAND (arg0, 1))
12266 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12267 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12269 tree arg00 = TREE_OPERAND (arg0, 0);
12270 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12271 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12274 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12275 when C is a power of two, i.e. a single bit. */
12276 if (TREE_CODE (arg0) == BIT_AND_EXPR
12277 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12278 && integer_zerop (arg1)
12279 && integer_pow2p (TREE_OPERAND (arg0, 1))
12280 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12281 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12283 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12284 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12285 arg000, TREE_OPERAND (arg0, 1));
12286 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12287 tem, build_int_cst (TREE_TYPE (tem), 0));
12290 if (integer_zerop (arg1)
12291 && tree_expr_nonzero_p (arg0))
12293 tree res = constant_boolean_node (code == NE_EXPR, type);
12294 return omit_one_operand_loc (loc, type, res, arg0);
12297 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12298 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12300 tree arg00 = TREE_OPERAND (arg0, 0);
12301 tree arg01 = TREE_OPERAND (arg0, 1);
12302 tree arg10 = TREE_OPERAND (arg1, 0);
12303 tree arg11 = TREE_OPERAND (arg1, 1);
12304 tree itype = TREE_TYPE (arg0);
12306 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12307 operand_equal_p guarantees no side-effects so we don't need
12308 to use omit_one_operand on Z. */
12309 if (operand_equal_p (arg01, arg11, 0))
12310 return fold_build2_loc (loc, code, type, arg00,
12311 fold_convert_loc (loc, TREE_TYPE (arg00),
12312 arg10));
12313 if (operand_equal_p (arg01, arg10, 0))
12314 return fold_build2_loc (loc, code, type, arg00,
12315 fold_convert_loc (loc, TREE_TYPE (arg00),
12316 arg11));
12317 if (operand_equal_p (arg00, arg11, 0))
12318 return fold_build2_loc (loc, code, type, arg01,
12319 fold_convert_loc (loc, TREE_TYPE (arg01),
12320 arg10));
12321 if (operand_equal_p (arg00, arg10, 0))
12322 return fold_build2_loc (loc, code, type, arg01,
12323 fold_convert_loc (loc, TREE_TYPE (arg01),
12324 arg11));
12326 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12327 if (TREE_CODE (arg01) == INTEGER_CST
12328 && TREE_CODE (arg11) == INTEGER_CST)
12330 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12331 fold_convert_loc (loc, itype, arg11));
12332 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12333 return fold_build2_loc (loc, code, type, tem,
12334 fold_convert_loc (loc, itype, arg10));
12338 /* Attempt to simplify equality/inequality comparisons of complex
12339 values. Only lower the comparison if the result is known or
12340 can be simplified to a single scalar comparison. */
12341 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12342 || TREE_CODE (arg0) == COMPLEX_CST)
12343 && (TREE_CODE (arg1) == COMPLEX_EXPR
12344 || TREE_CODE (arg1) == COMPLEX_CST))
12346 tree real0, imag0, real1, imag1;
12347 tree rcond, icond;
12349 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12351 real0 = TREE_OPERAND (arg0, 0);
12352 imag0 = TREE_OPERAND (arg0, 1);
12354 else
12356 real0 = TREE_REALPART (arg0);
12357 imag0 = TREE_IMAGPART (arg0);
12360 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12362 real1 = TREE_OPERAND (arg1, 0);
12363 imag1 = TREE_OPERAND (arg1, 1);
12365 else
12367 real1 = TREE_REALPART (arg1);
12368 imag1 = TREE_IMAGPART (arg1);
12371 rcond = fold_binary_loc (loc, code, type, real0, real1);
12372 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12374 if (integer_zerop (rcond))
12376 if (code == EQ_EXPR)
12377 return omit_two_operands_loc (loc, type, boolean_false_node,
12378 imag0, imag1);
12379 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12381 else
12383 if (code == NE_EXPR)
12384 return omit_two_operands_loc (loc, type, boolean_true_node,
12385 imag0, imag1);
12386 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12390 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12391 if (icond && TREE_CODE (icond) == INTEGER_CST)
12393 if (integer_zerop (icond))
12395 if (code == EQ_EXPR)
12396 return omit_two_operands_loc (loc, type, boolean_false_node,
12397 real0, real1);
12398 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12400 else
12402 if (code == NE_EXPR)
12403 return omit_two_operands_loc (loc, type, boolean_true_node,
12404 real0, real1);
12405 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12410 return NULL_TREE;
12412 case LT_EXPR:
12413 case GT_EXPR:
12414 case LE_EXPR:
12415 case GE_EXPR:
12416 tem = fold_comparison (loc, code, type, op0, op1);
12417 if (tem != NULL_TREE)
12418 return tem;
12420 /* Transform comparisons of the form X +- C CMP X. */
12421 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12422 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12424 && !HONOR_SNANS (arg0))
12426 tree arg01 = TREE_OPERAND (arg0, 1);
12427 enum tree_code code0 = TREE_CODE (arg0);
12428 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12430 /* (X - c) > X becomes false. */
12431 if (code == GT_EXPR
12432 && ((code0 == MINUS_EXPR && is_positive >= 0)
12433 || (code0 == PLUS_EXPR && is_positive <= 0)))
12434 return constant_boolean_node (0, type);
12436 /* Likewise (X + c) < X becomes false. */
12437 if (code == LT_EXPR
12438 && ((code0 == PLUS_EXPR && is_positive >= 0)
12439 || (code0 == MINUS_EXPR && is_positive <= 0)))
12440 return constant_boolean_node (0, type);
12442 /* Convert (X - c) <= X to true. */
12443 if (!HONOR_NANS (arg1)
12444 && code == LE_EXPR
12445 && ((code0 == MINUS_EXPR && is_positive >= 0)
12446 || (code0 == PLUS_EXPR && is_positive <= 0)))
12447 return constant_boolean_node (1, type);
12449 /* Convert (X + c) >= X to true. */
12450 if (!HONOR_NANS (arg1)
12451 && code == GE_EXPR
12452 && ((code0 == PLUS_EXPR && is_positive >= 0)
12453 || (code0 == MINUS_EXPR && is_positive <= 0)))
12454 return constant_boolean_node (1, type);
12457 /* If we are comparing an ABS_EXPR with a constant, we can
12458 convert all the cases into explicit comparisons, but they may
12459 well not be faster than doing the ABS and one comparison.
12460 But ABS (X) <= C is a range comparison, which becomes a subtraction
12461 and a comparison, and is probably faster. */
12462 if (code == LE_EXPR
12463 && TREE_CODE (arg1) == INTEGER_CST
12464 && TREE_CODE (arg0) == ABS_EXPR
12465 && ! TREE_SIDE_EFFECTS (arg0)
12466 && (tem = negate_expr (arg1)) != 0
12467 && TREE_CODE (tem) == INTEGER_CST
12468 && !TREE_OVERFLOW (tem))
12469 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12470 build2 (GE_EXPR, type,
12471 TREE_OPERAND (arg0, 0), tem),
12472 build2 (LE_EXPR, type,
12473 TREE_OPERAND (arg0, 0), arg1));
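      /* Illustrative example, not from the original source:
         "abs (x) <= 5" becomes the range test "x >= -5 && x <= 5" (a
         TRUTH_ANDIF_EXPR), which later folding may turn into a single
         unsigned comparison such as "(unsigned) x + 5 <= 10".  */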
12475 /* Convert ABS_EXPR<x> >= 0 to true. */
12476 strict_overflow_p = false;
12477 if (code == GE_EXPR
12478 && (integer_zerop (arg1)
12479 || (! HONOR_NANS (arg0)
12480 && real_zerop (arg1)))
12481 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12483 if (strict_overflow_p)
12484 fold_overflow_warning (("assuming signed overflow does not occur "
12485 "when simplifying comparison of "
12486 "absolute value and zero"),
12487 WARN_STRICT_OVERFLOW_CONDITIONAL);
12488 return omit_one_operand_loc (loc, type,
12489 constant_boolean_node (true, type),
12490 arg0);
12493 /* Convert ABS_EXPR<x> < 0 to false. */
12494 strict_overflow_p = false;
12495 if (code == LT_EXPR
12496 && (integer_zerop (arg1) || real_zerop (arg1))
12497 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12499 if (strict_overflow_p)
12500 fold_overflow_warning (("assuming signed overflow does not occur "
12501 "when simplifying comparison of "
12502 "absolute value and zero"),
12503 WARN_STRICT_OVERFLOW_CONDITIONAL);
12504 return omit_one_operand_loc (loc, type,
12505 constant_boolean_node (false, type),
12506 arg0);
12509 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12510 and similarly for >= into !=. */
12511 if ((code == LT_EXPR || code == GE_EXPR)
12512 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12513 && TREE_CODE (arg1) == LSHIFT_EXPR
12514 && integer_onep (TREE_OPERAND (arg1, 0)))
12515 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12516 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12517 TREE_OPERAND (arg1, 1)),
12518 build_zero_cst (TREE_TYPE (arg0)));
12520 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12521 otherwise Y might be >= # of bits in X's type and thus e.g.
12522 (unsigned char) (1 << Y) for Y 15 might be 0.
12523 If the cast is widening, then 1 << Y should have unsigned type,
12524 otherwise if Y is number of bits in the signed shift type minus 1,
12525 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12526 31 might be 0xffffffff80000000. */
12527 if ((code == LT_EXPR || code == GE_EXPR)
12528 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12529 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12530 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12531 && CONVERT_EXPR_P (arg1)
12532 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12533 && (element_precision (TREE_TYPE (arg1))
12534 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12535 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12536 || (element_precision (TREE_TYPE (arg1))
12537 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12538 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12540 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12541 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12542 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12543 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12544 build_zero_cst (TREE_TYPE (arg0)));
12547 return NULL_TREE;
12549 case UNORDERED_EXPR:
12550 case ORDERED_EXPR:
12551 case UNLT_EXPR:
12552 case UNLE_EXPR:
12553 case UNGT_EXPR:
12554 case UNGE_EXPR:
12555 case UNEQ_EXPR:
12556 case LTGT_EXPR:
12557 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12559 tree targ0 = strip_float_extensions (arg0);
12560 tree targ1 = strip_float_extensions (arg1);
12561 tree newtype = TREE_TYPE (targ0);
12563 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12564 newtype = TREE_TYPE (targ1);
12566 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12567 return fold_build2_loc (loc, code, type,
12568 fold_convert_loc (loc, newtype, targ0),
12569 fold_convert_loc (loc, newtype, targ1));
12572 return NULL_TREE;
12574 case COMPOUND_EXPR:
12575 /* When pedantic, a compound expression can be neither an lvalue
12576 nor an integer constant expression. */
12577 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12578 return NULL_TREE;
12579 /* Don't let (0, 0) be a null pointer constant. */
12580 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12581 : fold_convert_loc (loc, type, arg1);
12582 return tem;
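/* E.g. (f (), x) is not folded here because the first operand has
   side effects, while (y, x) with side-effect-free y simplifies to
   plain x.  The NOP_EXPR wrap keeps a zero result from being
   mistaken for a literal null pointer constant.  */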
12584 case ASSERT_EXPR:
12585 /* An ASSERT_EXPR should never be passed to fold_binary. */
12586 gcc_unreachable ();
12588 default:
12589 return NULL_TREE;
12590 } /* switch (code) */
12593 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12594 ((A & N) + B) & M -> (A + B) & M
12595 Similarly if (N & M) == 0,
12596 ((A | N) + B) & M -> (A + B) & M
12597 and for - instead of + (or unary - instead of +)
12598 and/or ^ instead of |.
12599 If B is constant and (B & M) == 0, fold into A & M.
12601 This function is a helper for match.pd patterns. If any simplification
12602 is possible, it returns the non-NULL type in which the simplified
12603 operation should be performed; otherwise it returns NULL_TREE.
12605 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12606 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12607 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12608 +/-. */
12609 tree
12610 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12611 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12612 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12613 tree *pmop)
12615 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12616 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12617 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12618 if (~cst1 == 0
12619 || (cst1 & (cst1 + 1)) != 0
12620 || !INTEGRAL_TYPE_P (type)
12621 || (!TYPE_OVERFLOW_WRAPS (type)
12622 && TREE_CODE (type) != INTEGER_TYPE)
12623 || (wi::max_value (type) & cst1) != cst1)
12624 return NULL_TREE;
12626 enum tree_code codes[2] = { code00, code01 };
12627 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12628 int which = 0;
12629 wide_int cst0;
12631 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12632 arg1 (M) equals (1LL << cst) - 1.
12633 Store C into PMOP[0] and D into PMOP[1]. */
12634 pmop[0] = arg00;
12635 pmop[1] = arg01;
12636 which = code != NEGATE_EXPR;
12638 for (; which >= 0; which--)
12639 switch (codes[which])
12641 case BIT_AND_EXPR:
12642 case BIT_IOR_EXPR:
12643 case BIT_XOR_EXPR:
12644 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12645 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12646 if (codes[which] == BIT_AND_EXPR)
12648 if (cst0 != cst1)
12649 break;
12651 else if (cst0 != 0)
12652 break;
12653 /* If C or D is of the form (A & N) where
12654 (N & M) == M, or of the form (A | N) or
12655 (A ^ N) where (N & M) == 0, replace it with A. */
12656 pmop[which] = arg0xx[2 * which];
12657 break;
12658 case ERROR_MARK:
12659 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12660 break;
12661 /* If C or D is a N where (N & M) == 0, it can be
12662 omitted (replaced with 0). */
12663 if ((code == PLUS_EXPR
12664 || (code == MINUS_EXPR && which == 0))
12665 && (cst1 & wi::to_wide (pmop[which])) == 0)
12666 pmop[which] = build_int_cst (type, 0);
12667 /* Similarly, with C - N where (-N & M) == 0. */
12668 if (code == MINUS_EXPR
12669 && which == 1
12670 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12671 pmop[which] = build_int_cst (type, 0);
12672 break;
12673 default:
12674 gcc_unreachable ();
12677 /* Only build anything new if we optimized one or both arguments above. */
12678 if (pmop[0] == arg00 && pmop[1] == arg01)
12679 return NULL_TREE;
12681 if (TYPE_OVERFLOW_WRAPS (type))
12682 return type;
12683 else
12684 return unsigned_type_for (type);
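/* Worked example of the shape handled here, with M == 0x0f
   (i.e. 2**4 - 1):

     ((a & 0xff) + b) & 0x0f  ->  (a + b) & 0x0f

   since (0xff & 0x0f) == 0x0f, the inner AND cannot change the four
   bits that survive the outer mask; likewise ((a | 0x30) + b) & 0x0f
   -> (a + b) & 0x0f because (0x30 & 0x0f) == 0.  */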
12687 /* Used by contains_label_p and contains_label_1. */
12689 struct contains_label_data
12691 hash_set<tree> *pset;
12692 bool inside_switch_p;
12695 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12696 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12697 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12699 static tree
12700 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12702 contains_label_data *d = (contains_label_data *) data;
12703 switch (TREE_CODE (*tp))
12705 case LABEL_EXPR:
12706 return *tp;
12708 case CASE_LABEL_EXPR:
12709 if (!d->inside_switch_p)
12710 return *tp;
12711 return NULL_TREE;
12713 case SWITCH_EXPR:
12714 if (!d->inside_switch_p)
12716 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12717 return *tp;
12718 d->inside_switch_p = true;
12719 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12720 return *tp;
12721 d->inside_switch_p = false;
12722 *walk_subtrees = 0;
12724 return NULL_TREE;
12726 case GOTO_EXPR:
12727 *walk_subtrees = 0;
12728 return NULL_TREE;
12730 default:
12731 return NULL_TREE;
12735 /* Return whether the sub-tree ST contains a label which is accessible from
12736 outside the sub-tree. */
12738 static bool
12739 contains_label_p (tree st)
12741 hash_set<tree> pset;
12742 contains_label_data data = { &pset, false };
12743 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
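/* E.g. when folding 0 ? ({ lab: x; }) : y, the dead arm cannot simply
   be dropped: a goto elsewhere in the function may still target lab.
   contains_label_p detects this, while case labels buried inside their
   own SWITCH_EXPR are deliberately ignored.  */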
12746 /* Fold a ternary expression of code CODE and type TYPE with operands
12747 OP0, OP1, and OP2. Return the folded expression if folding is
12748 successful. Otherwise, return NULL_TREE. */
12750 tree
12751 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12752 tree op0, tree op1, tree op2)
12754 tree tem;
12755 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12756 enum tree_code_class kind = TREE_CODE_CLASS (code);
12758 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12759 && TREE_CODE_LENGTH (code) == 3);
12761 /* If this is a commutative operation, and OP0 is a constant, move it
12762 to OP1 to reduce the number of tests below. */
12763 if (commutative_ternary_tree_code (code)
12764 && tree_swap_operands_p (op0, op1))
12765 return fold_build3_loc (loc, code, type, op1, op0, op2);
12767 tem = generic_simplify (loc, code, type, op0, op1, op2);
12768 if (tem)
12769 return tem;
12771 /* Strip any conversions that don't change the mode. This is safe
12772 for every expression, except for a comparison expression because
12773 its signedness is derived from its operands. So, in the latter
12774 case, only strip conversions that don't change the signedness.
12776 Note that this is done as an internal manipulation within the
12777 constant folder, in order to find the simplest representation of
12778 the arguments so that their form can be studied. In any cases,
12779 the appropriate type conversions should be put back in the tree
12780 that will get out of the constant folder. */
12781 if (op0)
12783 arg0 = op0;
12784 STRIP_NOPS (arg0);
12787 if (op1)
12789 arg1 = op1;
12790 STRIP_NOPS (arg1);
12793 if (op2)
12795 arg2 = op2;
12796 STRIP_NOPS (arg2);
12799 switch (code)
12801 case COMPONENT_REF:
12802 if (TREE_CODE (arg0) == CONSTRUCTOR
12803 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12805 unsigned HOST_WIDE_INT idx;
12806 tree field, value;
12807 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12808 if (field == arg1)
12809 return value;
12811 return NULL_TREE;
12813 case COND_EXPR:
12814 case VEC_COND_EXPR:
12815 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12816 so all simple results must be passed through pedantic_non_lvalue. */
12817 if (TREE_CODE (arg0) == INTEGER_CST)
12819 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12820 tem = integer_zerop (arg0) ? op2 : op1;
12821 /* Only optimize constant conditions when the selected branch
12822 has the same type as the COND_EXPR. This avoids optimizing
12823 away "c ? x : throw", where the throw has a void type.
12824 Avoid throwing away an operand that contains a label. */
12825 if ((!TREE_SIDE_EFFECTS (unused_op)
12826 || !contains_label_p (unused_op))
12827 && (! VOID_TYPE_P (TREE_TYPE (tem))
12828 || VOID_TYPE_P (type)))
12829 return protected_set_expr_location_unshare (tem, loc);
12830 return NULL_TREE;
12832 else if (TREE_CODE (arg0) == VECTOR_CST)
12834 unsigned HOST_WIDE_INT nelts;
12835 if ((TREE_CODE (arg1) == VECTOR_CST
12836 || TREE_CODE (arg1) == CONSTRUCTOR)
12837 && (TREE_CODE (arg2) == VECTOR_CST
12838 || TREE_CODE (arg2) == CONSTRUCTOR)
12839 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12841 vec_perm_builder sel (nelts, nelts, 1);
12842 for (unsigned int i = 0; i < nelts; i++)
12844 tree val = VECTOR_CST_ELT (arg0, i);
12845 if (integer_all_onesp (val))
12846 sel.quick_push (i);
12847 else if (integer_zerop (val))
12848 sel.quick_push (nelts + i);
12849 else /* Currently unreachable. */
12850 return NULL_TREE;
12852 vec_perm_indices indices (sel, 2, nelts);
12853 tree t = fold_vec_perm (type, arg1, arg2, indices);
12854 if (t != NULL_TREE)
12855 return t;
12859 /* If we have A op B ? A : C, we may be able to convert this to a
12860 simpler expression, depending on the operation and the values
12861 of B and C. Signed zeros prevent all of these transformations,
12862 for reasons given above each one.
12864 Also try swapping the arguments and inverting the conditional. */
12865 if (COMPARISON_CLASS_P (arg0)
12866 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12867 && !HONOR_SIGNED_ZEROS (op1))
12869 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12870 TREE_OPERAND (arg0, 0),
12871 TREE_OPERAND (arg0, 1),
12872 op1, op2);
12873 if (tem)
12874 return tem;
12877 if (COMPARISON_CLASS_P (arg0)
12878 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12879 && !HONOR_SIGNED_ZEROS (op2))
12881 enum tree_code comp_code = TREE_CODE (arg0);
12882 tree arg00 = TREE_OPERAND (arg0, 0);
12883 tree arg01 = TREE_OPERAND (arg0, 1);
12884 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12885 if (comp_code != ERROR_MARK)
12886 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12887 arg00,
12888 arg01,
12889 op2, op1);
12890 if (tem)
12891 return tem;
12894 /* If the second operand is simpler than the third, swap them
12895 since that produces better jump optimization results. */
12896 if (truth_value_p (TREE_CODE (arg0))
12897 && tree_swap_operands_p (op1, op2))
12899 location_t loc0 = expr_location_or (arg0, loc);
12900 /* See if this can be inverted. If it can't, possibly because
12901 it was a floating-point inequality comparison, don't do
12902 anything. */
12903 tem = fold_invert_truthvalue (loc0, arg0);
12904 if (tem)
12905 return fold_build3_loc (loc, code, type, tem, op2, op1);
12908 /* Convert A ? 1 : 0 to simply A. */
12909 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12910 : (integer_onep (op1)
12911 && !VECTOR_TYPE_P (type)))
12912 && integer_zerop (op2)
12913 /* If we try to convert OP0 to our type, the
12914 call to fold will try to move the conversion inside
12915 a COND, which will recurse. In that case, the COND_EXPR
12916 is probably the best choice, so leave it alone. */
12917 && type == TREE_TYPE (arg0))
12918 return protected_set_expr_location_unshare (arg0, loc);
12920 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12921 over COND_EXPR in cases such as floating point comparisons. */
12922 if (integer_zerop (op1)
12923 && code == COND_EXPR
12924 && integer_onep (op2)
12925 && !VECTOR_TYPE_P (type)
12926 && truth_value_p (TREE_CODE (arg0)))
12927 return fold_convert_loc (loc, type,
12928 invert_truthvalue_loc (loc, arg0));
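/* E.g. a < b ? 0 : 1 becomes !(a < b) rather than a >= b; for
   floating-point operands the two differ when a NaN is involved,
   which is why NOT_EXPR is the preferred form here.  */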
12930 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12931 if (TREE_CODE (arg0) == LT_EXPR
12932 && integer_zerop (TREE_OPERAND (arg0, 1))
12933 && integer_zerop (op2)
12934 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12936 /* sign_bit_p looks through both zero and sign extensions,
12937 but for this optimization only sign extensions are
12938 usable. */
12939 tree tem2 = TREE_OPERAND (arg0, 0);
12940 while (tem != tem2)
12942 if (TREE_CODE (tem2) != NOP_EXPR
12943 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12945 tem = NULL_TREE;
12946 break;
12948 tem2 = TREE_OPERAND (tem2, 0);
12950 /* sign_bit_p only checks ARG1 bits within A's precision.
12951 If <sign bit of A> has wider type than A, bits outside
12952 of A's precision in <sign bit of A> need to be checked.
12953 If they are all 0, this optimization needs to be done
12954 in unsigned A's type, if they are all 1 in signed A's type,
12955 otherwise this can't be done. */
12956 if (tem
12957 && TYPE_PRECISION (TREE_TYPE (tem))
12958 < TYPE_PRECISION (TREE_TYPE (arg1))
12959 && TYPE_PRECISION (TREE_TYPE (tem))
12960 < TYPE_PRECISION (type))
12962 int inner_width, outer_width;
12963 tree tem_type;
12965 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12966 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12967 if (outer_width > TYPE_PRECISION (type))
12968 outer_width = TYPE_PRECISION (type);
12970 wide_int mask = wi::shifted_mask
12971 (inner_width, outer_width - inner_width, false,
12972 TYPE_PRECISION (TREE_TYPE (arg1)));
12974 wide_int common = mask & wi::to_wide (arg1);
12975 if (common == mask)
12977 tem_type = signed_type_for (TREE_TYPE (tem));
12978 tem = fold_convert_loc (loc, tem_type, tem);
12980 else if (common == 0)
12982 tem_type = unsigned_type_for (TREE_TYPE (tem));
12983 tem = fold_convert_loc (loc, tem_type, tem);
12985 else
12986 tem = NULL;
12989 if (tem)
12990 return
12991 fold_convert_loc (loc, type,
12992 fold_build2_loc (loc, BIT_AND_EXPR,
12993 TREE_TYPE (tem), tem,
12994 fold_convert_loc (loc,
12995 TREE_TYPE (tem),
12996 arg1)));
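/* E.g. for 32-bit int x: x < 0 ? INT_MIN : 0 becomes x & INT_MIN,
   INT_MIN being exactly the sign bit of x.  The width checks above
   extend this to the case where <sign bit of A> is wider than A.  */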
12999 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13000 already handled above. */
13001 if (TREE_CODE (arg0) == BIT_AND_EXPR
13002 && integer_onep (TREE_OPERAND (arg0, 1))
13003 && integer_zerop (op2)
13004 && integer_pow2p (arg1))
13006 tree tem = TREE_OPERAND (arg0, 0);
13007 STRIP_NOPS (tem);
13008 if (TREE_CODE (tem) == RSHIFT_EXPR
13009 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13010 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13011 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13012 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13013 fold_convert_loc (loc, type,
13014 TREE_OPERAND (tem, 0)),
13015 op1);
13018 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13019 is probably obsolete because the first operand should be a
13020 truth value (that's why we have the two cases above), but let's
13021 leave it in until we can confirm this for all front-ends. */
13022 if (integer_zerop (op2)
13023 && TREE_CODE (arg0) == NE_EXPR
13024 && integer_zerop (TREE_OPERAND (arg0, 1))
13025 && integer_pow2p (arg1)
13026 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13027 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13028 arg1, OEP_ONLY_CONST)
13029 /* operand_equal_p compares just value, not precision, so e.g.
13030 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13031 second operand 32-bit -128, which is not a power of two (or vice
13032 versa). */
13033 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13034 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
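/* E.g. (x & 4) != 0 ? 4 : 0 folds directly to x & 4, valid because
   4 is a power of two, so the AND already yields either 4 or 0.  */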
13036 /* Disable the transformations below for vectors, since
13037 fold_binary_op_with_conditional_arg may undo them immediately,
13038 yielding an infinite loop. */
13039 if (code == VEC_COND_EXPR)
13040 return NULL_TREE;
13042 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13043 if (integer_zerop (op2)
13044 && truth_value_p (TREE_CODE (arg0))
13045 && truth_value_p (TREE_CODE (arg1))
13046 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13047 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13048 : TRUTH_ANDIF_EXPR,
13049 type, fold_convert_loc (loc, type, arg0), op1);
13051 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13052 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13053 && truth_value_p (TREE_CODE (arg0))
13054 && truth_value_p (TREE_CODE (arg1))
13055 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13057 location_t loc0 = expr_location_or (arg0, loc);
13058 /* Only perform transformation if ARG0 is easily inverted. */
13059 tem = fold_invert_truthvalue (loc0, arg0);
13060 if (tem)
13061 return fold_build2_loc (loc, code == VEC_COND_EXPR
13062 ? BIT_IOR_EXPR
13063 : TRUTH_ORIF_EXPR,
13064 type, fold_convert_loc (loc, type, tem),
13065 op1);
13068 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13069 if (integer_zerop (arg1)
13070 && truth_value_p (TREE_CODE (arg0))
13071 && truth_value_p (TREE_CODE (op2))
13072 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13074 location_t loc0 = expr_location_or (arg0, loc);
13075 /* Only perform transformation if ARG0 is easily inverted. */
13076 tem = fold_invert_truthvalue (loc0, arg0);
13077 if (tem)
13078 return fold_build2_loc (loc, code == VEC_COND_EXPR
13079 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13080 type, fold_convert_loc (loc, type, tem),
13081 op2);
13084 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13085 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13086 && truth_value_p (TREE_CODE (arg0))
13087 && truth_value_p (TREE_CODE (op2))
13088 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13089 return fold_build2_loc (loc, code == VEC_COND_EXPR
13090 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13091 type, fold_convert_loc (loc, type, arg0), op2);
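/* Summary of the four truth-value shapes above for scalar COND_EXPRs
   (VEC_COND_EXPR uses BIT_AND_EXPR/BIT_IOR_EXPR instead):

     a ? b : 0  ->  a && b
     a ? b : 1  ->  !a || b
     a ? 0 : b  ->  !a && b
     a ? 1 : b  ->  a || b  */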
13093 return NULL_TREE;
13095 case CALL_EXPR:
13096 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13097 of fold_ternary on them. */
13098 gcc_unreachable ();
13100 case BIT_FIELD_REF:
13101 if (TREE_CODE (arg0) == VECTOR_CST
13102 && (type == TREE_TYPE (TREE_TYPE (arg0))
13103 || (VECTOR_TYPE_P (type)
13104 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13105 && tree_fits_uhwi_p (op1)
13106 && tree_fits_uhwi_p (op2))
13108 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13109 unsigned HOST_WIDE_INT width
13110 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13111 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13112 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13113 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13115 if (n != 0
13116 && (idx % width) == 0
13117 && (n % width) == 0
13118 && known_le ((idx + n) / width,
13119 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13121 idx = idx / width;
13122 n = n / width;
13124 if (TREE_CODE (arg0) == VECTOR_CST)
13126 if (n == 1)
13128 tem = VECTOR_CST_ELT (arg0, idx);
13129 if (VECTOR_TYPE_P (type))
13130 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13131 return tem;
13134 tree_vector_builder vals (type, n, 1);
13135 for (unsigned i = 0; i < n; ++i)
13136 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13137 return vals.build ();
13142 /* On constants we can use native encode/interpret to constant
13143 fold (nearly) all BIT_FIELD_REFs. */
13144 if (CONSTANT_CLASS_P (arg0)
13145 && can_native_interpret_type_p (type)
13146 && BITS_PER_UNIT == 8
13147 && tree_fits_uhwi_p (op1)
13148 && tree_fits_uhwi_p (op2))
13150 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13151 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13152 /* Limit us to a reasonable amount of work. To relax the
13153 other limitations we need bit-shifting of the buffer
13154 and rounding up the size. */
13155 if (bitpos % BITS_PER_UNIT == 0
13156 && bitsize % BITS_PER_UNIT == 0
13157 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13159 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13160 unsigned HOST_WIDE_INT len
13161 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13162 bitpos / BITS_PER_UNIT);
13163 if (len > 0
13164 && len * BITS_PER_UNIT >= bitsize)
13166 tree v = native_interpret_expr (type, b,
13167 bitsize / BITS_PER_UNIT);
13168 if (v)
13169 return v;
13174 return NULL_TREE;
13176 case VEC_PERM_EXPR:
13177 /* Perform constant folding of VEC_PERM_EXPR. */
13178 if (TREE_CODE (arg2) == VECTOR_CST
13179 && TREE_CODE (op0) == VECTOR_CST
13180 && TREE_CODE (op1) == VECTOR_CST)
13182 /* Build a vector of integers from the tree mask. */
13183 vec_perm_builder builder;
13184 if (!tree_to_vec_perm_builder (&builder, arg2))
13185 return NULL_TREE;
13187 /* Create a vec_perm_indices for the integer vector. */
13188 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13189 bool single_arg = (op0 == op1);
13190 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13191 return fold_vec_perm (type, op0, op1, sel);
13193 return NULL_TREE;
13195 case BIT_INSERT_EXPR:
13196 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13197 if (TREE_CODE (arg0) == INTEGER_CST
13198 && TREE_CODE (arg1) == INTEGER_CST)
13200 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13201 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13202 wide_int tem = (wi::to_wide (arg0)
13203 & wi::shifted_mask (bitpos, bitsize, true,
13204 TYPE_PRECISION (type)));
13205 wide_int tem2
13206 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13207 bitsize), bitpos);
13208 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
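/* Worked example, assuming 32-bit arg0 == 0xaabbccdd, 8-bit
   arg1 == 0xff and bitpos == 8: the shifted mask clears bits 8..15
   giving 0xaabb00dd, the zero-extended arg1 shifted left by 8 is
   0x0000ff00, and their IOR is 0xaabbffdd.  */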
13210 else if (TREE_CODE (arg0) == VECTOR_CST
13211 && CONSTANT_CLASS_P (arg1)
13212 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13213 TREE_TYPE (arg1)))
13215 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13216 unsigned HOST_WIDE_INT elsize
13217 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13218 if (bitpos % elsize == 0)
13220 unsigned k = bitpos / elsize;
13221 unsigned HOST_WIDE_INT nelts;
13222 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13223 return arg0;
13224 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13226 tree_vector_builder elts (type, nelts, 1);
13227 elts.quick_grow (nelts);
13228 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13229 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13230 return elts.build ();
13234 return NULL_TREE;
13236 default:
13237 return NULL_TREE;
13238 } /* switch (code) */
13241 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13242 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13243 constructor element index of the value returned. If the element is
13244 not found, NULL_TREE is returned and *CTOR_IDX is updated to
13245 the index of the element after the ACCESS_INDEX position (which
13246 may be outside of the CTOR array). */
13248 tree
13249 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13250 unsigned *ctor_idx)
13252 tree index_type = NULL_TREE;
13253 signop index_sgn = UNSIGNED;
13254 offset_int low_bound = 0;
13256 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13258 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13259 if (domain_type && TYPE_MIN_VALUE (domain_type))
13261 /* Static constructors for variably sized objects make no sense. */
13262 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13263 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13264 /* ??? When it is obvious that the range is signed, treat it so. */
13265 if (TYPE_UNSIGNED (index_type)
13266 && TYPE_MAX_VALUE (domain_type)
13267 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13268 TYPE_MIN_VALUE (domain_type)))
13270 index_sgn = SIGNED;
13271 low_bound
13272 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13273 SIGNED);
13275 else
13277 index_sgn = TYPE_SIGN (index_type);
13278 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13283 if (index_type)
13284 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13285 index_sgn);
13287 offset_int index = low_bound;
13288 if (index_type)
13289 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13291 offset_int max_index = index;
13292 unsigned cnt;
13293 tree cfield, cval;
13294 bool first_p = true;
13296 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13298 /* An array constructor might explicitly set the index, or specify a
13299 range, or leave the index NULL, meaning that it is the next index
13300 after the previous one. */
13301 if (cfield)
13303 if (TREE_CODE (cfield) == INTEGER_CST)
13304 max_index = index
13305 = offset_int::from (wi::to_wide (cfield), index_sgn);
13306 else
13308 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13309 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13310 index_sgn);
13311 max_index
13312 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13313 index_sgn);
13314 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13317 else if (!first_p)
13319 index = max_index + 1;
13320 if (index_type)
13321 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13322 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13323 max_index = index;
13325 else
13326 first_p = false;
13328 /* Do we have a match? */
13329 if (wi::cmp (access_index, index, index_sgn) >= 0)
13331 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13333 if (ctor_idx)
13334 *ctor_idx = cnt;
13335 return cval;
13338 else if (in_gimple_form)
13339 /* We're past the element we are searching for. Note that during
13340 parsing the elements might not be sorted.
13341 ??? We should use a binary search and a flag on the
13342 CONSTRUCTOR as to whether elements are sorted in declaration
13343 order. */
13344 break;
13346 if (ctor_idx)
13347 *ctor_idx = cnt;
13348 return NULL_TREE;
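/* E.g. given the initializer { [0 ... 3] = 1, 7 }, ACCESS_INDEX 2
   falls inside the RANGE_EXPR and yields 1, ACCESS_INDEX 4 yields 7
   (the unindexed element following the range), and ACCESS_INDEX 9
   yields NULL_TREE with *CTOR_IDX pointing one past the last
   element.  */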
13351 /* Perform constant folding and related simplification of EXPR.
13352 The related simplifications include x*1 => x, x*0 => 0, etc.,
13353 and application of the associative law.
13354 NOP_EXPR conversions may be removed freely (as long as we
13355 are careful not to change the type of the overall expression).
13356 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13357 but we can constant-fold them if they have constant operands. */
13359 #ifdef ENABLE_FOLD_CHECKING
13360 # define fold(x) fold_1 (x)
13361 static tree fold_1 (tree);
13362 static
13363 #endif
13364 tree
13365 fold (tree expr)
13367 const tree t = expr;
13368 enum tree_code code = TREE_CODE (t);
13369 enum tree_code_class kind = TREE_CODE_CLASS (code);
13370 tree tem;
13371 location_t loc = EXPR_LOCATION (expr);
13373 /* Return right away if a constant. */
13374 if (kind == tcc_constant)
13375 return t;
13377 /* CALL_EXPR-like objects with variable numbers of operands are
13378 treated specially. */
13379 if (kind == tcc_vl_exp)
13381 if (code == CALL_EXPR)
13383 tem = fold_call_expr (loc, expr, false);
13384 return tem ? tem : expr;
13386 return expr;
13389 if (IS_EXPR_CODE_CLASS (kind))
13391 tree type = TREE_TYPE (t);
13392 tree op0, op1, op2;
13394 switch (TREE_CODE_LENGTH (code))
13396 case 1:
13397 op0 = TREE_OPERAND (t, 0);
13398 tem = fold_unary_loc (loc, code, type, op0);
13399 return tem ? tem : expr;
13400 case 2:
13401 op0 = TREE_OPERAND (t, 0);
13402 op1 = TREE_OPERAND (t, 1);
13403 tem = fold_binary_loc (loc, code, type, op0, op1);
13404 return tem ? tem : expr;
13405 case 3:
13406 op0 = TREE_OPERAND (t, 0);
13407 op1 = TREE_OPERAND (t, 1);
13408 op2 = TREE_OPERAND (t, 2);
13409 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13410 return tem ? tem : expr;
13411 default:
13412 break;
13416 switch (code)
13418 case ARRAY_REF:
13420 tree op0 = TREE_OPERAND (t, 0);
13421 tree op1 = TREE_OPERAND (t, 1);
13423 if (TREE_CODE (op1) == INTEGER_CST
13424 && TREE_CODE (op0) == CONSTRUCTOR
13425 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13427 tree val = get_array_ctor_element_at_index (op0,
13428 wi::to_offset (op1));
13429 if (val)
13430 return val;
13433 return t;
13436 /* Return a VECTOR_CST if possible. */
13437 case CONSTRUCTOR:
13439 tree type = TREE_TYPE (t);
13440 if (TREE_CODE (type) != VECTOR_TYPE)
13441 return t;
13443 unsigned i;
13444 tree val;
13445 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13446 if (! CONSTANT_CLASS_P (val))
13447 return t;
13449 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13452 case CONST_DECL:
13453 return fold (DECL_INITIAL (t));
13455 default:
13456 return t;
13457 } /* switch (code) */
13460 #ifdef ENABLE_FOLD_CHECKING
13461 #undef fold
13463 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13464 hash_table<nofree_ptr_hash<const tree_node> > *);
13465 static void fold_check_failed (const_tree, const_tree);
13466 void print_fold_checksum (const_tree);
13468 /* When --enable-checking=fold, compute a digest of EXPR before
13469 and after the actual fold call to verify that fold did not
13470 accidentally change the original EXPR. */
13472 tree
13473 fold (tree expr)
13475 tree ret;
13476 struct md5_ctx ctx;
13477 unsigned char checksum_before[16], checksum_after[16];
13478 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13480 md5_init_ctx (&ctx);
13481 fold_checksum_tree (expr, &ctx, &ht);
13482 md5_finish_ctx (&ctx, checksum_before);
13483 ht.empty ();
13485 ret = fold_1 (expr);
13487 md5_init_ctx (&ctx);
13488 fold_checksum_tree (expr, &ctx, &ht);
13489 md5_finish_ctx (&ctx, checksum_after);
13491 if (memcmp (checksum_before, checksum_after, 16))
13492 fold_check_failed (expr, ret);
13494 return ret;
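/* E.g. if a simplification mistakenly rewrote one operand of EXPR in
   place, the before/after digests above would disagree and the
   compiler would ICE with "fold check: original tree changed by fold"
   instead of silently miscompiling.  */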
13497 void
13498 print_fold_checksum (const_tree expr)
13500 struct md5_ctx ctx;
13501 unsigned char checksum[16], cnt;
13502 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13504 md5_init_ctx (&ctx);
13505 fold_checksum_tree (expr, &ctx, &ht);
13506 md5_finish_ctx (&ctx, checksum);
13507 for (cnt = 0; cnt < 16; ++cnt)
13508 fprintf (stderr, "%02x", checksum[cnt]);
13509 putc ('\n', stderr);
13512 static void
13513 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13515 internal_error ("fold check: original tree changed by fold");
13518 static void
13519 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13520 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13522 const tree_node **slot;
13523 enum tree_code code;
13524 union tree_node *buf;
13525 int i, len;
13527 recursive_label:
13528 if (expr == NULL)
13529 return;
13530 slot = ht->find_slot (expr, INSERT);
13531 if (*slot != NULL)
13532 return;
13533 *slot = expr;
13534 code = TREE_CODE (expr);
13535 if (TREE_CODE_CLASS (code) == tcc_declaration
13536 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13538 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13539 size_t sz = tree_size (expr);
13540 buf = XALLOCAVAR (union tree_node, sz);
13541 memcpy ((char *) buf, expr, sz);
13542 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13543 buf->decl_with_vis.symtab_node = NULL;
13544 buf->base.nowarning_flag = 0;
13545 expr = (tree) buf;
13547 else if (TREE_CODE_CLASS (code) == tcc_type
13548 && (TYPE_POINTER_TO (expr)
13549 || TYPE_REFERENCE_TO (expr)
13550 || TYPE_CACHED_VALUES_P (expr)
13551 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13552 || TYPE_NEXT_VARIANT (expr)
13553 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13555 /* Allow these fields to be modified. */
13556 tree tmp;
13557 size_t sz = tree_size (expr);
13558 buf = XALLOCAVAR (union tree_node, sz);
13559 memcpy ((char *) buf, expr, sz);
13560 expr = tmp = (tree) buf;
13561 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13562 TYPE_POINTER_TO (tmp) = NULL;
13563 TYPE_REFERENCE_TO (tmp) = NULL;
13564 TYPE_NEXT_VARIANT (tmp) = NULL;
13565 TYPE_ALIAS_SET (tmp) = -1;
13566 if (TYPE_CACHED_VALUES_P (tmp))
13568 TYPE_CACHED_VALUES_P (tmp) = 0;
13569 TYPE_CACHED_VALUES (tmp) = NULL;
13572 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13574 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13575 that and change builtins.c etc. instead - see PR89543. */
13576 size_t sz = tree_size (expr);
13577 buf = XALLOCAVAR (union tree_node, sz);
13578 memcpy ((char *) buf, expr, sz);
13579 buf->base.nowarning_flag = 0;
13580 expr = (tree) buf;
13582 md5_process_bytes (expr, tree_size (expr), ctx);
13583 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13584 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13585 if (TREE_CODE_CLASS (code) != tcc_type
13586 && TREE_CODE_CLASS (code) != tcc_declaration
13587 && code != TREE_LIST
13588 && code != SSA_NAME
13589 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13590 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13591 switch (TREE_CODE_CLASS (code))
13593 case tcc_constant:
13594 switch (code)
13596 case STRING_CST:
13597 md5_process_bytes (TREE_STRING_POINTER (expr),
13598 TREE_STRING_LENGTH (expr), ctx);
13599 break;
13600 case COMPLEX_CST:
13601 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13602 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13603 break;
13604 case VECTOR_CST:
13605 len = vector_cst_encoded_nelts (expr);
13606 for (i = 0; i < len; ++i)
13607 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13608 break;
13609 default:
13610 break;
13612 break;
13613 case tcc_exceptional:
13614 switch (code)
13616 case TREE_LIST:
13617 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13618 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13619 expr = TREE_CHAIN (expr);
13620 goto recursive_label;
13621 break;
13622 case TREE_VEC:
13623 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13624 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13625 break;
13626 default:
13627 break;
13629 break;
13630 case tcc_expression:
13631 case tcc_reference:
13632 case tcc_comparison:
13633 case tcc_unary:
13634 case tcc_binary:
13635 case tcc_statement:
13636 case tcc_vl_exp:
13637 len = TREE_OPERAND_LENGTH (expr);
13638 for (i = 0; i < len; ++i)
13639 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13640 break;
13641 case tcc_declaration:
13642 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13643 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13644 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13646 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13647 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13648 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13649 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13650 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13653 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13655 if (TREE_CODE (expr) == FUNCTION_DECL)
13657 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13658 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13660 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13662 break;
13663 case tcc_type:
13664 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13665 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13666 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13667 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13668 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13669 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13670 if (INTEGRAL_TYPE_P (expr)
13671 || SCALAR_FLOAT_TYPE_P (expr))
13673 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13674 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13676 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13677 if (TREE_CODE (expr) == RECORD_TYPE
13678 || TREE_CODE (expr) == UNION_TYPE
13679 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13680 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13681 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13682 break;
13683 default:
13684 break;
13688 /* Helper function for outputting the checksum of a tree T. When
13689 debugging with gdb, you can "define mynext" to be "next" followed
13690 by "call debug_fold_checksum (op0)", then just trace down till the
13691 outputs differ. */
13693 DEBUG_FUNCTION void
13694 debug_fold_checksum (const_tree t)
13696 int i;
13697 unsigned char checksum[16];
13698 struct md5_ctx ctx;
13699 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13701 md5_init_ctx (&ctx);
13702 fold_checksum_tree (t, &ctx, &ht);
13703 md5_finish_ctx (&ctx, checksum);
13704 ht.empty ();
13706 for (i = 0; i < 16; i++)
13707 fprintf (stderr, "%d ", checksum[i]);
13709 fprintf (stderr, "\n");
13712 #endif
13714 /* Fold a unary tree expression with code CODE of type TYPE with an
13715 operand OP0. LOC is the location of the resulting expression.
13716 Return a folded expression if successful. Otherwise, return a tree
13717 expression with code CODE of type TYPE with an operand OP0. */
13719 tree
13720 fold_build1_loc (location_t loc,
13721 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13723 tree tem;
13724 #ifdef ENABLE_FOLD_CHECKING
13725 unsigned char checksum_before[16], checksum_after[16];
13726 struct md5_ctx ctx;
13727 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13729 md5_init_ctx (&ctx);
13730 fold_checksum_tree (op0, &ctx, &ht);
13731 md5_finish_ctx (&ctx, checksum_before);
13732 ht.empty ();
13733 #endif
13735 tem = fold_unary_loc (loc, code, type, op0);
13736 if (!tem)
13737 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13739 #ifdef ENABLE_FOLD_CHECKING
13740 md5_init_ctx (&ctx);
13741 fold_checksum_tree (op0, &ctx, &ht);
13742 md5_finish_ctx (&ctx, checksum_after);
13744 if (memcmp (checksum_before, checksum_after, 16))
13745 fold_check_failed (op0, tem);
13746 #endif
13747 return tem;
13750 /* Fold a binary tree expression with code CODE of type TYPE with
13751 operands OP0 and OP1. LOC is the location of the resulting
13752 expression. Return a folded expression if successful. Otherwise,
13753 return a tree expression with code CODE of type TYPE with operands
13754 OP0 and OP1. */
13756 tree
13757 fold_build2_loc (location_t loc,
13758 enum tree_code code, tree type, tree op0, tree op1
13759 MEM_STAT_DECL)
13761 tree tem;
13762 #ifdef ENABLE_FOLD_CHECKING
13763 unsigned char checksum_before_op0[16],
13764 checksum_before_op1[16],
13765 checksum_after_op0[16],
13766 checksum_after_op1[16];
13767 struct md5_ctx ctx;
13768 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13770 md5_init_ctx (&ctx);
13771 fold_checksum_tree (op0, &ctx, &ht);
13772 md5_finish_ctx (&ctx, checksum_before_op0);
13773 ht.empty ();
13775 md5_init_ctx (&ctx);
13776 fold_checksum_tree (op1, &ctx, &ht);
13777 md5_finish_ctx (&ctx, checksum_before_op1);
13778 ht.empty ();
13779 #endif
13781 tem = fold_binary_loc (loc, code, type, op0, op1);
13782 if (!tem)
13783 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13785 #ifdef ENABLE_FOLD_CHECKING
13786 md5_init_ctx (&ctx);
13787 fold_checksum_tree (op0, &ctx, &ht);
13788 md5_finish_ctx (&ctx, checksum_after_op0);
13789 ht.empty ();
13791 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13792 fold_check_failed (op0, tem);
13794 md5_init_ctx (&ctx);
13795 fold_checksum_tree (op1, &ctx, &ht);
13796 md5_finish_ctx (&ctx, checksum_after_op1);
13798 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13799 fold_check_failed (op1, tem);
13800 #endif
13801 return tem;
13804 /* Fold a ternary tree expression with code CODE of type TYPE with
13805 operands OP0, OP1, and OP2. Return a folded expression if
13806 successful. Otherwise, return a tree expression with code CODE of
13807 type TYPE with operands OP0, OP1, and OP2. */
13809 tree
13810 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13811 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13813 tree tem;
13814 #ifdef ENABLE_FOLD_CHECKING
13815 unsigned char checksum_before_op0[16],
13816 checksum_before_op1[16],
13817 checksum_before_op2[16],
13818 checksum_after_op0[16],
13819 checksum_after_op1[16],
13820 checksum_after_op2[16];
13821 struct md5_ctx ctx;
13822 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13824 md5_init_ctx (&ctx);
13825 fold_checksum_tree (op0, &ctx, &ht);
13826 md5_finish_ctx (&ctx, checksum_before_op0);
13827 ht.empty ();
13829 md5_init_ctx (&ctx);
13830 fold_checksum_tree (op1, &ctx, &ht);
13831 md5_finish_ctx (&ctx, checksum_before_op1);
13832 ht.empty ();
13834 md5_init_ctx (&ctx);
13835 fold_checksum_tree (op2, &ctx, &ht);
13836 md5_finish_ctx (&ctx, checksum_before_op2);
13837 ht.empty ();
13838 #endif
13840 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13841 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13842 if (!tem)
13843 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13845 #ifdef ENABLE_FOLD_CHECKING
13846 md5_init_ctx (&ctx);
13847 fold_checksum_tree (op0, &ctx, &ht);
13848 md5_finish_ctx (&ctx, checksum_after_op0);
13849 ht.empty ();
13851 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13852 fold_check_failed (op0, tem);
13854 md5_init_ctx (&ctx);
13855 fold_checksum_tree (op1, &ctx, &ht);
13856 md5_finish_ctx (&ctx, checksum_after_op1);
13857 ht.empty ();
13859 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13860 fold_check_failed (op1, tem);
13862 md5_init_ctx (&ctx);
13863 fold_checksum_tree (op2, &ctx, &ht);
13864 md5_finish_ctx (&ctx, checksum_after_op2);
13866 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13867 fold_check_failed (op2, tem);
13868 #endif
13869 return tem;
13872 /* Fold a CALL_EXPR of type TYPE that calls FN with the NARGS
13873 arguments in ARGARRAY and a null static chain.
13874 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13875 of type TYPE from the given operands as constructed by build_call_array. */
13877 tree
13878 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13879 int nargs, tree *argarray)
13881 tree tem;
13882 #ifdef ENABLE_FOLD_CHECKING
13883 unsigned char checksum_before_fn[16],
13884 checksum_before_arglist[16],
13885 checksum_after_fn[16],
13886 checksum_after_arglist[16];
13887 struct md5_ctx ctx;
13888 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13889 int i;
13891 md5_init_ctx (&ctx);
13892 fold_checksum_tree (fn, &ctx, &ht);
13893 md5_finish_ctx (&ctx, checksum_before_fn);
13894 ht.empty ();
13896 md5_init_ctx (&ctx);
13897 for (i = 0; i < nargs; i++)
13898 fold_checksum_tree (argarray[i], &ctx, &ht);
13899 md5_finish_ctx (&ctx, checksum_before_arglist);
13900 ht.empty ();
13901 #endif
13903 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13904 if (!tem)
13905 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13907 #ifdef ENABLE_FOLD_CHECKING
13908 md5_init_ctx (&ctx);
13909 fold_checksum_tree (fn, &ctx, &ht);
13910 md5_finish_ctx (&ctx, checksum_after_fn);
13911 ht.empty ();
13913 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13914 fold_check_failed (fn, tem);
13916 md5_init_ctx (&ctx);
13917 for (i = 0; i < nargs; i++)
13918 fold_checksum_tree (argarray[i], &ctx, &ht);
13919 md5_finish_ctx (&ctx, checksum_after_arglist);
13921 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13922 fold_check_failed (NULL_TREE, tem);
13923 #endif
13924 return tem;
13927 /* Perform constant folding and related simplification of initializer
13928 expression EXPR. These behave identically to "fold_buildN" but ignore
13929 potential run-time traps and exceptions that fold must preserve. */
13931 #define START_FOLD_INIT \
13932 int saved_signaling_nans = flag_signaling_nans;\
13933 int saved_trapping_math = flag_trapping_math;\
13934 int saved_rounding_math = flag_rounding_math;\
13935 int saved_trapv = flag_trapv;\
13936 int saved_folding_initializer = folding_initializer;\
13937 flag_signaling_nans = 0;\
13938 flag_trapping_math = 0;\
13939 flag_rounding_math = 0;\
13940 flag_trapv = 0;\
13941 folding_initializer = 1;
13943 #define END_FOLD_INIT \
13944 flag_signaling_nans = saved_signaling_nans;\
13945 flag_trapping_math = saved_trapping_math;\
13946 flag_rounding_math = saved_rounding_math;\
13947 flag_trapv = saved_trapv;\
13948 folding_initializer = saved_folding_initializer;
13950 tree
13951 fold_init (tree expr)
13953 tree result;
13954 START_FOLD_INIT;
13956 result = fold (expr);
13958 END_FOLD_INIT;
13959 return result;
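/* E.g. folding the initializer 1.0 / 0.0 to +Inf is acceptable here
   even though, with -ftrapping-math, plain fold would have to preserve
   the possibly-trapping division at run time.  */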
13962 tree
13963 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13964 tree type, tree op)
13966 tree result;
13967 START_FOLD_INIT;
13969 result = fold_build1_loc (loc, code, type, op);
13971 END_FOLD_INIT;
13972 return result;
13975 tree
13976 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13977 tree type, tree op0, tree op1)
13979 tree result;
13980 START_FOLD_INIT;
13982 result = fold_build2_loc (loc, code, type, op0, op1);
13984 END_FOLD_INIT;
13985 return result;
13988 tree
13989 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13990 int nargs, tree *argarray)
13992 tree result;
13993 START_FOLD_INIT;
13995 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13997 END_FOLD_INIT;
13998 return result;
14001 tree
14002 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14003 tree lhs, tree rhs)
14005 tree result;
14006 START_FOLD_INIT;
14008 result = fold_binary_loc (loc, code, type, lhs, rhs);
14010 END_FOLD_INIT;
14011 return result;
14014 #undef START_FOLD_INIT
14015 #undef END_FOLD_INIT
14017 /* Determine if the first argument is a multiple of the second argument.
14018 Return 0 if it is not, or if we cannot easily determine that it is.
14020 An example of the sort of thing we care about (at this point; this routine
14021 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14022 fold cases do now) is discovering that
14024 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14026 is a multiple of
14028 SAVE_EXPR (J * 8)
14030 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14032 This code also handles discovering that
14034 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14036 is a multiple of 8 so we don't have to worry about dealing with a
14037 possible remainder.
14039 Note that we *look* inside a SAVE_EXPR only to determine how it was
14040 calculated; it is not safe for fold to do much of anything else with the
14041 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14042 at run time. For example, the latter example above *cannot* be implemented
14043 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14044 evaluation time of the original SAVE_EXPR is not necessarily the same at
14045 the time the new expression is evaluated. The only optimization of this
14046 sort that would be valid is changing
14048 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14050 divided by 8 to
14052 SAVE_EXPR (I) * SAVE_EXPR (J)
14054 (where the same SAVE_EXPR (J) is used in the original and the
14055 transformed version). */
14057 int
14058 multiple_of_p (tree type, const_tree top, const_tree bottom)
14060 gimple *stmt;
14061 tree t1, op1, op2;
14063 if (operand_equal_p (top, bottom, 0))
14064 return 1;
14066 if (TREE_CODE (type) != INTEGER_TYPE)
14067 return 0;
14069 switch (TREE_CODE (top))
14071 case BIT_AND_EXPR:
14072 /* Bitwise and provides a power of two multiple. If the mask is
14073 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14074 if (!integer_pow2p (bottom))
14075 return 0;
14076 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14077 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14079 case MULT_EXPR:
14080 if (TREE_CODE (bottom) == INTEGER_CST)
14082 op1 = TREE_OPERAND (top, 0);
14083 op2 = TREE_OPERAND (top, 1);
14084 if (TREE_CODE (op1) == INTEGER_CST)
14085 std::swap (op1, op2);
14086 if (TREE_CODE (op2) == INTEGER_CST)
14088 if (multiple_of_p (type, op2, bottom))
14089 return 1;
14090 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14091 if (multiple_of_p (type, bottom, op2))
14093 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14094 wi::to_widest (op2));
14095 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14097 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14098 return multiple_of_p (type, op1, op2);
14101 return multiple_of_p (type, op1, bottom);
14104 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14105 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14107 case MINUS_EXPR:
14108 /* We cannot precisely prove whether op0 - op1 is a multiple of
14109 bottom, so be conservative and check that both op0 and op1 are
14110 multiples of bottom. Note we check the second operand first
14111 since it's usually simpler.
14112 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14113 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14115 case PLUS_EXPR:
14116 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
14117 as op0 - 3 if the expression has unsigned type. For example,
14118 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
14119 op1 = TREE_OPERAND (top, 1);
14120 if (TYPE_UNSIGNED (type)
14121 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14122 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14123 return (multiple_of_p (type, op1, bottom)
14124 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
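/* E.g. for 32-bit unsigned x, x * 3 + 0xfffffffd is recognized as a
   multiple of 3: the addend's sign bit is set, so it is first
   re-expressed as -3 and both operands then pass the check.  */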
14126 case LSHIFT_EXPR:
14127 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14129 op1 = TREE_OPERAND (top, 1);
14130 /* const_binop may not detect overflow correctly,
14131 so check for it explicitly here. */
14132 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
14133 wi::to_wide (op1))
14134 && (t1 = fold_convert (type,
14135 const_binop (LSHIFT_EXPR, size_one_node,
14136 op1))) != 0
14137 && !TREE_OVERFLOW (t1))
14138 return multiple_of_p (type, t1, bottom);
14140 return 0;
14142 case NOP_EXPR:
14143 /* Can't handle conversions from non-integral or wider integral type. */
14144 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14145 || (TYPE_PRECISION (type)
14146 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14147 return 0;
14149 /* fall through */
14151 case SAVE_EXPR:
14152 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14154 case COND_EXPR:
14155 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14156 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14158 case INTEGER_CST:
14159 if (TREE_CODE (bottom) != INTEGER_CST
14160 || integer_zerop (bottom)
14161 || (TYPE_UNSIGNED (type)
14162 && (tree_int_cst_sgn (top) < 0
14163 || tree_int_cst_sgn (bottom) < 0)))
14164 return 0;
14165 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14166 SIGNED);
14168 case SSA_NAME:
14169 if (TREE_CODE (bottom) == INTEGER_CST
14170 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14171 && gimple_code (stmt) == GIMPLE_ASSIGN)
14173 enum tree_code code = gimple_assign_rhs_code (stmt);
14175 /* Check for special cases to see if top is defined as a multiple
14176 of bottom:
14178 top = X & ~(bottom - 1); bottom is a power of 2
14180 or
14182 Y = X % bottom
14183 top = X - Y. */
14184 if (code == BIT_AND_EXPR
14185 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14186 && TREE_CODE (op2) == INTEGER_CST
14187 && integer_pow2p (bottom)
14188 && wi::multiple_of_p (wi::to_widest (op2),
14189 wi::to_widest (bottom), UNSIGNED))
14190 return 1;
14192 op1 = gimple_assign_rhs1 (stmt);
14193 if (code == MINUS_EXPR
14194 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14195 && TREE_CODE (op2) == SSA_NAME
14196 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14197 && gimple_code (stmt) == GIMPLE_ASSIGN
14198 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14199 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14200 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14201 return 1;
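/* E.g. the GIMPLE sequence

     y = x % 8;
     top = x - y;

   proves top is a multiple of 8, as does top = x & 0xfffffff8 via
   the BIT_AND_EXPR pattern above.  */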
14204 /* fall through */
14206 default:
14207 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14208 return multiple_p (wi::to_poly_widest (top),
14209 wi::to_poly_widest (bottom));
14211 return 0;
14215 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14216 This function returns true for integer expressions, and returns
14217 false if uncertain. */
14219 bool
14220 tree_expr_finite_p (const_tree x)
14222 machine_mode mode = element_mode (x);
14223 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14224 return true;
14225 switch (TREE_CODE (x))
14227 case REAL_CST:
14228 return real_isfinite (TREE_REAL_CST_PTR (x));
14229 case COMPLEX_CST:
14230 return tree_expr_finite_p (TREE_REALPART (x))
14231 && tree_expr_finite_p (TREE_IMAGPART (x));
14232 case FLOAT_EXPR:
14233 return true;
14234 case ABS_EXPR:
14235 case CONVERT_EXPR:
14236 case NON_LVALUE_EXPR:
14237 case NEGATE_EXPR:
14238 case SAVE_EXPR:
14239 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14240 case MIN_EXPR:
14241 case MAX_EXPR:
14242 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14243 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14244 case COND_EXPR:
14245 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14246 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14247 case CALL_EXPR:
14248 switch (get_call_combined_fn (x))
14250 CASE_CFN_FABS:
14251 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14252 CASE_CFN_FMAX:
14253 CASE_CFN_FMIN:
14254 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14255 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14256 default:
14257 return false;
14260 default:
14261 return false;
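/* E.g. fabs (x) is finite exactly when x is, while x + y is never
   treated as finite here: even finite operands can overflow to
   infinity, so PLUS_EXPR simply falls through to the conservative
   default.  */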
14265 /* Return true if expression X evaluates to an infinity.
14266 This function returns false for integer expressions. */
14268 bool
14269 tree_expr_infinite_p (const_tree x)
14271 if (!HONOR_INFINITIES (x))
14272 return false;
14273 switch (TREE_CODE (x))
14275 case REAL_CST:
14276 return real_isinf (TREE_REAL_CST_PTR (x));
14277 case ABS_EXPR:
14278 case NEGATE_EXPR:
14279 case NON_LVALUE_EXPR:
14280 case SAVE_EXPR:
14281 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14282 case COND_EXPR:
14283 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14284 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14285 default:
14286 return false;
14290 /* Return true if expression X could evaluate to an infinity.
14291 This function returns false for integer expressions, and returns
14292 true if uncertain. */
14294 bool
14295 tree_expr_maybe_infinite_p (const_tree x)
14297 if (!HONOR_INFINITIES (x))
14298 return false;
14299 switch (TREE_CODE (x))
14301 case REAL_CST:
14302 return real_isinf (TREE_REAL_CST_PTR (x));
14303 case FLOAT_EXPR:
14304 return false;
14305 case ABS_EXPR:
14306 case NEGATE_EXPR:
14307 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14308 case COND_EXPR:
14309 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14310 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14311 default:
14312 return true;
14316 /* Return true if expression X evaluates to a signaling NaN.
14317 This function returns false for integer expressions. */
14319 bool
14320 tree_expr_signaling_nan_p (const_tree x)
14322 if (!HONOR_SNANS (x))
14323 return false;
14324 switch (TREE_CODE (x))
14326 case REAL_CST:
14327 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14328 case NON_LVALUE_EXPR:
14329 case SAVE_EXPR:
14330 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14331 case COND_EXPR:
14332 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14333 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14334 default:
14335 return false;
14339 /* Return true if expression X could evaluate to a signaling NaN.
14340 This function returns false for integer expressions, and returns
14341 true if uncertain. */
14343 bool
14344 tree_expr_maybe_signaling_nan_p (const_tree x)
14346 if (!HONOR_SNANS (x))
14347 return false;
14348 switch (TREE_CODE (x))
14350 case REAL_CST:
14351 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14352 case FLOAT_EXPR:
14353 return false;
14354 case ABS_EXPR:
14355 case CONVERT_EXPR:
14356 case NEGATE_EXPR:
14357 case NON_LVALUE_EXPR:
14358 case SAVE_EXPR:
14359 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14360 case MIN_EXPR:
14361 case MAX_EXPR:
14362 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14363 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14364 case COND_EXPR:
14365 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14366 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14367 case CALL_EXPR:
14368 switch (get_call_combined_fn (x))
14370 CASE_CFN_FABS:
14371 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14372 CASE_CFN_FMAX:
14373 CASE_CFN_FMIN:
14374 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14375 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14376 default:
14377 return true;
14379 default:
14380 return true;
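/* Editorial note: fmin/fmax can propagate a NaN operand, so the
   CASE_CFN_FMAX/FMIN arms above OR the two arguments; e.g.
   fmax (a, 0.0) is maybe-sNaN exactly when the hypothetical tree A
   is.  A PLUS_EXPR falls to the default case and answers true
   conservatively, even though IEEE arithmetic itself produces at
   worst a quiet NaN.  */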
14384 /* Return true if expression X evaluates to a NaN.
14385 This function returns false for integer expressions. */
14387 bool
14388 tree_expr_nan_p (const_tree x)
14390 if (!HONOR_NANS (x))
14391 return false;
14392 switch (TREE_CODE (x))
14394 case REAL_CST:
14395 return real_isnan (TREE_REAL_CST_PTR (x));
14396 case NON_LVALUE_EXPR:
14397 case SAVE_EXPR:
14398 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14399 case COND_EXPR:
14400 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14401 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14402 default:
14403 return false;
14407 /* Return true if expression X could evaluate to a NaN.
14408 This function returns false for integer expressions, and returns
14409 true if uncertain. */
14411 bool
14412 tree_expr_maybe_nan_p (const_tree x)
14414 if (!HONOR_NANS (x))
14415 return false;
14416 switch (TREE_CODE (x))
14418 case REAL_CST:
14419 return real_isnan (TREE_REAL_CST_PTR (x));
14420 case FLOAT_EXPR:
14421 return false;
14422 case PLUS_EXPR:
14423 case MINUS_EXPR:
14424 case MULT_EXPR:
14425 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14426 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14427 case ABS_EXPR:
14428 case CONVERT_EXPR:
14429 case NEGATE_EXPR:
14430 case NON_LVALUE_EXPR:
14431 case SAVE_EXPR:
14432 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14433 case MIN_EXPR:
14434 case MAX_EXPR:
14435 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14436 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14437 case COND_EXPR:
14438 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14439 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14440 case CALL_EXPR:
14441 switch (get_call_combined_fn (x))
14443 CASE_CFN_FABS:
14444 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14445 CASE_CFN_FMAX:
14446 CASE_CFN_FMIN:
14447 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14448 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14449 default:
14450 return true;
14452 default:
14453 return true;
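/* Editorial example: the PLUS/MINUS/MULT arm above requires
   finiteness of both operands, not merely NaN-freeness, because
   Inf - Inf and 0 * Inf create a NaN from NaN-free inputs.
   RDIV_EXPR is deliberately absent: 0.0 / 0.0 yields a NaN even for
   finite operands.  */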
14457 /* Return true if expression X could evaluate to -0.0.
14458 This function returns true if uncertain. */
14460 bool
14461 tree_expr_maybe_real_minus_zero_p (const_tree x)
14463 if (!HONOR_SIGNED_ZEROS (x))
14464 return false;
14465 switch (TREE_CODE (x))
14467 case REAL_CST:
14468 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14469 case INTEGER_CST:
14470 case FLOAT_EXPR:
14471 case ABS_EXPR:
14472 return false;
14473 case NON_LVALUE_EXPR:
14474 case SAVE_EXPR:
14475 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14476 case COND_EXPR:
14477 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14478 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14479 case CALL_EXPR:
14480 switch (get_call_combined_fn (x))
14482 CASE_CFN_FABS:
14483 return false;
14484 default:
14485 break;
14487 default:
14488 break;
14490 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14491 but currently those predicates require tree and not const_tree. */
14492 return true;
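/* Editorial examples: with X a hypothetical double,
   tree_expr_maybe_real_minus_zero_p answers false for fabs (x),
   (double) i and integer constants, but true for a bare variable or
   for x * y via the conservative fall-through above.  Under
   -fno-signed-zeros the HONOR_SIGNED_ZEROS test makes every answer
   false.  */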
14495 #define tree_expr_nonnegative_warnv_p(X, Y) \
14496 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14498 #define RECURSE(X) \
14499 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
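/* Editorial note on the macro pair above: the first #define poisons
   the function name so that a direct recursive call such as

     tree_expr_nonnegative_warnv_p (op, strict_overflow_p, depth)

   fails to compile with "Use RECURSE for recursive calls"; RECURSE
   parenthesizes the name to bypass the macro and threads depth + 1
   through, bounding the recursion.  */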
14501 /* Return true if CODE or TYPE is known to be non-negative. */
14503 static bool
14504 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14506 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14507 && truth_value_p (code))
14508 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14509 have a signed:1 type (where the values are -1 and 0). */
14510 return true;
14511 return false;
14514 /* Return true if (CODE OP0) is known to be non-negative. If the return
14515 value is based on the assumption that signed overflow is undefined,
14516 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14517 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14519 bool
14520 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14521 bool *strict_overflow_p, int depth)
14523 if (TYPE_UNSIGNED (type))
14524 return true;
14526 switch (code)
14528 case ABS_EXPR:
14529 /* We can't return 1 if flag_wrapv is set because
14530 ABS_EXPR<INT_MIN> = INT_MIN. */
14531 if (!ANY_INTEGRAL_TYPE_P (type))
14532 return true;
14533 if (TYPE_OVERFLOW_UNDEFINED (type))
14535 *strict_overflow_p = true;
14536 return true;
14538 break;
14540 case NON_LVALUE_EXPR:
14541 case FLOAT_EXPR:
14542 case FIX_TRUNC_EXPR:
14543 return RECURSE (op0);
14545 CASE_CONVERT:
14547 tree inner_type = TREE_TYPE (op0);
14548 tree outer_type = type;
14550 if (TREE_CODE (outer_type) == REAL_TYPE)
14552 if (TREE_CODE (inner_type) == REAL_TYPE)
14553 return RECURSE (op0);
14554 if (INTEGRAL_TYPE_P (inner_type))
14556 if (TYPE_UNSIGNED (inner_type))
14557 return true;
14558 return RECURSE (op0);
14561 else if (INTEGRAL_TYPE_P (outer_type))
14563 if (TREE_CODE (inner_type) == REAL_TYPE)
14564 return RECURSE (op0);
14565 if (INTEGRAL_TYPE_P (inner_type))
14566 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14567 && TYPE_UNSIGNED (inner_type);
14570 break;
14572 default:
14573 return tree_simple_nonnegative_warnv_p (code, type);
14576 /* We don't know sign of `t', so be conservative and return false. */
14577 return false;
14580 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14581 value is based on the assumption that signed overflow is undefined,
14582 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14583 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14585 bool
14586 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14587 tree op1, bool *strict_overflow_p,
14588 int depth)
14590 if (TYPE_UNSIGNED (type))
14591 return true;
14593 switch (code)
14595 case POINTER_PLUS_EXPR:
14596 case PLUS_EXPR:
14597 if (FLOAT_TYPE_P (type))
14598 return RECURSE (op0) && RECURSE (op1);
14600 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14601 both unsigned and at least 2 bits shorter than the result. */
14602 if (TREE_CODE (type) == INTEGER_TYPE
14603 && TREE_CODE (op0) == NOP_EXPR
14604 && TREE_CODE (op1) == NOP_EXPR)
14606 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14607 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14608 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14609 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14611 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14612 TYPE_PRECISION (inner2)) + 1;
14613 return prec < TYPE_PRECISION (type);
14616 break;
14618 case MULT_EXPR:
14619 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14621 /* x * x is always non-negative for floating point x
14622 or without overflow. */
14623 if (operand_equal_p (op0, op1, 0)
14624 || (RECURSE (op0) && RECURSE (op1)))
14626 if (ANY_INTEGRAL_TYPE_P (type)
14627 && TYPE_OVERFLOW_UNDEFINED (type))
14628 *strict_overflow_p = true;
14629 return true;
14633 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14634 both unsigned and the total of their precisions is less than the result's. */
14635 if (TREE_CODE (type) == INTEGER_TYPE
14636 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14637 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14639 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14640 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14641 : TREE_TYPE (op0);
14642 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14643 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14644 : TREE_TYPE (op1);
14646 bool unsigned0 = TYPE_UNSIGNED (inner0);
14647 bool unsigned1 = TYPE_UNSIGNED (inner1);
14649 if (TREE_CODE (op0) == INTEGER_CST)
14650 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14652 if (TREE_CODE (op1) == INTEGER_CST)
14653 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14655 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14656 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14658 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14659 ? tree_int_cst_min_precision (op0, UNSIGNED)
14660 : TYPE_PRECISION (inner0);
14662 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14663 ? tree_int_cst_min_precision (op1, UNSIGNED)
14664 : TYPE_PRECISION (inner1);
14666 return precision0 + precision1 < TYPE_PRECISION (type);
14669 return false;
14671 case BIT_AND_EXPR:
14672 return RECURSE (op0) || RECURSE (op1);
14674 case MAX_EXPR:
14675 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14676 things. */
14677 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14678 return RECURSE (op0) && RECURSE (op1);
14679 return RECURSE (op0) || RECURSE (op1);
14681 case BIT_IOR_EXPR:
14682 case BIT_XOR_EXPR:
14683 case MIN_EXPR:
14684 case RDIV_EXPR:
14685 case TRUNC_DIV_EXPR:
14686 case CEIL_DIV_EXPR:
14687 case FLOOR_DIV_EXPR:
14688 case ROUND_DIV_EXPR:
14689 return RECURSE (op0) && RECURSE (op1);
14691 case TRUNC_MOD_EXPR:
14692 return RECURSE (op0);
14694 case FLOOR_MOD_EXPR:
14695 return RECURSE (op1);
14697 case CEIL_MOD_EXPR:
14698 case ROUND_MOD_EXPR:
14699 default:
14700 return tree_simple_nonnegative_warnv_p (code, type);
14703 /* We don't know sign of `t', so be conservative and return false. */
14704 return false;
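/* Editorial worked example for the precision tests above: with
   16-bit unsigned short and 32-bit int, (int) us0 + (int) us1 gives
   prec = 16 + 1 = 17 < 32, so the sum is provably non-negative;
   (int) us0 * (int) us1 needs 16 + 16 = 32 bits and 32 < 32 fails,
   and indeed 0xffff * 0xffff = 0xfffe0001 is negative as a 32-bit
   int.  */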
14707 /* Return true if T is known to be non-negative. If the return
14708 value is based on the assumption that signed overflow is undefined,
14709 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14710 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14712 bool
14713 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14715 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14716 return true;
14718 switch (TREE_CODE (t))
14720 case INTEGER_CST:
14721 return tree_int_cst_sgn (t) >= 0;
14723 case REAL_CST:
14724 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14726 case FIXED_CST:
14727 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14729 case COND_EXPR:
14730 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14732 case SSA_NAME:
14733 /* Limit the depth of recursion to avoid quadratic behavior.
14734 This is expected to catch almost all occurrences in practice.
14735 If this code misses important cases that unbounded recursion
14736 would not, passes that need this information could be revised
14737 to provide it through dataflow propagation. */
14738 return (!name_registered_for_update_p (t)
14739 && depth < param_max_ssa_name_query_depth
14740 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14741 strict_overflow_p, depth));
14743 default:
14744 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14748 /* Return true if a call to FN with arguments ARG0 and ARG1 is known
14749 to be non-negative. If the return value is based on the assumption
14750 that signed overflow is undefined, set *STRICT_OVERFLOW_P to true;
14751 otherwise, don't change *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14753 bool
14754 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14755 bool *strict_overflow_p, int depth)
14757 switch (fn)
14759 CASE_CFN_ACOS:
14760 CASE_CFN_ACOSH:
14761 CASE_CFN_CABS:
14762 CASE_CFN_COSH:
14763 CASE_CFN_ERFC:
14764 CASE_CFN_EXP:
14765 CASE_CFN_EXP10:
14766 CASE_CFN_EXP2:
14767 CASE_CFN_FABS:
14768 CASE_CFN_FDIM:
14769 CASE_CFN_HYPOT:
14770 CASE_CFN_POW10:
14771 CASE_CFN_FFS:
14772 CASE_CFN_PARITY:
14773 CASE_CFN_POPCOUNT:
14774 CASE_CFN_CLZ:
14775 CASE_CFN_CLRSB:
14776 case CFN_BUILT_IN_BSWAP16:
14777 case CFN_BUILT_IN_BSWAP32:
14778 case CFN_BUILT_IN_BSWAP64:
14779 case CFN_BUILT_IN_BSWAP128:
14780 /* Always true. */
14781 return true;
14783 CASE_CFN_SQRT:
14784 CASE_CFN_SQRT_FN:
14785 /* sqrt(-0.0) is -0.0. */
14786 if (!HONOR_SIGNED_ZEROS (type))
14787 return true;
14788 return RECURSE (arg0);
14790 CASE_CFN_ASINH:
14791 CASE_CFN_ATAN:
14792 CASE_CFN_ATANH:
14793 CASE_CFN_CBRT:
14794 CASE_CFN_CEIL:
14795 CASE_CFN_CEIL_FN:
14796 CASE_CFN_ERF:
14797 CASE_CFN_EXPM1:
14798 CASE_CFN_FLOOR:
14799 CASE_CFN_FLOOR_FN:
14800 CASE_CFN_FMOD:
14801 CASE_CFN_FREXP:
14802 CASE_CFN_ICEIL:
14803 CASE_CFN_IFLOOR:
14804 CASE_CFN_IRINT:
14805 CASE_CFN_IROUND:
14806 CASE_CFN_LCEIL:
14807 CASE_CFN_LDEXP:
14808 CASE_CFN_LFLOOR:
14809 CASE_CFN_LLCEIL:
14810 CASE_CFN_LLFLOOR:
14811 CASE_CFN_LLRINT:
14812 CASE_CFN_LLROUND:
14813 CASE_CFN_LRINT:
14814 CASE_CFN_LROUND:
14815 CASE_CFN_MODF:
14816 CASE_CFN_NEARBYINT:
14817 CASE_CFN_NEARBYINT_FN:
14818 CASE_CFN_RINT:
14819 CASE_CFN_RINT_FN:
14820 CASE_CFN_ROUND:
14821 CASE_CFN_ROUND_FN:
14822 CASE_CFN_ROUNDEVEN:
14823 CASE_CFN_ROUNDEVEN_FN:
14824 CASE_CFN_SCALB:
14825 CASE_CFN_SCALBLN:
14826 CASE_CFN_SCALBN:
14827 CASE_CFN_SIGNBIT:
14828 CASE_CFN_SIGNIFICAND:
14829 CASE_CFN_SINH:
14830 CASE_CFN_TANH:
14831 CASE_CFN_TRUNC:
14832 CASE_CFN_TRUNC_FN:
14833 /* True if the 1st argument is nonnegative. */
14834 return RECURSE (arg0);
14836 CASE_CFN_FMAX:
14837 CASE_CFN_FMAX_FN:
14838 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14839 things. In the presence of sNaNs, we're only guaranteed to be
14840 non-negative if both operands are non-negative. In the presence
14841 of qNaNs, we're non-negative if either operand is non-negative
14842 and can't be a qNaN, or if both operands are non-negative. */
14843 if (tree_expr_maybe_signaling_nan_p (arg0)
14844 || tree_expr_maybe_signaling_nan_p (arg1))
14845 return RECURSE (arg0) && RECURSE (arg1);
14846 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14847 || RECURSE (arg1))
14848 : (RECURSE (arg1)
14849 && !tree_expr_maybe_nan_p (arg1));
14851 CASE_CFN_FMIN:
14852 CASE_CFN_FMIN_FN:
14853 /* True if the 1st AND 2nd arguments are nonnegative. */
14854 return RECURSE (arg0) && RECURSE (arg1);
14856 CASE_CFN_COPYSIGN:
14857 CASE_CFN_COPYSIGN_FN:
14858 /* True if the 2nd argument is nonnegative. */
14859 return RECURSE (arg1);
14861 CASE_CFN_POWI:
14862 /* True if the 1st argument is nonnegative or the second
14863 argument is an even integer. */
14864 if (TREE_CODE (arg1) == INTEGER_CST
14865 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14866 return true;
14867 return RECURSE (arg0);
14869 CASE_CFN_POW:
14870 /* True if the 1st argument is nonnegative or the second
14871 argument is an even integer valued real. */
14872 if (TREE_CODE (arg1) == REAL_CST)
14874 REAL_VALUE_TYPE c;
14875 HOST_WIDE_INT n;
14877 c = TREE_REAL_CST (arg1);
14878 n = real_to_integer (&c);
14879 if ((n & 1) == 0)
14881 REAL_VALUE_TYPE cint;
14882 real_from_integer (&cint, VOIDmode, n, SIGNED);
14883 if (real_identical (&c, &cint))
14884 return true;
14887 return RECURSE (arg0);
14889 default:
14890 break;
14892 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
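/* Editorial examples for the call arms above: pow (x, 2.0) is
   accepted for any x because 2.0 is an even integer-valued REAL_CST;
   copysign (x, y) is non-negative iff y is; and, absent
   -fsignaling-nans, fmax (x, 1.0) is non-negative for arbitrary x,
   since a quiet-NaN x makes fmax return the non-negative 1.0.  */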
14895 /* Return true if T is known to be non-negative. If the return
14896 value is based on the assumption that signed overflow is undefined,
14897 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14898 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14900 static bool
14901 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14903 enum tree_code code = TREE_CODE (t);
14904 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14905 return true;
14907 switch (code)
14909 case TARGET_EXPR:
14911 tree temp = TARGET_EXPR_SLOT (t);
14912 t = TARGET_EXPR_INITIAL (t);
14914 /* If the initializer is non-void, then it's a normal expression
14915 that will be assigned to the slot. */
14916 if (!VOID_TYPE_P (t))
14917 return RECURSE (t);
14919 /* Otherwise, the initializer sets the slot in some way. One common
14920 way is an assignment statement at the end of the initializer. */
14921 while (1)
14923 if (TREE_CODE (t) == BIND_EXPR)
14924 t = expr_last (BIND_EXPR_BODY (t));
14925 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14926 || TREE_CODE (t) == TRY_CATCH_EXPR)
14927 t = expr_last (TREE_OPERAND (t, 0));
14928 else if (TREE_CODE (t) == STATEMENT_LIST)
14929 t = expr_last (t);
14930 else
14931 break;
14933 if (TREE_CODE (t) == MODIFY_EXPR
14934 && TREE_OPERAND (t, 0) == temp)
14935 return RECURSE (TREE_OPERAND (t, 1));
14937 return false;
14940 case CALL_EXPR:
14942 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14943 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14945 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14946 get_call_combined_fn (t),
14947 arg0,
14948 arg1,
14949 strict_overflow_p, depth);
14951 case COMPOUND_EXPR:
14952 case MODIFY_EXPR:
14953 return RECURSE (TREE_OPERAND (t, 1));
14955 case BIND_EXPR:
14956 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14958 case SAVE_EXPR:
14959 return RECURSE (TREE_OPERAND (t, 0));
14961 default:
14962 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14966 #undef RECURSE
14967 #undef tree_expr_nonnegative_warnv_p
14969 /* Return true if T is known to be non-negative. If the return
14970 value is based on the assumption that signed overflow is undefined,
14971 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14972 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14974 bool
14975 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14977 enum tree_code code;
14978 if (t == error_mark_node)
14979 return false;
14981 code = TREE_CODE (t);
14982 switch (TREE_CODE_CLASS (code))
14984 case tcc_binary:
14985 case tcc_comparison:
14986 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14987 TREE_TYPE (t),
14988 TREE_OPERAND (t, 0),
14989 TREE_OPERAND (t, 1),
14990 strict_overflow_p, depth);
14992 case tcc_unary:
14993 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14994 TREE_TYPE (t),
14995 TREE_OPERAND (t, 0),
14996 strict_overflow_p, depth);
14998 case tcc_constant:
14999 case tcc_declaration:
15000 case tcc_reference:
15001 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15003 default:
15004 break;
15007 switch (code)
15009 case TRUTH_AND_EXPR:
15010 case TRUTH_OR_EXPR:
15011 case TRUTH_XOR_EXPR:
15012 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15013 TREE_TYPE (t),
15014 TREE_OPERAND (t, 0),
15015 TREE_OPERAND (t, 1),
15016 strict_overflow_p, depth);
15017 case TRUTH_NOT_EXPR:
15018 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15019 TREE_TYPE (t),
15020 TREE_OPERAND (t, 0),
15021 strict_overflow_p, depth);
15023 case COND_EXPR:
15024 case CONSTRUCTOR:
15025 case OBJ_TYPE_REF:
15026 case ASSERT_EXPR:
15027 case ADDR_EXPR:
15028 case WITH_SIZE_EXPR:
15029 case SSA_NAME:
15030 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15032 default:
15033 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15037 /* Return true if `t' is known to be non-negative. Handle warnings
15038 about undefined signed overflow. */
15040 bool
15041 tree_expr_nonnegative_p (tree t)
15043 bool ret, strict_overflow_p;
15045 strict_overflow_p = false;
15046 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15047 if (strict_overflow_p)
15048 fold_overflow_warning (("assuming signed overflow does not occur when "
15049 "determining that expression is always "
15050 "non-negative"),
15051 WARN_STRICT_OVERFLOW_MISC);
15052 return ret;
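/* Editorial caller sketch (not from the original sources):

     if (tree_expr_nonnegative_p (op0))
       return op0;

   e.g. when folding ABS_EXPR <op0> to op0; the wrapper above also
   emits the -Wstrict-overflow diagnostic when the conclusion rested
   on signed overflow being undefined.  */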
15056 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15057 For floating point we further ensure that T is not denormal.
15058 Similar logic is present in nonzero_address in rtlanal.h.
15060 If the return value is based on the assumption that signed overflow
15061 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15062 change *STRICT_OVERFLOW_P. */
15064 bool
15065 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15066 bool *strict_overflow_p)
15068 switch (code)
15070 case ABS_EXPR:
15071 return tree_expr_nonzero_warnv_p (op0,
15072 strict_overflow_p);
15074 case NOP_EXPR:
15076 tree inner_type = TREE_TYPE (op0);
15077 tree outer_type = type;
15079 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15080 && tree_expr_nonzero_warnv_p (op0,
15081 strict_overflow_p));
15083 break;
15085 case NON_LVALUE_EXPR:
15086 return tree_expr_nonzero_warnv_p (op0,
15087 strict_overflow_p);
15089 default:
15090 break;
15093 return false;
15096 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15097 For floating point we further ensure that T is not denormal.
15098 Similar logic is present in nonzero_address in rtlanal.h.
15100 If the return value is based on the assumption that signed overflow
15101 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15102 change *STRICT_OVERFLOW_P. */
15104 bool
15105 tree_binary_nonzero_warnv_p (enum tree_code code,
15106 tree type,
15107 tree op0,
15108 tree op1, bool *strict_overflow_p)
15110 bool sub_strict_overflow_p;
15111 switch (code)
15113 case POINTER_PLUS_EXPR:
15114 case PLUS_EXPR:
15115 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15117 /* In the presence of negative values it is hard
15118 to say anything. */
15119 sub_strict_overflow_p = false;
15120 if (!tree_expr_nonnegative_warnv_p (op0,
15121 &sub_strict_overflow_p)
15122 || !tree_expr_nonnegative_warnv_p (op1,
15123 &sub_strict_overflow_p))
15124 return false;
15125 /* One of the operands must be positive and the other non-negative. */
15126 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15127 overflows, on a twos-complement machine the sum of two
15128 nonnegative numbers can never be zero. */
15129 return (tree_expr_nonzero_warnv_p (op0,
15130 strict_overflow_p)
15131 || tree_expr_nonzero_warnv_p (op1,
15132 strict_overflow_p));
15134 break;
15136 case MULT_EXPR:
15137 if (TYPE_OVERFLOW_UNDEFINED (type))
15139 if (tree_expr_nonzero_warnv_p (op0,
15140 strict_overflow_p)
15141 && tree_expr_nonzero_warnv_p (op1,
15142 strict_overflow_p))
15144 *strict_overflow_p = true;
15145 return true;
15148 break;
15150 case MIN_EXPR:
15151 sub_strict_overflow_p = false;
15152 if (tree_expr_nonzero_warnv_p (op0,
15153 &sub_strict_overflow_p)
15154 && tree_expr_nonzero_warnv_p (op1,
15155 &sub_strict_overflow_p))
15157 if (sub_strict_overflow_p)
15158 *strict_overflow_p = true;
15160 break;
15162 case MAX_EXPR:
15163 sub_strict_overflow_p = false;
15164 if (tree_expr_nonzero_warnv_p (op0,
15165 &sub_strict_overflow_p))
15167 if (sub_strict_overflow_p)
15168 *strict_overflow_p = true;
15170 /* When both operands are nonzero, then MAX must be too. */
15171 if (tree_expr_nonzero_warnv_p (op1,
15172 strict_overflow_p))
15173 return true;
15175 /* MAX where operand 0 is positive is positive. */
15176 return tree_expr_nonnegative_warnv_p (op0,
15177 strict_overflow_p);
15179 /* MAX where operand 1 is positive is positive. */
15180 else if (tree_expr_nonzero_warnv_p (op1,
15181 &sub_strict_overflow_p)
15182 && tree_expr_nonnegative_warnv_p (op1,
15183 &sub_strict_overflow_p))
15185 if (sub_strict_overflow_p)
15186 *strict_overflow_p = true;
15187 return true;
15189 break;
15191 case BIT_IOR_EXPR:
15192 return (tree_expr_nonzero_warnv_p (op1,
15193 strict_overflow_p)
15194 || tree_expr_nonzero_warnv_p (op0,
15195 strict_overflow_p));
15197 default:
15198 break;
15201 return false;
15204 /* Return true when T is an address and is known to be nonzero.
15205 For floating point we further ensure that T is not denormal.
15206 Similar logic is present in nonzero_address in rtlanal.h.
15208 If the return value is based on the assumption that signed overflow
15209 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15210 change *STRICT_OVERFLOW_P. */
15212 bool
15213 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15215 bool sub_strict_overflow_p;
15216 switch (TREE_CODE (t))
15218 case INTEGER_CST:
15219 return !integer_zerop (t);
15221 case ADDR_EXPR:
15223 tree base = TREE_OPERAND (t, 0);
15225 if (!DECL_P (base))
15226 base = get_base_address (base);
15228 if (base && TREE_CODE (base) == TARGET_EXPR)
15229 base = TARGET_EXPR_SLOT (base);
15231 if (!base)
15232 return false;
15234 /* For objects in symbol table check if we know they are non-zero.
15235 Don't do anything for variables and functions before symtab is built;
15236 it is quite possible that they will be declared weak later. */
15237 int nonzero_addr = maybe_nonzero_address (base);
15238 if (nonzero_addr >= 0)
15239 return nonzero_addr;
15241 /* Constants are never weak. */
15242 if (CONSTANT_CLASS_P (base))
15243 return true;
15245 return false;
15248 case COND_EXPR:
15249 sub_strict_overflow_p = false;
15250 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15251 &sub_strict_overflow_p)
15252 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15253 &sub_strict_overflow_p))
15255 if (sub_strict_overflow_p)
15256 *strict_overflow_p = true;
15257 return true;
15259 break;
15261 case SSA_NAME:
15262 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15263 break;
15264 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15266 default:
15267 break;
15269 return false;
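/* Editorial examples for the ADDR_EXPR arm above: &local_var and
   &"str"[1] are known non-zero (a non-weak decl and a constant,
   respectively), while the address of a possibly-weak symbol is not
   provable before the symbol table is built, since a weak symbol may
   resolve to address zero.  */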
15272 #define integer_valued_real_p(X) \
15273 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15275 #define RECURSE(X) \
15276 ((integer_valued_real_p) (X, depth + 1))
15278 /* Return true if the floating point result of (CODE OP0) has an
15279 integer value. We also allow +Inf, -Inf and NaN to be considered
15280 integer values. Return false for signaling NaN.
15282 DEPTH is the current nesting depth of the query. */
15284 bool
15285 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15287 switch (code)
15289 case FLOAT_EXPR:
15290 return true;
15292 case ABS_EXPR:
15293 return RECURSE (op0);
15295 CASE_CONVERT:
15297 tree type = TREE_TYPE (op0);
15298 if (TREE_CODE (type) == INTEGER_TYPE)
15299 return true;
15300 if (TREE_CODE (type) == REAL_TYPE)
15301 return RECURSE (op0);
15302 break;
15305 default:
15306 break;
15308 return false;
15311 /* Return true if the floating point result of (CODE OP0 OP1) has an
15312 integer value. We also allow +Inf, -Inf and NaN to be considered
15313 integer values. Return false for signaling NaN.
15315 DEPTH is the current nesting depth of the query. */
15317 bool
15318 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15320 switch (code)
15322 case PLUS_EXPR:
15323 case MINUS_EXPR:
15324 case MULT_EXPR:
15325 case MIN_EXPR:
15326 case MAX_EXPR:
15327 return RECURSE (op0) && RECURSE (op1);
15329 default:
15330 break;
15332 return false;
15335 /* Return true if the floating point result of calling FN with arguments
15336 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15337 considered integer values. Return false for signaling NaN. If FN
15338 takes fewer than 2 arguments, the remaining ARGn are null.
15340 DEPTH is the current nesting depth of the query. */
15342 bool
15343 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15345 switch (fn)
15347 CASE_CFN_CEIL:
15348 CASE_CFN_CEIL_FN:
15349 CASE_CFN_FLOOR:
15350 CASE_CFN_FLOOR_FN:
15351 CASE_CFN_NEARBYINT:
15352 CASE_CFN_NEARBYINT_FN:
15353 CASE_CFN_RINT:
15354 CASE_CFN_RINT_FN:
15355 CASE_CFN_ROUND:
15356 CASE_CFN_ROUND_FN:
15357 CASE_CFN_ROUNDEVEN:
15358 CASE_CFN_ROUNDEVEN_FN:
15359 CASE_CFN_TRUNC:
15360 CASE_CFN_TRUNC_FN:
15361 return true;
15363 CASE_CFN_FMIN:
15364 CASE_CFN_FMIN_FN:
15365 CASE_CFN_FMAX:
15366 CASE_CFN_FMAX_FN:
15367 return RECURSE (arg0) && RECURSE (arg1);
15369 default:
15370 break;
15372 return false;
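/* Editorial examples: trunc (x) and nearbyint (x) are integer-valued
   for any x, while fmin (x, y) qualifies only when both arguments
   do, e.g. fmin (trunc (x), 2.0).  */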
15375 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15376 has an integer value. We also allow +Inf, -Inf and NaN to be
15377 considered integer values. Return false for signaling NaN.
15379 DEPTH is the current nesting depth of the query. */
15381 bool
15382 integer_valued_real_single_p (tree t, int depth)
15384 switch (TREE_CODE (t))
15386 case REAL_CST:
15387 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15389 case COND_EXPR:
15390 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15392 case SSA_NAME:
15393 /* Limit the depth of recursion to avoid quadratic behavior.
15394 This is expected to catch almost all occurrences in practice.
15395 If this code misses important cases that unbounded recursion
15396 would not, passes that need this information could be revised
15397 to provide it through dataflow propagation. */
15398 return (!name_registered_for_update_p (t)
15399 && depth < param_max_ssa_name_query_depth
15400 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15401 depth));
15403 default:
15404 break;
15406 return false;
15409 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15410 has an integer value. We also allow +Inf, -Inf and NaN to be
15411 considered integer values. Return false for signaling NaN.
15413 DEPTH is the current nesting depth of the query. */
15415 static bool
15416 integer_valued_real_invalid_p (tree t, int depth)
15418 switch (TREE_CODE (t))
15420 case COMPOUND_EXPR:
15421 case MODIFY_EXPR:
15422 case BIND_EXPR:
15423 return RECURSE (TREE_OPERAND (t, 1));
15425 case SAVE_EXPR:
15426 return RECURSE (TREE_OPERAND (t, 0));
15428 default:
15429 break;
15431 return false;
15434 #undef RECURSE
15435 #undef integer_valued_real_p
15437 /* Return true if the floating point expression T has an integer value.
15438 We also allow +Inf, -Inf and NaN to be considered integer values.
15439 Return false for signaling NaN.
15441 DEPTH is the current nesting depth of the query. */
15443 bool
15444 integer_valued_real_p (tree t, int depth)
15446 if (t == error_mark_node)
15447 return false;
15449 STRIP_ANY_LOCATION_WRAPPER (t);
15451 tree_code code = TREE_CODE (t);
15452 switch (TREE_CODE_CLASS (code))
15454 case tcc_binary:
15455 case tcc_comparison:
15456 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15457 TREE_OPERAND (t, 1), depth);
15459 case tcc_unary:
15460 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15462 case tcc_constant:
15463 case tcc_declaration:
15464 case tcc_reference:
15465 return integer_valued_real_single_p (t, depth);
15467 default:
15468 break;
15471 switch (code)
15473 case COND_EXPR:
15474 case SSA_NAME:
15475 return integer_valued_real_single_p (t, depth);
15477 case CALL_EXPR:
15479 tree arg0 = (call_expr_nargs (t) > 0
15480 ? CALL_EXPR_ARG (t, 0)
15481 : NULL_TREE);
15482 tree arg1 = (call_expr_nargs (t) > 1
15483 ? CALL_EXPR_ARG (t, 1)
15484 : NULL_TREE);
15485 return integer_valued_real_call_p (get_call_combined_fn (t),
15486 arg0, arg1, depth);
15489 default:
15490 return integer_valued_real_invalid_p (t, depth);
15494 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15495 attempt to fold the expression to a constant without modifying TYPE,
15496 OP0 or OP1.
15498 If the expression could be simplified to a constant, then return
15499 the constant. If the expression would not be simplified to a
15500 constant, then return NULL_TREE. */
15502 tree
15503 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15505 tree tem = fold_binary (code, type, op0, op1);
15506 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
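/* Editorial usage sketch: fold_binary_to_constant (PLUS_EXPR, type,
   build_int_cst (type, 2), build_int_cst (type, 3)) yields the
   INTEGER_CST 5, while the same call on two SSA_NAMEs yields
   NULL_TREE because the folded result is not TREE_CONSTANT.  */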
15509 /* Given the components of a unary expression CODE, TYPE and OP0,
15510 attempt to fold the expression to a constant without modifying
15511 TYPE or OP0.
15513 If the expression could be simplified to a constant, then return
15514 the constant. If the expression would not be simplified to a
15515 constant, then return NULL_TREE. */
15517 tree
15518 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15520 tree tem = fold_unary (code, type, op0);
15521 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15524 /* If EXP represents referencing an element in a constant string
15525 (either via pointer arithmetic or array indexing), return the
15526 tree representing the value accessed, otherwise return NULL. */
15528 tree
15529 fold_read_from_constant_string (tree exp)
15531 if ((TREE_CODE (exp) == INDIRECT_REF
15532 || TREE_CODE (exp) == ARRAY_REF)
15533 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15535 tree exp1 = TREE_OPERAND (exp, 0);
15536 tree index;
15537 tree string;
15538 location_t loc = EXPR_LOCATION (exp);
15540 if (TREE_CODE (exp) == INDIRECT_REF)
15541 string = string_constant (exp1, &index, NULL, NULL);
15542 else
15544 tree low_bound = array_ref_low_bound (exp);
15545 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15547 /* Optimize the special case of a zero lower bound.
15549 We convert the low_bound to sizetype to avoid some problems
15550 with constant folding. (E.g. suppose the lower bound is 1,
15551 and its mode is QI. Without the conversion, (ARRAY
15552 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15553 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15554 if (! integer_zerop (low_bound))
15555 index = size_diffop_loc (loc, index,
15556 fold_convert_loc (loc, sizetype, low_bound));
15558 string = exp1;
15561 scalar_int_mode char_mode;
15562 if (string
15563 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15564 && TREE_CODE (string) == STRING_CST
15565 && tree_fits_uhwi_p (index)
15566 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15567 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15568 &char_mode)
15569 && GET_MODE_SIZE (char_mode) == 1)
15570 return build_int_cst_type (TREE_TYPE (exp),
15571 (TREE_STRING_POINTER (string)
15572 [TREE_INT_CST_LOW (index)]));
15574 return NULL;
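/* Editorial example: for the C expression "abcd"[2], an ARRAY_REF of
   a STRING_CST with constant index 2, the function above returns the
   INTEGER_CST 'c'; an out-of-bounds or variable index, or a string
   whose element mode is wider than one byte, returns NULL.  */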
15577 /* Folds a read from vector element at IDX of vector ARG. */
15579 tree
15580 fold_read_from_vector (tree arg, poly_uint64 idx)
15582 unsigned HOST_WIDE_INT i;
15583 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15584 && known_ge (idx, 0u)
15585 && idx.is_constant (&i))
15587 if (TREE_CODE (arg) == VECTOR_CST)
15588 return VECTOR_CST_ELT (arg, i);
15589 else if (TREE_CODE (arg) == CONSTRUCTOR)
15591 if (CONSTRUCTOR_NELTS (arg)
15592 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15593 return NULL_TREE;
15594 if (i >= CONSTRUCTOR_NELTS (arg))
15595 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15596 return CONSTRUCTOR_ELT (arg, i)->value;
15599 return NULL_TREE;
15602 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15603 an integer constant, real, or fixed-point constant.
15605 TYPE is the type of the result. */
15607 static tree
15608 fold_negate_const (tree arg0, tree type)
15610 tree t = NULL_TREE;
15612 switch (TREE_CODE (arg0))
15614 case REAL_CST:
15615 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15616 break;
15618 case FIXED_CST:
15620 FIXED_VALUE_TYPE f;
15621 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15622 &(TREE_FIXED_CST (arg0)), NULL,
15623 TYPE_SATURATING (type));
15624 t = build_fixed (type, f);
15625 /* Propagate overflow flags. */
15626 if (overflow_p | TREE_OVERFLOW (arg0))
15627 TREE_OVERFLOW (t) = 1;
15628 break;
15631 default:
15632 if (poly_int_tree_p (arg0))
15634 wi::overflow_type overflow;
15635 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15636 t = force_fit_type (type, res, 1,
15637 (overflow && ! TYPE_UNSIGNED (type))
15638 || TREE_OVERFLOW (arg0));
15639 break;
15642 gcc_unreachable ();
15645 return t;
15648 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15649 an integer constant or real constant.
15651 TYPE is the type of the result. */
15653 tree
15654 fold_abs_const (tree arg0, tree type)
15656 tree t = NULL_TREE;
15658 switch (TREE_CODE (arg0))
15660 case INTEGER_CST:
15662 /* If the value is unsigned or non-negative, then the absolute value
15663 is the same as the ordinary value. */
15664 wide_int val = wi::to_wide (arg0);
15665 wi::overflow_type overflow = wi::OVF_NONE;
15666 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15669 /* If the value is negative, then the absolute value is
15670 its negation. */
15671 else
15672 val = wi::neg (val, &overflow);
15674 /* Force to the destination type, set TREE_OVERFLOW for signed
15675 TYPE only. */
15676 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15678 break;
15680 case REAL_CST:
15681 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15682 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15683 else
15684 t = arg0;
15685 break;
15687 default:
15688 gcc_unreachable ();
15691 return t;
15694 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15695 constant. TYPE is the type of the result. */
15697 static tree
15698 fold_not_const (const_tree arg0, tree type)
15700 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15702 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15705 /* Given CODE, a relational operator, the target type, TYPE and two
15706 constant operands OP0 and OP1, return the result of the
15707 relational operation. If the result is not a compile time
15708 constant, then return NULL_TREE. */
15710 static tree
15711 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15713 int result, invert;
15715 /* From here on, the only cases we handle are when the result is
15716 known to be a constant. */
15718 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15720 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15721 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15723 /* Handle the cases where either operand is a NaN. */
15724 if (real_isnan (c0) || real_isnan (c1))
15726 switch (code)
15728 case EQ_EXPR:
15729 case ORDERED_EXPR:
15730 result = 0;
15731 break;
15733 case NE_EXPR:
15734 case UNORDERED_EXPR:
15735 case UNLT_EXPR:
15736 case UNLE_EXPR:
15737 case UNGT_EXPR:
15738 case UNGE_EXPR:
15739 case UNEQ_EXPR:
15740 result = 1;
15741 break;
15743 case LT_EXPR:
15744 case LE_EXPR:
15745 case GT_EXPR:
15746 case GE_EXPR:
15747 case LTGT_EXPR:
15748 if (flag_trapping_math)
15749 return NULL_TREE;
15750 result = 0;
15751 break;
15753 default:
15754 gcc_unreachable ();
15757 return constant_boolean_node (result, type);
15760 return constant_boolean_node (real_compare (code, c0, c1), type);
15763 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15765 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15766 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15767 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15770 /* Handle equality/inequality of complex constants. */
15771 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15773 tree rcond = fold_relational_const (code, type,
15774 TREE_REALPART (op0),
15775 TREE_REALPART (op1));
15776 tree icond = fold_relational_const (code, type,
15777 TREE_IMAGPART (op0),
15778 TREE_IMAGPART (op1));
15779 if (code == EQ_EXPR)
15780 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15781 else if (code == NE_EXPR)
15782 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15783 else
15784 return NULL_TREE;
15787 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15789 if (!VECTOR_TYPE_P (type))
15791 /* Have vector comparison with scalar boolean result. */
15792 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15793 && known_eq (VECTOR_CST_NELTS (op0),
15794 VECTOR_CST_NELTS (op1)));
15795 unsigned HOST_WIDE_INT nunits;
15796 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15797 return NULL_TREE;
15798 for (unsigned i = 0; i < nunits; i++)
15800 tree elem0 = VECTOR_CST_ELT (op0, i);
15801 tree elem1 = VECTOR_CST_ELT (op1, i);
15802 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15803 if (tmp == NULL_TREE)
15804 return NULL_TREE;
15805 if (integer_zerop (tmp))
15806 return constant_boolean_node (code == NE_EXPR, type);
15808 return constant_boolean_node (code == EQ_EXPR, type);
15810 tree_vector_builder elts;
15811 if (!elts.new_binary_operation (type, op0, op1, false))
15812 return NULL_TREE;
15813 unsigned int count = elts.encoded_nelts ();
15814 for (unsigned i = 0; i < count; i++)
15816 tree elem_type = TREE_TYPE (type);
15817 tree elem0 = VECTOR_CST_ELT (op0, i);
15818 tree elem1 = VECTOR_CST_ELT (op1, i);
15820 tree tem = fold_relational_const (code, elem_type,
15821 elem0, elem1);
15823 if (tem == NULL_TREE)
15824 return NULL_TREE;
15826 elts.quick_push (build_int_cst (elem_type,
15827 integer_zerop (tem) ? 0 : -1));
15830 return elts.build ();
15833 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15835 To compute GT, swap the arguments and do LT.
15836 To compute GE, do LT and invert the result.
15837 To compute LE, swap the arguments, do LT and invert the result.
15838 To compute NE, do EQ and invert the result.
15840 Therefore, the code below must handle only EQ and LT. */
15842 if (code == LE_EXPR || code == GT_EXPR)
15844 std::swap (op0, op1);
15845 code = swap_tree_comparison (code);
15848 /* Note that it is safe to invert for real values here because we
15849 have already handled the one case where it matters. */
15851 invert = 0;
15852 if (code == NE_EXPR || code == GE_EXPR)
15854 invert = 1;
15855 code = invert_tree_comparison (code, false);
15858 /* Compute a result for LT or EQ if args permit;
15859 otherwise return NULL_TREE. */
15860 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15862 if (code == EQ_EXPR)
15863 result = tree_int_cst_equal (op0, op1);
15864 else
15865 result = tree_int_cst_lt (op0, op1);
15867 else
15868 return NULL_TREE;
15870 if (invert)
15871 result ^= 1;
15872 return constant_boolean_node (result, type);
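/* Editorial examples: with a quiet-NaN operand, 1.0 < NAN folds to
   false only under -fno-trapping-math (the LT/LE/GT/GE arm bails out
   when flag_trapping_math is set, since ordered comparisons on NaN
   raise the invalid exception), while 1.0 != NAN always folds to
   true.  Integer cases reduce to EQ/LT through the swap/invert steps
   documented above.  */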
15875 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15876 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15877 itself. */
15879 tree
15880 fold_build_cleanup_point_expr (tree type, tree expr)
15882 /* If the expression does not have side effects then we don't have to wrap
15883 it with a cleanup point expression. */
15884 if (!TREE_SIDE_EFFECTS (expr))
15885 return expr;
15887 /* If the expression is a return, check whether the expression inside
15888 the return, or the right-hand side of the modify expression inside
15889 the return, has no side effects. If either has none, we don't need
15890 to wrap the expression in a cleanup point expression. Note we don't
15891 check the left-hand side of the modify because it should always be a return decl. */
15892 if (TREE_CODE (expr) == RETURN_EXPR)
15894 tree op = TREE_OPERAND (expr, 0);
15895 if (!op || !TREE_SIDE_EFFECTS (op))
15896 return expr;
15897 op = TREE_OPERAND (op, 1);
15898 if (!TREE_SIDE_EFFECTS (op))
15899 return expr;
15902 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15905 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15906 of an indirection through OP0, or NULL_TREE if no simplification is
15907 possible. */
15909 tree
15910 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15912 tree sub = op0;
15913 tree subtype;
15914 poly_uint64 const_op01;
15916 STRIP_NOPS (sub);
15917 subtype = TREE_TYPE (sub);
15918 if (!POINTER_TYPE_P (subtype)
15919 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15920 return NULL_TREE;
15922 if (TREE_CODE (sub) == ADDR_EXPR)
15924 tree op = TREE_OPERAND (sub, 0);
15925 tree optype = TREE_TYPE (op);
15927 /* *&CONST_DECL -> to the value of the const decl. */
15928 if (TREE_CODE (op) == CONST_DECL)
15929 return DECL_INITIAL (op);
15930 /* *&p => p; make sure to handle *&"str"[cst] here. */
15931 if (type == optype)
15933 tree fop = fold_read_from_constant_string (op);
15934 if (fop)
15935 return fop;
15936 else
15937 return op;
15939 /* *(foo *)&fooarray => fooarray[0] */
15940 else if (TREE_CODE (optype) == ARRAY_TYPE
15941 && type == TREE_TYPE (optype)
15942 && (!in_gimple_form
15943 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15945 tree type_domain = TYPE_DOMAIN (optype);
15946 tree min_val = size_zero_node;
15947 if (type_domain && TYPE_MIN_VALUE (type_domain))
15948 min_val = TYPE_MIN_VALUE (type_domain);
15949 if (in_gimple_form
15950 && TREE_CODE (min_val) != INTEGER_CST)
15951 return NULL_TREE;
15952 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15953 NULL_TREE, NULL_TREE);
15955 /* *(foo *)&complexfoo => __real__ complexfoo */
15956 else if (TREE_CODE (optype) == COMPLEX_TYPE
15957 && type == TREE_TYPE (optype))
15958 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15959 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15960 else if (VECTOR_TYPE_P (optype)
15961 && type == TREE_TYPE (optype))
15963 tree part_width = TYPE_SIZE (type);
15964 tree index = bitsize_int (0);
15965 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15966 index);
15970 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15971 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15973 tree op00 = TREE_OPERAND (sub, 0);
15974 tree op01 = TREE_OPERAND (sub, 1);
15976 STRIP_NOPS (op00);
15977 if (TREE_CODE (op00) == ADDR_EXPR)
15979 tree op00type;
15980 op00 = TREE_OPERAND (op00, 0);
15981 op00type = TREE_TYPE (op00);
15983 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15984 if (VECTOR_TYPE_P (op00type)
15985 && type == TREE_TYPE (op00type)
15986 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15987 but we want to treat offsets with MSB set as negative.
15988 For the code below negative offsets are invalid and
15989 TYPE_SIZE of the element is something unsigned, so
15990 check whether op01 fits into poly_int64, which implies
15991 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15992 then just use poly_uint64 because we want to treat the
15993 value as unsigned. */
15994 && tree_fits_poly_int64_p (op01))
15996 tree part_width = TYPE_SIZE (type);
15997 poly_uint64 max_offset
15998 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15999 * TYPE_VECTOR_SUBPARTS (op00type));
16000 if (known_lt (const_op01, max_offset))
16002 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16003 return fold_build3_loc (loc,
16004 BIT_FIELD_REF, type, op00,
16005 part_width, index);
16008 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16009 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16010 && type == TREE_TYPE (op00type))
16012 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16013 const_op01))
16014 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16016 /* ((foo *)&fooarray)[1] => fooarray[1] */
16017 else if (TREE_CODE (op00type) == ARRAY_TYPE
16018 && type == TREE_TYPE (op00type))
16020 tree type_domain = TYPE_DOMAIN (op00type);
16021 tree min_val = size_zero_node;
16022 if (type_domain && TYPE_MIN_VALUE (type_domain))
16023 min_val = TYPE_MIN_VALUE (type_domain);
16024 poly_uint64 type_size, index;
16025 if (poly_int_tree_p (min_val)
16026 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16027 && multiple_p (const_op01, type_size, &index))
16029 poly_offset_int off = index + wi::to_poly_offset (min_val);
16030 op01 = wide_int_to_tree (sizetype, off);
16031 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16032 NULL_TREE, NULL_TREE);
16038 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16039 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16040 && type == TREE_TYPE (TREE_TYPE (subtype))
16041 && (!in_gimple_form
16042 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16044 tree type_domain;
16045 tree min_val = size_zero_node;
16046 sub = build_fold_indirect_ref_loc (loc, sub);
16047 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16048 if (type_domain && TYPE_MIN_VALUE (type_domain))
16049 min_val = TYPE_MIN_VALUE (type_domain);
16050 if (in_gimple_form
16051 && TREE_CODE (min_val) != INTEGER_CST)
16052 return NULL_TREE;
16053 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16054 NULL_TREE);
16057 return NULL_TREE;
16060 /* Builds an expression for an indirection through T, simplifying some
16061 cases. */
16063 tree
16064 build_fold_indirect_ref_loc (location_t loc, tree t)
16066 tree type = TREE_TYPE (TREE_TYPE (t));
16067 tree sub = fold_indirect_ref_1 (loc, type, t);
16069 if (sub)
16070 return sub;
16072 return build1_loc (loc, INDIRECT_REF, type, t);
16075 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16077 tree
16078 fold_indirect_ref_loc (location_t loc, tree t)
16080 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16082 if (sub)
16083 return sub;
16084 else
16085 return t;
16088 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16089 whose result is ignored. The type of the returned tree need not be
16090 the same as the original expression. */
16092 tree
16093 fold_ignored_result (tree t)
16095 if (!TREE_SIDE_EFFECTS (t))
16096 return integer_zero_node;
16098 for (;;)
16099 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16101 case tcc_unary:
16102 t = TREE_OPERAND (t, 0);
16103 break;
16105 case tcc_binary:
16106 case tcc_comparison:
16107 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16108 t = TREE_OPERAND (t, 0);
16109 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16110 t = TREE_OPERAND (t, 1);
16111 else
16112 return t;
16113 break;
16115 case tcc_expression:
16116 switch (TREE_CODE (t))
16118 case COMPOUND_EXPR:
16119 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16120 return t;
16121 t = TREE_OPERAND (t, 0);
16122 break;
16124 case COND_EXPR:
16125 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16126 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16127 return t;
16128 t = TREE_OPERAND (t, 0);
16129 break;
16131 default:
16132 return t;
16134 break;
16136 default:
16137 return t;
16141 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16143 tree
16144 round_up_loc (location_t loc, tree value, unsigned int divisor)
16146 tree div = NULL_TREE;
16148 if (divisor == 1)
16149 return value;
16151 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16152 have to do anything. Only do this when VALUE is not a constant,
16153 because for a constant the check is more expensive than just
16154 doing the rounding. */
16155 if (TREE_CODE (value) != INTEGER_CST)
16157 div = build_int_cst (TREE_TYPE (value), divisor);
16159 if (multiple_of_p (TREE_TYPE (value), value, div))
16160 return value;
16163 /* If divisor is a power of two, simplify this to bit manipulation. */
16164 if (pow2_or_zerop (divisor))
16166 if (TREE_CODE (value) == INTEGER_CST)
16168 wide_int val = wi::to_wide (value);
16169 bool overflow_p;
16171 if ((val & (divisor - 1)) == 0)
16172 return value;
16174 overflow_p = TREE_OVERFLOW (value);
16175 val += divisor - 1;
16176 val &= (int) -divisor;
16177 if (val == 0)
16178 overflow_p = true;
16180 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16182 else
16184 tree t;
16186 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16187 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16188 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16189 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16192 else
16194 if (!div)
16195 div = build_int_cst (TREE_TYPE (value), divisor);
16196 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16197 value = size_binop_loc (loc, MULT_EXPR, value, div);
16200 return value;
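/* Editorial worked example for the power-of-two path above: rounding
   13 up to a multiple of 8 computes (13 + 7) & -8 = 20 & ~7 = 16; a
   value already aligned is returned unchanged, and wrap-around to 0
   from val += divisor - 1 is recorded as overflow.  */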
16203 /* Likewise, but round down. */
16205 tree
16206 round_down_loc (location_t loc, tree value, int divisor)
16208 tree div = NULL_TREE;
16210 gcc_assert (divisor > 0);
16211 if (divisor == 1)
16212 return value;
16214 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16215 have to do anything. Only do this when VALUE is not a constant,
16216 because for a constant the check is more expensive than just
16217 doing the rounding. */
16218 if (TREE_CODE (value) != INTEGER_CST)
16220 div = build_int_cst (TREE_TYPE (value), divisor);
16222 if (multiple_of_p (TREE_TYPE (value), value, div))
16223 return value;
16226 /* If divisor is a power of two, simplify this to bit manipulation. */
16227 if (pow2_or_zerop (divisor))
16229 tree t;
16231 t = build_int_cst (TREE_TYPE (value), -divisor);
16232 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16234 else
16236 if (!div)
16237 div = build_int_cst (TREE_TYPE (value), divisor);
16238 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16239 value = size_binop_loc (loc, MULT_EXPR, value, div);
16242 return value;
16245 /* Returns the pointer to the base of the object addressed by EXP and
16246 extracts the information about the offset of the access, storing it
16247 to PBITPOS and POFFSET. */
16249 static tree
16250 split_address_to_core_and_offset (tree exp,
16251 poly_int64_pod *pbitpos, tree *poffset)
16253 tree core;
16254 machine_mode mode;
16255 int unsignedp, reversep, volatilep;
16256 poly_int64 bitsize;
16257 location_t loc = EXPR_LOCATION (exp);
16259 if (TREE_CODE (exp) == ADDR_EXPR)
16261 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16262 poffset, &mode, &unsignedp, &reversep,
16263 &volatilep);
16264 core = build_fold_addr_expr_loc (loc, core);
16266 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16268 core = TREE_OPERAND (exp, 0);
16269 STRIP_NOPS (core);
16270 *pbitpos = 0;
16271 *poffset = TREE_OPERAND (exp, 1);
16272 if (poly_int_tree_p (*poffset))
16274 poly_offset_int tem
16275 = wi::sext (wi::to_poly_offset (*poffset),
16276 TYPE_PRECISION (TREE_TYPE (*poffset)));
16277 tem <<= LOG2_BITS_PER_UNIT;
16278 if (tem.to_shwi (pbitpos))
16279 *poffset = NULL_TREE;
16282 else
16284 core = exp;
16285 *pbitpos = 0;
16286 *poffset = NULL_TREE;
16289 return core;
16292 /* Returns true if addresses of E1 and E2 differ by a constant, false
16293 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16295 bool
16296 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16298 tree core1, core2;
16299 poly_int64 bitpos1, bitpos2;
16300 tree toffset1, toffset2, tdiff, type;
16302 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16303 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16305 poly_int64 bytepos1, bytepos2;
16306 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16307 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16308 || !operand_equal_p (core1, core2, 0))
16309 return false;
16311 if (toffset1 && toffset2)
16313 type = TREE_TYPE (toffset1);
16314 if (type != TREE_TYPE (toffset2))
16315 toffset2 = fold_convert (type, toffset2);
16317 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16318 if (!cst_and_fits_in_hwi (tdiff))
16319 return false;
16321 *diff = int_cst_value (tdiff);
16323 else if (toffset1 || toffset2)
16325 /* If only one of the offsets is non-constant, the difference cannot
16326 be a constant. */
16327 return false;
16329 else
16330 *diff = 0;
16332 *diff += bytepos1 - bytepos2;
16333 return true;
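/* Editorial example: for e1 = &a[3] and e2 = &a[1] with 4-byte
   elements, both addresses split to the common core &a with bit
   positions 96 and 32, so *DIFF is set to 12 - 4 = 8 and true is
   returned; if the cores differ or either offset is non-constant,
   false is returned.  */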
16336 /* Return OFF converted to a pointer offset type suitable as offset for
16337 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16338 tree
16339 convert_to_ptrofftype_loc (location_t loc, tree off)
16341 if (ptrofftype_p (TREE_TYPE (off)))
16342 return off;
16343 return fold_convert_loc (loc, sizetype, off);
16346 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16347 tree
16348 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16350 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16351 ptr, convert_to_ptrofftype_loc (loc, off));
16354 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16355 tree
16356 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16358 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16359 ptr, size_int (off));

/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE to the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}

/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
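
/* A hypothetical sketch (not part of the original file), continuing the
   "abc\0d" example from the getbyterep comment above: SRC stands for a
   reference to that constant array plus the offset 1.  */
#if 0
static void
example_byte_queries (tree src)
{
  /* With a nonnull STRSIZE argument, getbyterep returns the bytes
     starting at the offset ("bc\0d...") and stores the size of the
     constant sequence, including the terminating NUL, in SIZE.  */
  unsigned HOST_WIDE_INT size;
  const char *bytes = getbyterep (src, &size);

  /* c_getstr insists on a properly NUL-terminated single-byte string;
     callers reading up to the first NUL here see "bc".  */
  const char *str = c_getstr (src);
  (void) bytes;
  (void) str;
}
#endif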

/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
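
/* A hypothetical sketch (not part of the original file) of the facts
   tree_nonzero_bits derives: X stands for an int-typed tree with unknown
   bits, for which the default case returns all ones.  */
#if 0
static void
example_tree_nonzero_bits (tree x)
{
  tree type = integer_type_node;
  /* BIT_AND_EXPR intersects the operands' possibly-nonzero bits and
     BIT_IOR_EXPR unions them, so in (x & 0xC) | 0x30 only bits 2-5
     can be nonzero.  */
  tree masked = fold_build2 (BIT_AND_EXPR, type, x,
			     build_int_cst (type, 0xC));
  tree ored = fold_build2 (BIT_IOR_EXPR, type, masked,
			   build_int_cst (type, 0x30));
  wide_int nz = tree_nonzero_bits (ored);
  gcc_checking_assert (wi::bit_and (nz, ~(HOST_WIDE_INT) 0x3C) == 0);
}
#endif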

/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  if ((DECL_P (base0) && TREE_CODE (base1) == STRING_CST)
      || (TREE_CODE (base0) == STRING_CST && DECL_P (base1))
      || (TREE_CODE (base0) == STRING_CST
	  && TREE_CODE (base1) == STRING_CST
	  && ioff0 >= 0 && ioff1 >= 0
	  && ioff0 < TREE_STRING_LENGTH (base0)
	  && ioff1 < TREE_STRING_LENGTH (base1)
	  /* This is an overly conservative test that the STRING_CSTs
	     will not end up being string-merged.  */
	  && strncmp (TREE_STRING_POINTER (base0) + ioff0,
		      TREE_STRING_POINTER (base1) + ioff1,
		      MIN (TREE_STRING_LENGTH (base0) - ioff0,
			   TREE_STRING_LENGTH (base1) - ioff1)) != 0))
    ;
  else if (!DECL_P (base0) || !DECL_P (base1))
    return 2;
  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  else if (!INTEGRAL_TYPE_P (type))
    ;
  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  else if (is_global_var (base0) != is_global_var (base1))
    ;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      tree sz1 = DECL_SIZE_UNIT (base1);
      /* If sizes are unknown, e.g. VLA or not representable, punt.  */
      if (!tree_fits_poly_int64_p (sz0) || !tree_fits_poly_int64_p (sz1))
	return 2;

      poly_int64 size0 = tree_to_poly_int64 (sz0);
      poly_int64 size1 = tree_to_poly_int64 (sz1);
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know they aren't zero, or if we know both sizes
	 are not zero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }
  return equal;
}
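
/* A hypothetical sketch (not part of the original file) of the contract
   match.pd relies on: OP0 and OP1 stand for the ADDR_EXPRs &a and &b of
   two distinct automatic variables; the TYPE argument here is an
   assumption.  */
#if 0
static void
example_address_compare (tree op0, tree op1)
{
  tree base0, base1;
  poly_int64 off0, off1;
  /* 0 means the addresses are known unequal regardless of the offsets,
     1 means the comparison reduces to comparing OFF0 and OFF1, and 2
     means nothing is known.  */
  int res = address_compare (EQ_EXPR, boolean_type_node, op0, op1,
			     base0, base1, off0, off1, /*generic=*/true);
  (void) res;
}
#endif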

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}
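
/* A hypothetical sketch (not part of the original file) of how a further
   case could be added with the helpers above; it relies on small
   INTEGER_CSTs being shared, so that ASSERT_EQ's pointer equality
   holds.  */
#if 0
static void
test_more_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree two = build_int_cst (type, 2);
  tree four = build_int_cst (type, 4);

  /* 4 <-- (2 + 2) */
  assert_binop_folds_to_const (two, PLUS_EXPR, two, four);
}
#endif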

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */