/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
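
/* For example (an illustrative sketch, not code from this file), a
   caller computing the byte offset of an array element whose size is
   a compile-time constant might write:

     tree off = size_binop (MULT_EXPR, size_int (elt_size),
			    fold_convert (sizetype, idx));

   where ELT_SIZE and IDX are hypothetical names; size_binop folds the
   multiplication down to a constant whenever both operands are
   constants, and otherwise builds the MULT_EXPR tree.  */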
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"
#include "gimple-range.h"

/* Nonzero if we are folding constants inside an initializer or a C++
   manifestly-constant-evaluated context; zero otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
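
/* The encoding uses one bit each for "less", "equal", "greater" and
   "unordered", so combining two comparisons is plain bit arithmetic.
   For example, COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE (1 | 2 == 3),
   and COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ (3 & 6 == 2), which is
   exactly how (a <= b) && (a >= b) folds to (a == b).  */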

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
					    tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
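
/* For instance (an illustrative sketch): with ARG1 == 12 and ARG2 == 4
   this returns the INTEGER_CST 3, while with ARG1 == 12 and ARG2 == 5
   the division is inexact and the function returns NULL_TREE.  */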

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
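
/* Typical use (an illustrative sketch, not code from this file): a
   caller that folds speculatively brackets the work with

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (res_is_used_p, stmt, 0);

   where EXPR, RES_IS_USED_P and STMT are hypothetical names, so the
   deferred warning is only emitted when the folded result is actually
   used.  */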

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
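
/* For signed types the only value rejected is the most negative one:
   for a 32-bit int, negating INT_MIN (0x80000000, the lone sign-bit
   pattern) overflows because +2147483648 is not representable, while
   every other value negates safely.  */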

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  || HONOR_SIGNED_ZEROS (type)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
	     && !HONOR_SIGNED_ZEROS (type)
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
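
/* For example (an illustrative sketch): splitting IN = x + 4 with
   CODE == PLUS_EXPR stores the INTEGER_CST 4 in *LITP and returns x as
   the variable part, while IN = x - 4 routes the literal through
   *MINUS_LITP instead; a TREE_CONSTANT but non-literal operand, such
   as the address &a of a static variable, goes to *CONP.  */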

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
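
/* Shifts and rotates are exempted above because their second operand
   is a count, not a value of the result type: folding something like
   (uint64_t)x << (int)3 is fine even though the operand types differ,
   whereas for, say, PLUS_EXPR both operands must agree in signedness,
   precision and machine mode.  */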

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
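
/* For example, left shift distributes over addition in its first
   operand: (a + b) << c == (a << c) + (b << c) in modular arithmetic,
   which is why LSHIFT_EXPR answers true only for OPNO == 1.  It does
   not distribute in the shift count, since a << (b + c) is a double
   shift, not a sum of shifts.  */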

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
1942 /* Given two values, either both of sizetype or both of bitsizetype,
1943 compute the difference between the two values. Return the value
1944 in signed type corresponding to the type of the operands. */
1946 tree
1947 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1949 tree type = TREE_TYPE (arg0);
1950 tree ctype;
1952 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1953 TREE_TYPE (arg1)));
1955 /* If the type is already signed, just do the simple thing. */
1956 if (!TYPE_UNSIGNED (type))
1957 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1959 if (type == sizetype)
1960 ctype = ssizetype;
1961 else if (type == bitsizetype)
1962 ctype = sbitsizetype;
1963 else
1964 ctype = signed_type_for (type);
1966 /* If either operand is not a constant, do the conversions to the signed
1967 type and subtract. The hardware will do the right thing with any
1968 overflow in the subtraction. */
1969 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1970 return size_binop_loc (loc, MINUS_EXPR,
1971 fold_convert_loc (loc, ctype, arg0),
1972 fold_convert_loc (loc, ctype, arg1));
1974 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1975 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1976 overflow) and negate (which can't either). Special-case a result
1977 of zero while we're here. */
1978 if (tree_int_cst_equal (arg0, arg1))
1979 return build_int_cst (ctype, 0);
1980 else if (tree_int_cst_lt (arg1, arg0))
1981 return fold_convert_loc (loc, ctype,
1982 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1983 else
1984 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1985 fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc,
1987 MINUS_EXPR,
1988 arg1, arg0)));
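/* Worked example (illustrative): for the sizetype constants 2 and 5,
   size_diffop computes 5 - 2 = 3 in the unsigned type first, converts
   the result to ssizetype, and negates it, yielding -3 without ever
   overflowing the unsigned subtraction.  */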
1991 /* A subroutine of fold_convert_const handling conversions of an
1992 INTEGER_CST to another integer type. */
1994 static tree
1995 fold_convert_const_int_from_int (tree type, const_tree arg1)
1997 /* Given an integer constant, make a new constant with the new type,
1998 appropriately sign-extended or truncated. Use widest_int
1999 so that any extension is done according to ARG1's type. */
2000 return force_fit_type (type, wi::to_widest (arg1),
2001 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2002 TREE_OVERFLOW (arg1));
2005 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2006 to an integer type. */
2008 static tree
2009 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2011 bool overflow = false;
2012 tree t;
2014 /* The following code implements the floating point to integer
2015 conversion rules required by the Java Language Specification,
2016 that IEEE NaNs are mapped to zero and values that overflow
2017 the target precision saturate, i.e. values greater than
2018 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2019 are mapped to INT_MIN. These semantics are allowed by the
2020 C and C++ standards that simply state that the behavior of
2021 FP-to-integer conversion is unspecified upon overflow. */
2023 wide_int val;
2024 REAL_VALUE_TYPE r;
2025 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2027 switch (code)
2029 case FIX_TRUNC_EXPR:
2030 real_trunc (&r, VOIDmode, &x);
2031 break;
2033 default:
2034 gcc_unreachable ();
2037 /* If R is NaN, return zero and show we have an overflow. */
2038 if (REAL_VALUE_ISNAN (r))
2040 overflow = true;
2041 val = wi::zero (TYPE_PRECISION (type));
2044 /* See if R is less than the lower bound or greater than the
2045 upper bound. */
2047 if (! overflow)
2049 tree lt = TYPE_MIN_VALUE (type);
2050 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2051 if (real_less (&r, &l))
2053 overflow = true;
2054 val = wi::to_wide (lt);
2058 if (! overflow)
2060 tree ut = TYPE_MAX_VALUE (type);
2061 if (ut)
2063 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2064 if (real_less (&u, &r))
2066 overflow = true;
2067 val = wi::to_wide (ut);
2072 if (! overflow)
2073 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2075 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2076 return t;
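/* Illustrative consequences of the saturating rules above: on a target
   with 32-bit int, folding (int) 1.0e30 yields INT_MAX and folding
   (int) -1.0e30 yields INT_MIN, while a NaN operand folds to 0; in all
   three cases TREE_OVERFLOW is set on the result.  */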
2079 /* A subroutine of fold_convert_const handling conversions of a
2080 FIXED_CST to an integer type. */
2082 static tree
2083 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2085 tree t;
2086 double_int temp, temp_trunc;
2087 scalar_mode mode;
2089 /* Right shift FIXED_CST to temp by fbit. */
2090 temp = TREE_FIXED_CST (arg1).data;
2091 mode = TREE_FIXED_CST (arg1).mode;
2092 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2094 temp = temp.rshift (GET_MODE_FBIT (mode),
2095 HOST_BITS_PER_DOUBLE_INT,
2096 SIGNED_FIXED_POINT_MODE_P (mode));
2098 /* Left shift temp to temp_trunc by fbit. */
2099 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2100 HOST_BITS_PER_DOUBLE_INT,
2101 SIGNED_FIXED_POINT_MODE_P (mode));
2103 else
2105 temp = double_int_zero;
2106 temp_trunc = double_int_zero;
2109 /* If FIXED_CST is negative, we need to round the value toward 0:
2110 if the fractional bits are not zero, add 1 to temp. */
2111 if (SIGNED_FIXED_POINT_MODE_P (mode)
2112 && temp_trunc.is_negative ()
2113 && TREE_FIXED_CST (arg1).data != temp_trunc)
2114 temp += double_int_one;
2116 /* Given a fixed-point constant, make a new constant with the new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type (type, temp, -1,
2119 (temp.is_negative ()
2120 && (TYPE_UNSIGNED (type)
2121 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2122 | TREE_OVERFLOW (arg1));
2124 return t;
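/* Rounding sketch (illustrative): for a signed fixed-point value -1.5,
   the arithmetic right shift above floors to -2; the fractional bits
   are nonzero, so 1 is added back and the conversion truncates toward
   zero, producing -1.  */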
2127 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2128 to another floating point type. */
2130 static tree
2131 fold_convert_const_real_from_real (tree type, const_tree arg1)
2133 REAL_VALUE_TYPE value;
2134 tree t;
2136 /* Don't perform the operation if flag_signaling_nans is on
2137 and the operand is a signaling NaN. */
2138 if (HONOR_SNANS (arg1)
2139 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2140 return NULL_TREE;
2142 /* With flag_rounding_math we should respect the current rounding mode
2143 unless the conversion is exact. */
2144 if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2145 && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2146 return NULL_TREE;
2148 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2149 t = build_real (type, value);
2151 /* If converting an infinity or NAN to a representation that doesn't
2152 have one, set the overflow bit so that we can produce some kind of
2153 error message at the appropriate point if necessary. It's not the
2154 most user-friendly message, but it's better than nothing. */
2155 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2156 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2157 TREE_OVERFLOW (t) = 1;
2158 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2159 && !MODE_HAS_NANS (TYPE_MODE (type)))
2160 TREE_OVERFLOW (t) = 1;
2161 /* Regular overflow, conversion produced an infinity in a mode that
2162 can't represent them. */
2163 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2164 && REAL_VALUE_ISINF (value)
2165 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2166 TREE_OVERFLOW (t) = 1;
2167 else
2168 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2169 return t;
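/* Illustrative consequence: on an IEEE target, narrowing DBL_MAX to
   float quietly yields +Inf with no overflow bit, because SFmode does
   have infinities; the arms above set TREE_OVERFLOW only when the
   destination mode cannot represent an infinity or NaN, whether it
   came from the source or was produced by the conversion itself.  */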
2172 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2173 to a floating point type. */
2175 static tree
2176 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2178 REAL_VALUE_TYPE value;
2179 tree t;
2181 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2182 &TREE_FIXED_CST (arg1));
2183 t = build_real (type, value);
2185 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2186 return t;
2189 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2190 to another fixed-point type. */
2192 static tree
2193 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2195 FIXED_VALUE_TYPE value;
2196 tree t;
2197 bool overflow_p;
2199 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2200 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2201 t = build_fixed (type, value);
2203 /* Propagate overflow flags. */
2204 if (overflow_p | TREE_OVERFLOW (arg1))
2205 TREE_OVERFLOW (t) = 1;
2206 return t;
2209 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2210 to a fixed-point type. */
2212 static tree
2213 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2215 FIXED_VALUE_TYPE value;
2216 tree t;
2217 bool overflow_p;
2218 double_int di;
2220 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2222 di.low = TREE_INT_CST_ELT (arg1, 0);
2223 if (TREE_INT_CST_NUNITS (arg1) == 1)
2224 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2225 else
2226 di.high = TREE_INT_CST_ELT (arg1, 1);
2228 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2229 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2230 TYPE_SATURATING (type));
2231 t = build_fixed (type, value);
2233 /* Propagate overflow flags. */
2234 if (overflow_p | TREE_OVERFLOW (arg1))
2235 TREE_OVERFLOW (t) = 1;
2236 return t;
2239 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2240 to a fixed-point type. */
2242 static tree
2243 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2245 FIXED_VALUE_TYPE value;
2246 tree t;
2247 bool overflow_p;
2249 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2250 &TREE_REAL_CST (arg1),
2251 TYPE_SATURATING (type));
2252 t = build_fixed (type, value);
2254 /* Propagate overflow flags. */
2255 if (overflow_p | TREE_OVERFLOW (arg1))
2256 TREE_OVERFLOW (t) = 1;
2257 return t;
2260 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2261 type TYPE. If no simplification can be done return NULL_TREE. */
2263 static tree
2264 fold_convert_const (enum tree_code code, tree type, tree arg1)
2266 tree arg_type = TREE_TYPE (arg1);
2267 if (arg_type == type)
2268 return arg1;
2270 /* We can't widen types, since the runtime value could overflow the
2271 original type before being extended to the new type. */
2272 if (POLY_INT_CST_P (arg1)
2273 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2274 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2275 return build_poly_int_cst (type,
2276 poly_wide_int::from (poly_int_cst_value (arg1),
2277 TYPE_PRECISION (type),
2278 TYPE_SIGN (arg_type)));
2280 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2281 || TREE_CODE (type) == OFFSET_TYPE)
2283 if (TREE_CODE (arg1) == INTEGER_CST)
2284 return fold_convert_const_int_from_int (type, arg1);
2285 else if (TREE_CODE (arg1) == REAL_CST)
2286 return fold_convert_const_int_from_real (code, type, arg1);
2287 else if (TREE_CODE (arg1) == FIXED_CST)
2288 return fold_convert_const_int_from_fixed (type, arg1);
2290 else if (TREE_CODE (type) == REAL_TYPE)
2292 if (TREE_CODE (arg1) == INTEGER_CST)
2294 tree res = build_real_from_int_cst (type, arg1);
2295 /* Avoid the folding if flag_rounding_math is on and the
2296 conversion is not exact. */
2297 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2299 bool fail = false;
2300 wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2301 TYPE_PRECISION (TREE_TYPE (arg1)));
2302 if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2303 return NULL_TREE;
2305 return res;
2307 else if (TREE_CODE (arg1) == REAL_CST)
2308 return fold_convert_const_real_from_real (type, arg1);
2309 else if (TREE_CODE (arg1) == FIXED_CST)
2310 return fold_convert_const_real_from_fixed (type, arg1);
2312 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2314 if (TREE_CODE (arg1) == FIXED_CST)
2315 return fold_convert_const_fixed_from_fixed (type, arg1);
2316 else if (TREE_CODE (arg1) == INTEGER_CST)
2317 return fold_convert_const_fixed_from_int (type, arg1);
2318 else if (TREE_CODE (arg1) == REAL_CST)
2319 return fold_convert_const_fixed_from_real (type, arg1);
2321 else if (TREE_CODE (type) == VECTOR_TYPE)
2323 if (TREE_CODE (arg1) == VECTOR_CST
2324 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2326 tree elttype = TREE_TYPE (type);
2327 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2328 /* We can't handle steps directly when extending, since the
2329 values need to wrap at the original precision first. */
2330 bool step_ok_p
2331 = (INTEGRAL_TYPE_P (elttype)
2332 && INTEGRAL_TYPE_P (arg1_elttype)
2333 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2334 tree_vector_builder v;
2335 if (!v.new_unary_operation (type, arg1, step_ok_p))
2336 return NULL_TREE;
2337 unsigned int len = v.encoded_nelts ();
2338 for (unsigned int i = 0; i < len; ++i)
2340 tree elt = VECTOR_CST_ELT (arg1, i);
2341 tree cvt = fold_convert_const (code, elttype, elt);
2342 if (cvt == NULL_TREE)
2343 return NULL_TREE;
2344 v.quick_push (cvt);
2346 return v.build ();
2349 return NULL_TREE;
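/* Dispatch sketch (illustrative): fold_convert_const (FLOAT_EXPR,
   double_type_node, integer_three_node) takes the REAL_TYPE arm and
   yields a REAL_CST 3.0, whereas an unhandled combination, e.g. a
   COMPLEX_CST source, falls through to the NULL_TREE return above.  */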
2352 /* Construct a vector of zero elements of vector type TYPE. */
2354 static tree
2355 build_zero_vector (tree type)
2357 tree t;
2359 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2360 return build_vector_from_val (type, t);
2363 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2365 bool
2366 fold_convertible_p (const_tree type, const_tree arg)
2368 tree orig = TREE_TYPE (arg);
2370 if (type == orig)
2371 return true;
2373 if (TREE_CODE (arg) == ERROR_MARK
2374 || TREE_CODE (type) == ERROR_MARK
2375 || TREE_CODE (orig) == ERROR_MARK)
2376 return false;
2378 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2379 return true;
2381 switch (TREE_CODE (type))
2383 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2384 case POINTER_TYPE: case REFERENCE_TYPE:
2385 case OFFSET_TYPE:
2386 return (INTEGRAL_TYPE_P (orig)
2387 || (POINTER_TYPE_P (orig)
2388 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2389 || TREE_CODE (orig) == OFFSET_TYPE);
2391 case REAL_TYPE:
2392 case FIXED_POINT_TYPE:
2393 case VOID_TYPE:
2394 return TREE_CODE (type) == TREE_CODE (orig);
2396 case VECTOR_TYPE:
2397 return (VECTOR_TYPE_P (orig)
2398 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2399 TYPE_VECTOR_SUBPARTS (orig))
2400 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2402 default:
2403 return false;
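/* Examples (illustrative): int -> long is convertible because both
   types are INTEGRAL_TYPE_P; pointer -> integer is convertible only
   when the integer is no wider than the pointer; double -> int is not,
   since that conversion needs FIX_TRUNC_EXPR rather than a NOP_EXPR.  */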
2407 /* Convert expression ARG to type TYPE. Used by the middle-end for
2408 simple conversions in preference to calling the front-end's convert. */
2410 tree
2411 fold_convert_loc (location_t loc, tree type, tree arg)
2413 tree orig = TREE_TYPE (arg);
2414 tree tem;
2416 if (type == orig)
2417 return arg;
2419 if (TREE_CODE (arg) == ERROR_MARK
2420 || TREE_CODE (type) == ERROR_MARK
2421 || TREE_CODE (orig) == ERROR_MARK)
2422 return error_mark_node;
2424 switch (TREE_CODE (type))
2426 case POINTER_TYPE:
2427 case REFERENCE_TYPE:
2428 /* Handle conversions between pointers to different address spaces. */
2429 if (POINTER_TYPE_P (orig)
2430 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2431 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2432 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2433 /* fall through */
2435 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2436 case OFFSET_TYPE:
2437 if (TREE_CODE (arg) == INTEGER_CST)
2439 tem = fold_convert_const (NOP_EXPR, type, arg);
2440 if (tem != NULL_TREE)
2441 return tem;
2443 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2444 || TREE_CODE (orig) == OFFSET_TYPE)
2445 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2446 if (TREE_CODE (orig) == COMPLEX_TYPE)
2447 return fold_convert_loc (loc, type,
2448 fold_build1_loc (loc, REALPART_EXPR,
2449 TREE_TYPE (orig), arg));
2450 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2451 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2452 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2454 case REAL_TYPE:
2455 if (TREE_CODE (arg) == INTEGER_CST)
2457 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2458 if (tem != NULL_TREE)
2459 return tem;
2461 else if (TREE_CODE (arg) == REAL_CST)
2463 tem = fold_convert_const (NOP_EXPR, type, arg);
2464 if (tem != NULL_TREE)
2465 return tem;
2467 else if (TREE_CODE (arg) == FIXED_CST)
2469 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2470 if (tem != NULL_TREE)
2471 return tem;
2474 switch (TREE_CODE (orig))
2476 case INTEGER_TYPE:
2477 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2478 case POINTER_TYPE: case REFERENCE_TYPE:
2479 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2481 case REAL_TYPE:
2482 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2484 case FIXED_POINT_TYPE:
2485 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2487 case COMPLEX_TYPE:
2488 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2489 return fold_convert_loc (loc, type, tem);
2491 default:
2492 gcc_unreachable ();
2495 case FIXED_POINT_TYPE:
2496 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2497 || TREE_CODE (arg) == REAL_CST)
2499 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2500 if (tem != NULL_TREE)
2501 goto fold_convert_exit;
2504 switch (TREE_CODE (orig))
2506 case FIXED_POINT_TYPE:
2507 case INTEGER_TYPE:
2508 case ENUMERAL_TYPE:
2509 case BOOLEAN_TYPE:
2510 case REAL_TYPE:
2511 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2513 case COMPLEX_TYPE:
2514 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2515 return fold_convert_loc (loc, type, tem);
2517 default:
2518 gcc_unreachable ();
2521 case COMPLEX_TYPE:
2522 switch (TREE_CODE (orig))
2524 case INTEGER_TYPE:
2525 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2526 case POINTER_TYPE: case REFERENCE_TYPE:
2527 case REAL_TYPE:
2528 case FIXED_POINT_TYPE:
2529 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2530 fold_convert_loc (loc, TREE_TYPE (type), arg),
2531 fold_convert_loc (loc, TREE_TYPE (type),
2532 integer_zero_node));
2533 case COMPLEX_TYPE:
2535 tree rpart, ipart;
2537 if (TREE_CODE (arg) == COMPLEX_EXPR)
2539 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2540 TREE_OPERAND (arg, 0));
2541 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2542 TREE_OPERAND (arg, 1));
2543 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2546 arg = save_expr (arg);
2547 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2548 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2549 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2550 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2551 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2554 default:
2555 gcc_unreachable ();
2558 case VECTOR_TYPE:
2559 if (integer_zerop (arg))
2560 return build_zero_vector (type);
2561 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2562 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2563 || TREE_CODE (orig) == VECTOR_TYPE);
2564 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2566 case VOID_TYPE:
2567 tem = fold_ignored_result (arg);
2568 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2570 default:
2571 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2572 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2573 gcc_unreachable ();
2575 fold_convert_exit:
2576 protected_set_expr_location_unshare (tem, loc);
2577 return tem;
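/* Usage sketch (illustrative): fold_convert (double_type_node,
   integer_one_node) folds immediately to the REAL_CST 1.0 through
   fold_convert_const, while converting a non-constant integer operand
   builds a FLOAT_EXPR for later expansion instead.  */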
2580 /* Return false if expr can be assumed not to be an lvalue, true
2581 otherwise. */
2583 static bool
2584 maybe_lvalue_p (const_tree x)
2586 /* We only need to wrap lvalue tree codes. */
2587 switch (TREE_CODE (x))
2589 case VAR_DECL:
2590 case PARM_DECL:
2591 case RESULT_DECL:
2592 case LABEL_DECL:
2593 case FUNCTION_DECL:
2594 case SSA_NAME:
2596 case COMPONENT_REF:
2597 case MEM_REF:
2598 case INDIRECT_REF:
2599 case ARRAY_REF:
2600 case ARRAY_RANGE_REF:
2601 case BIT_FIELD_REF:
2602 case OBJ_TYPE_REF:
2604 case REALPART_EXPR:
2605 case IMAGPART_EXPR:
2606 case PREINCREMENT_EXPR:
2607 case PREDECREMENT_EXPR:
2608 case SAVE_EXPR:
2609 case TRY_CATCH_EXPR:
2610 case WITH_CLEANUP_EXPR:
2611 case COMPOUND_EXPR:
2612 case MODIFY_EXPR:
2613 case TARGET_EXPR:
2614 case COND_EXPR:
2615 case BIND_EXPR:
2616 case VIEW_CONVERT_EXPR:
2617 break;
2619 default:
2620 /* Assume the worst for front-end tree codes. */
2621 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2622 break;
2623 return false;
2626 return true;
2629 /* Return an expr equal to X but certainly not valid as an lvalue. */
2631 tree
2632 non_lvalue_loc (location_t loc, tree x)
2634 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2635 us. */
2636 if (in_gimple_form)
2637 return x;
2639 if (! maybe_lvalue_p (x))
2640 return x;
2641 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
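/* For example (illustrative): wrapping a VAR_DECL X in NON_LVALUE_EXPR
   makes "(X) = 1" invalid, whereas an INTEGER_CST is returned unchanged
   because constants are not in the lvalue code list above.  */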
2644 /* Given a tree comparison code, return the code that is the logical inverse.
2645 It is generally not safe to do this for floating-point comparisons, except
2646 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2647 ERROR_MARK in this case. */
2649 enum tree_code
2650 invert_tree_comparison (enum tree_code code, bool honor_nans)
2652 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2653 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2654 return ERROR_MARK;
2656 switch (code)
2658 case EQ_EXPR:
2659 return NE_EXPR;
2660 case NE_EXPR:
2661 return EQ_EXPR;
2662 case GT_EXPR:
2663 return honor_nans ? UNLE_EXPR : LE_EXPR;
2664 case GE_EXPR:
2665 return honor_nans ? UNLT_EXPR : LT_EXPR;
2666 case LT_EXPR:
2667 return honor_nans ? UNGE_EXPR : GE_EXPR;
2668 case LE_EXPR:
2669 return honor_nans ? UNGT_EXPR : GT_EXPR;
2670 case LTGT_EXPR:
2671 return UNEQ_EXPR;
2672 case UNEQ_EXPR:
2673 return LTGT_EXPR;
2674 case UNGT_EXPR:
2675 return LE_EXPR;
2676 case UNGE_EXPR:
2677 return LT_EXPR;
2678 case UNLT_EXPR:
2679 return GE_EXPR;
2680 case UNLE_EXPR:
2681 return GT_EXPR;
2682 case ORDERED_EXPR:
2683 return UNORDERED_EXPR;
2684 case UNORDERED_EXPR:
2685 return ORDERED_EXPR;
2686 default:
2687 gcc_unreachable ();
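/* Example (illustrative): when NaNs are honored, the inverse of
   x < y is x unge y (UNGE_EXPR), not x >= y, because both LT_EXPR and
   GE_EXPR are false on a NaN operand; and with -ftrapping-math the
   inversion is refused entirely (ERROR_MARK) except for the four
   NaN-safe codes listed above.  */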
2691 /* Similar, but return the comparison that results if the operands are
2692 swapped. This is safe for floating-point. */
2694 enum tree_code
2695 swap_tree_comparison (enum tree_code code)
2697 switch (code)
2699 case EQ_EXPR:
2700 case NE_EXPR:
2701 case ORDERED_EXPR:
2702 case UNORDERED_EXPR:
2703 case LTGT_EXPR:
2704 case UNEQ_EXPR:
2705 return code;
2706 case GT_EXPR:
2707 return LT_EXPR;
2708 case GE_EXPR:
2709 return LE_EXPR;
2710 case LT_EXPR:
2711 return GT_EXPR;
2712 case LE_EXPR:
2713 return GE_EXPR;
2714 case UNGT_EXPR:
2715 return UNLT_EXPR;
2716 case UNGE_EXPR:
2717 return UNLE_EXPR;
2718 case UNLT_EXPR:
2719 return UNGT_EXPR;
2720 case UNLE_EXPR:
2721 return UNGE_EXPR;
2722 default:
2723 gcc_unreachable ();
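/* E.g. (illustrative): x < y and y > x denote the same predicate, so
   LT_EXPR maps to GT_EXPR; symmetric codes such as EQ_EXPR, LTGT_EXPR
   and UNORDERED_EXPR map to themselves.  */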
2728 /* Convert a comparison tree code from an enum tree_code representation
2729 into a compcode bit-based encoding. This function is the inverse of
2730 compcode_to_comparison. */
2732 static enum comparison_code
2733 comparison_to_compcode (enum tree_code code)
2735 switch (code)
2737 case LT_EXPR:
2738 return COMPCODE_LT;
2739 case EQ_EXPR:
2740 return COMPCODE_EQ;
2741 case LE_EXPR:
2742 return COMPCODE_LE;
2743 case GT_EXPR:
2744 return COMPCODE_GT;
2745 case NE_EXPR:
2746 return COMPCODE_NE;
2747 case GE_EXPR:
2748 return COMPCODE_GE;
2749 case ORDERED_EXPR:
2750 return COMPCODE_ORD;
2751 case UNORDERED_EXPR:
2752 return COMPCODE_UNORD;
2753 case UNLT_EXPR:
2754 return COMPCODE_UNLT;
2755 case UNEQ_EXPR:
2756 return COMPCODE_UNEQ;
2757 case UNLE_EXPR:
2758 return COMPCODE_UNLE;
2759 case UNGT_EXPR:
2760 return COMPCODE_UNGT;
2761 case LTGT_EXPR:
2762 return COMPCODE_LTGT;
2763 case UNGE_EXPR:
2764 return COMPCODE_UNGE;
2765 default:
2766 gcc_unreachable ();
2770 /* Convert a compcode bit-based encoding of a comparison operator back
2771 to GCC's enum tree_code representation. This function is the
2772 inverse of comparison_to_compcode. */
2774 static enum tree_code
2775 compcode_to_comparison (enum comparison_code code)
2777 switch (code)
2779 case COMPCODE_LT:
2780 return LT_EXPR;
2781 case COMPCODE_EQ:
2782 return EQ_EXPR;
2783 case COMPCODE_LE:
2784 return LE_EXPR;
2785 case COMPCODE_GT:
2786 return GT_EXPR;
2787 case COMPCODE_NE:
2788 return NE_EXPR;
2789 case COMPCODE_GE:
2790 return GE_EXPR;
2791 case COMPCODE_ORD:
2792 return ORDERED_EXPR;
2793 case COMPCODE_UNORD:
2794 return UNORDERED_EXPR;
2795 case COMPCODE_UNLT:
2796 return UNLT_EXPR;
2797 case COMPCODE_UNEQ:
2798 return UNEQ_EXPR;
2799 case COMPCODE_UNLE:
2800 return UNLE_EXPR;
2801 case COMPCODE_UNGT:
2802 return UNGT_EXPR;
2803 case COMPCODE_LTGT:
2804 return LTGT_EXPR;
2805 case COMPCODE_UNGE:
2806 return UNGE_EXPR;
2807 default:
2808 gcc_unreachable ();
2812 /* Return true if COND1 tests the opposite condition of COND2. */
2814 bool
2815 inverse_conditions_p (const_tree cond1, const_tree cond2)
2817 return (COMPARISON_CLASS_P (cond1)
2818 && COMPARISON_CLASS_P (cond2)
2819 && (invert_tree_comparison
2820 (TREE_CODE (cond1),
2821 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2822 && operand_equal_p (TREE_OPERAND (cond1, 0),
2823 TREE_OPERAND (cond2, 0), 0)
2824 && operand_equal_p (TREE_OPERAND (cond1, 1),
2825 TREE_OPERAND (cond2, 1), 0));
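/* Example (illustrative): for integer operands, x < y and x >= y test
   opposite conditions.  For floats that honor NaNs the inversion is
   only attempted under -fno-trapping-math, where the opposite of
   x < y is x unge y rather than x >= y.  */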
2828 /* Return a tree for the comparison which is the combination of
2829 doing the AND or OR (depending on CODE) of the two operations LCODE
2830 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2831 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2832 if this makes the transformation invalid. */
2834 tree
2835 combine_comparisons (location_t loc,
2836 enum tree_code code, enum tree_code lcode,
2837 enum tree_code rcode, tree truth_type,
2838 tree ll_arg, tree lr_arg)
2840 bool honor_nans = HONOR_NANS (ll_arg);
2841 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2842 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2843 int compcode;
2845 switch (code)
2847 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2848 compcode = lcompcode & rcompcode;
2849 break;
2851 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2852 compcode = lcompcode | rcompcode;
2853 break;
2855 default:
2856 return NULL_TREE;
2859 if (!honor_nans)
2861 /* Eliminate unordered comparisons, as well as LTGT and ORD
2862 which are not used unless the mode has NaNs. */
2863 compcode &= ~COMPCODE_UNORD;
2864 if (compcode == COMPCODE_LTGT)
2865 compcode = COMPCODE_NE;
2866 else if (compcode == COMPCODE_ORD)
2867 compcode = COMPCODE_TRUE;
2869 else if (flag_trapping_math)
2871 /* Check that the original operation and the optimized ones will trap
2872 under the same condition. */
2873 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2874 && (lcompcode != COMPCODE_EQ)
2875 && (lcompcode != COMPCODE_ORD);
2876 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2877 && (rcompcode != COMPCODE_EQ)
2878 && (rcompcode != COMPCODE_ORD);
2879 bool trap = (compcode & COMPCODE_UNORD) == 0
2880 && (compcode != COMPCODE_EQ)
2881 && (compcode != COMPCODE_ORD);
2883 /* In a short-circuited boolean expression the LHS might be
2884 such that the RHS, if evaluated, will never trap. For
2885 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2886 if neither x nor y is NaN. (This is a mixed blessing: for
2887 example, the expression above will never trap, hence
2888 optimizing it to x < y would be invalid). */
2889 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2890 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2891 rtrap = false;
2893 /* If the comparison was short-circuited, and only the RHS
2894 trapped, we may now generate a spurious trap. */
2895 if (rtrap && !ltrap
2896 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2897 return NULL_TREE;
2899 /* If we changed the conditions that cause a trap, we lose. */
2900 if ((ltrap || rtrap) != trap)
2901 return NULL_TREE;
2904 if (compcode == COMPCODE_TRUE)
2905 return constant_boolean_node (true, truth_type);
2906 else if (compcode == COMPCODE_FALSE)
2907 return constant_boolean_node (false, truth_type);
2908 else
2910 enum tree_code tcode;
2912 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2913 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
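/* Worked example of the encoding (illustrative): for a <= b && a >= b,
   lcompcode is COMPCODE_LE (LT|EQ == 3) and rcompcode is COMPCODE_GE
   (GT|EQ == 6); TRUTH_AND_EXPR intersects them to 2 == COMPCODE_EQ, so
   the pair folds to a == b.  The disjunction a <= b || a >= b unions
   to 7 == COMPCODE_ORD, which becomes constant true once NaNs are not
   honored.  */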
2917 /* Return nonzero if two operands (typically of the same tree node)
2918 are necessarily equal. FLAGS modifies behavior as follows:
2920 If OEP_ONLY_CONST is set, only return nonzero for constants.
2921 This function tests whether the operands are indistinguishable;
2922 it does not test whether they are equal using C's == operation.
2923 The distinction is important for IEEE floating point, because
2924 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2925 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2927 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2928 even though it may hold multiple values during a function.
2929 This is because a GCC tree node guarantees that nothing else is
2930 executed between the evaluation of its "operands" (which may often
2931 be evaluated in arbitrary order). Hence if the operands themselves
2932 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2933 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2934 unset means assuming isochronic (or instantaneous) tree equivalence.
2935 Unless comparing arbitrary expression trees, such as from different
2936 statements, this flag can usually be left unset.
2938 If OEP_PURE_SAME is set, then pure functions with identical arguments
2939 are considered the same. It is used when the caller has other ways
2940 to ensure that global memory is unchanged in between.
2942 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2943 not values of expressions.
2945 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2946 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2948 If OEP_BITWISE is set, then require the values to be bitwise identical
2949 rather than simply numerically equal. Do not take advantage of things
2950 like math-related flags or undefined behavior; only return true for
2951 values that are provably bitwise identical in all circumstances.
2953 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2954 any operand with side effects. This is unnecessarily conservative in the
2955 case we know that arg0 and arg1 are in disjoint code paths (such as in
2956 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2957 addresses with TREE_CONSTANT flag set so we know that &var == &var
2958 even if var is volatile. */
2960 bool
2961 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2962 unsigned int flags)
2964 bool r;
2965 if (verify_hash_value (arg0, arg1, flags, &r))
2966 return r;
2968 STRIP_ANY_LOCATION_WRAPPER (arg0);
2969 STRIP_ANY_LOCATION_WRAPPER (arg1);
2971 /* If either is ERROR_MARK, they aren't equal. */
2972 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2973 || TREE_TYPE (arg0) == error_mark_node
2974 || TREE_TYPE (arg1) == error_mark_node)
2975 return false;
2977 /* Similarly, if either does not have a type (like a template id),
2978 they aren't equal. */
2979 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2980 return false;
2982 /* Bitwise identity makes no sense if the values have different layouts. */
2983 if ((flags & OEP_BITWISE)
2984 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2985 return false;
2987 /* We cannot consider pointers to different address space equal. */
2988 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2989 && POINTER_TYPE_P (TREE_TYPE (arg1))
2990 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2991 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2992 return false;
2994 /* Check equality of integer constants before bailing out due to
2995 precision differences. */
2996 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2998 /* Address of INTEGER_CST is not defined; check that we did not forget
2999 to drop the OEP_ADDRESS_OF flag. */
3000 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3001 return tree_int_cst_equal (arg0, arg1);
3004 if (!(flags & OEP_ADDRESS_OF))
3006 /* If both types don't have the same signedness, then we can't consider
3007 them equal. We must check this before the STRIP_NOPS calls
3008 because they may change the signedness of the arguments. As pointers
3009 strictly don't have a signedness, require either two pointers or
3010 two non-pointers as well. */
3011 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3012 || POINTER_TYPE_P (TREE_TYPE (arg0))
3013 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3014 return false;
3016 /* If both types don't have the same precision, then it is not safe
3017 to strip NOPs. */
3018 if (element_precision (TREE_TYPE (arg0))
3019 != element_precision (TREE_TYPE (arg1)))
3020 return false;
3022 STRIP_NOPS (arg0);
3023 STRIP_NOPS (arg1);
3025 #if 0
3026 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3027 sanity check once the issue is solved. */
3028 else
3029 /* Addresses of conversions and SSA_NAMEs (and many other things)
3030 are not defined. Check that we did not forget to drop the
3031 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3032 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3033 && TREE_CODE (arg0) != SSA_NAME);
3034 #endif
3036 /* In case both args are comparisons but with different comparison
3037 code, try to swap the comparison operands of one arg to produce
3038 a match and compare that variant. */
3039 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3040 && COMPARISON_CLASS_P (arg0)
3041 && COMPARISON_CLASS_P (arg1))
3043 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3045 if (TREE_CODE (arg0) == swap_code)
3046 return operand_equal_p (TREE_OPERAND (arg0, 0),
3047 TREE_OPERAND (arg1, 1), flags)
3048 && operand_equal_p (TREE_OPERAND (arg0, 1),
3049 TREE_OPERAND (arg1, 0), flags);
3052 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3054 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3055 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3057 else if (flags & OEP_ADDRESS_OF)
3059 /* If we are interested in comparing addresses, ignore
3060 MEM_REF wrappings of the base that can appear just for
3061 TBAA reasons. */
3062 if (TREE_CODE (arg0) == MEM_REF
3063 && DECL_P (arg1)
3064 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3066 && integer_zerop (TREE_OPERAND (arg0, 1)))
3067 return true;
3068 else if (TREE_CODE (arg1) == MEM_REF
3069 && DECL_P (arg0)
3070 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3071 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3072 && integer_zerop (TREE_OPERAND (arg1, 1)))
3073 return true;
3074 return false;
3076 else
3077 return false;
3080 /* When not checking addresses, this is needed for conversions and for
3081 COMPONENT_REF. Might as well play it safe and always test this. */
3082 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3083 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3084 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3085 && !(flags & OEP_ADDRESS_OF)))
3086 return false;
3088 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3089 We don't care about side effects in that case because the SAVE_EXPR
3090 takes care of that for us. In all other cases, two expressions are
3091 equal if they have no side effects. If we have two identical
3092 expressions with side effects that should be treated the same due
3093 to the only side effects being identical SAVE_EXPR's, that will
3094 be detected in the recursive calls below.
3095 If we are taking an invariant address of two identical objects
3096 they are necessarily equal as well. */
3097 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3098 && (TREE_CODE (arg0) == SAVE_EXPR
3099 || (flags & OEP_MATCH_SIDE_EFFECTS)
3100 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3101 return true;
3103 /* Next handle constant cases, those for which we can return 1 even
3104 if ONLY_CONST is set. */
3105 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3106 switch (TREE_CODE (arg0))
3108 case INTEGER_CST:
3109 return tree_int_cst_equal (arg0, arg1);
3111 case FIXED_CST:
3112 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3113 TREE_FIXED_CST (arg1));
3115 case REAL_CST:
3116 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3117 return true;
3119 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3121 /* If we do not distinguish between signed and unsigned zero,
3122 consider them equal. */
3123 if (real_zerop (arg0) && real_zerop (arg1))
3124 return true;
3126 return false;
3128 case VECTOR_CST:
3130 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3131 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3132 return false;
3134 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3135 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3136 return false;
3138 unsigned int count = vector_cst_encoded_nelts (arg0);
3139 for (unsigned int i = 0; i < count; ++i)
3140 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3141 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3142 return false;
3143 return true;
3146 case COMPLEX_CST:
3147 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3148 flags)
3149 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3150 flags));
3152 case STRING_CST:
3153 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3154 && ! memcmp (TREE_STRING_POINTER (arg0),
3155 TREE_STRING_POINTER (arg1),
3156 TREE_STRING_LENGTH (arg0)));
3158 case ADDR_EXPR:
3159 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3160 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3161 flags | OEP_ADDRESS_OF
3162 | OEP_MATCH_SIDE_EFFECTS);
3163 case CONSTRUCTOR:
3164 /* In GIMPLE empty constructors are allowed in initializers of
3165 aggregates. */
3166 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3167 default:
3168 break;
3171 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3172 two instances of undefined behavior will give identical results. */
3173 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3174 return false;
3176 /* Define macros to test an operand from arg0 and arg1 for equality and a
3177 variant that allows null and views null as being different from any
3178 non-null value. In the latter case, if either is null, they both
3179 must be; otherwise, do the normal comparison. */
3180 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3181 TREE_OPERAND (arg1, N), flags)
3183 #define OP_SAME_WITH_NULL(N) \
3184 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3185 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3187 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3189 case tcc_unary:
3190 /* Two conversions are equal only if signedness and modes match. */
3191 switch (TREE_CODE (arg0))
3193 CASE_CONVERT:
3194 case FIX_TRUNC_EXPR:
3195 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3196 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3197 return false;
3198 break;
3199 default:
3200 break;
3203 return OP_SAME (0);
3206 case tcc_comparison:
3207 case tcc_binary:
3208 if (OP_SAME (0) && OP_SAME (1))
3209 return true;
3211 /* For commutative ops, allow the other order. */
3212 return (commutative_tree_code (TREE_CODE (arg0))
3213 && operand_equal_p (TREE_OPERAND (arg0, 0),
3214 TREE_OPERAND (arg1, 1), flags)
3215 && operand_equal_p (TREE_OPERAND (arg0, 1),
3216 TREE_OPERAND (arg1, 0), flags));
3218 case tcc_reference:
3219 /* If either of the pointer (or reference) expressions we are
3220 dereferencing contain a side effect, these cannot be equal,
3221 but their addresses can be. */
3222 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3223 && (TREE_SIDE_EFFECTS (arg0)
3224 || TREE_SIDE_EFFECTS (arg1)))
3225 return false;
3227 switch (TREE_CODE (arg0))
3229 case INDIRECT_REF:
3230 if (!(flags & OEP_ADDRESS_OF))
3232 if (TYPE_ALIGN (TREE_TYPE (arg0))
3233 != TYPE_ALIGN (TREE_TYPE (arg1)))
3234 return false;
3235 /* Verify that the access types are compatible. */
3236 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3237 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3238 return false;
3240 flags &= ~OEP_ADDRESS_OF;
3241 return OP_SAME (0);
3243 case IMAGPART_EXPR:
3244 /* Require the same offset. */
3245 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3246 TYPE_SIZE (TREE_TYPE (arg1)),
3247 flags & ~OEP_ADDRESS_OF))
3248 return false;
3250 /* Fallthru. */
3251 case REALPART_EXPR:
3252 case VIEW_CONVERT_EXPR:
3253 return OP_SAME (0);
3255 case TARGET_MEM_REF:
3256 case MEM_REF:
3257 if (!(flags & OEP_ADDRESS_OF))
3259 /* Require equal access sizes. */
3260 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3261 && (!TYPE_SIZE (TREE_TYPE (arg0))
3262 || !TYPE_SIZE (TREE_TYPE (arg1))
3263 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3264 TYPE_SIZE (TREE_TYPE (arg1)),
3265 flags)))
3266 return false;
3267 /* Verify that access happens in similar types. */
3268 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3269 return false;
3270 /* Verify that accesses are TBAA compatible. */
3271 if (!alias_ptr_types_compatible_p
3272 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3273 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3274 || (MR_DEPENDENCE_CLIQUE (arg0)
3275 != MR_DEPENDENCE_CLIQUE (arg1))
3276 || (MR_DEPENDENCE_BASE (arg0)
3277 != MR_DEPENDENCE_BASE (arg1)))
3278 return false;
3279 /* Verify that alignment is compatible. */
3280 if (TYPE_ALIGN (TREE_TYPE (arg0))
3281 != TYPE_ALIGN (TREE_TYPE (arg1)))
3282 return false;
3284 flags &= ~OEP_ADDRESS_OF;
3285 return (OP_SAME (0) && OP_SAME (1)
3287 /* TARGET_MEM_REFs require equal extra operands. */
3287 && (TREE_CODE (arg0) != TARGET_MEM_REF
3288 || (OP_SAME_WITH_NULL (2)
3289 && OP_SAME_WITH_NULL (3)
3290 && OP_SAME_WITH_NULL (4))));
3292 case ARRAY_REF:
3293 case ARRAY_RANGE_REF:
3294 if (!OP_SAME (0))
3295 return false;
3296 flags &= ~OEP_ADDRESS_OF;
3297 /* Compare the array index by value first if it is constant, as we
3298 may have different types but the same value here. */
3299 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3300 TREE_OPERAND (arg1, 1))
3301 || OP_SAME (1))
3302 && OP_SAME_WITH_NULL (2)
3303 && OP_SAME_WITH_NULL (3)
3304 /* Compare low bound and element size as with OEP_ADDRESS_OF
3305 we have to account for the offset of the ref. */
3306 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3307 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3308 || (operand_equal_p (array_ref_low_bound
3309 (CONST_CAST_TREE (arg0)),
3310 array_ref_low_bound
3311 (CONST_CAST_TREE (arg1)), flags)
3312 && operand_equal_p (array_ref_element_size
3313 (CONST_CAST_TREE (arg0)),
3314 array_ref_element_size
3315 (CONST_CAST_TREE (arg1)),
3316 flags))));
3318 case COMPONENT_REF:
3319 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3320 may be NULL when we're called to compare MEM_EXPRs. */
3321 if (!OP_SAME_WITH_NULL (0))
3322 return false;
3324 bool compare_address = flags & OEP_ADDRESS_OF;
3326 /* Most of the time we only need to compare FIELD_DECLs for equality.
3327 However, when determining the address, look into the actual offsets.
3328 These may match for unions and unshared record types. */
3329 flags &= ~OEP_ADDRESS_OF;
3330 if (!OP_SAME (1))
3332 if (compare_address
3333 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3335 if (TREE_OPERAND (arg0, 2)
3336 || TREE_OPERAND (arg1, 2))
3337 return OP_SAME_WITH_NULL (2);
3338 tree field0 = TREE_OPERAND (arg0, 1);
3339 tree field1 = TREE_OPERAND (arg1, 1);
3341 if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3342 DECL_FIELD_OFFSET (field1), flags)
3343 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3344 DECL_FIELD_BIT_OFFSET (field1),
3345 flags))
3346 return false;
3348 else
3349 return false;
3352 return OP_SAME_WITH_NULL (2);
3354 case BIT_FIELD_REF:
3355 if (!OP_SAME (0))
3356 return false;
3357 flags &= ~OEP_ADDRESS_OF;
3358 return OP_SAME (1) && OP_SAME (2);
3360 default:
3361 return false;
3364 case tcc_expression:
3365 switch (TREE_CODE (arg0))
3367 case ADDR_EXPR:
3368 /* Be sure we pass the right ADDRESS_OF flag. */
3369 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3370 return operand_equal_p (TREE_OPERAND (arg0, 0),
3371 TREE_OPERAND (arg1, 0),
3372 flags | OEP_ADDRESS_OF);
3374 case TRUTH_NOT_EXPR:
3375 return OP_SAME (0);
3377 case TRUTH_ANDIF_EXPR:
3378 case TRUTH_ORIF_EXPR:
3379 return OP_SAME (0) && OP_SAME (1);
3381 case WIDEN_MULT_PLUS_EXPR:
3382 case WIDEN_MULT_MINUS_EXPR:
3383 if (!OP_SAME (2))
3384 return false;
3385 /* The multiplication operands are commutative. */
3386 /* FALLTHRU */
3388 case TRUTH_AND_EXPR:
3389 case TRUTH_OR_EXPR:
3390 case TRUTH_XOR_EXPR:
3391 if (OP_SAME (0) && OP_SAME (1))
3392 return true;
3394 /* Otherwise take into account that this is a commutative operation. */
3395 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3396 TREE_OPERAND (arg1, 1), flags)
3397 && operand_equal_p (TREE_OPERAND (arg0, 1),
3398 TREE_OPERAND (arg1, 0), flags));
3400 case COND_EXPR:
3401 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3402 return false;
3403 flags &= ~OEP_ADDRESS_OF;
3404 return OP_SAME (0);
3406 case BIT_INSERT_EXPR:
3407 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3408 of op1. Need to check to make sure they are the same. */
3409 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3410 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3411 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3412 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3413 return false;
3414 /* FALLTHRU */
3416 case VEC_COND_EXPR:
3417 case DOT_PROD_EXPR:
3418 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3420 case MODIFY_EXPR:
3421 case INIT_EXPR:
3422 case COMPOUND_EXPR:
3423 case PREDECREMENT_EXPR:
3424 case PREINCREMENT_EXPR:
3425 case POSTDECREMENT_EXPR:
3426 case POSTINCREMENT_EXPR:
3427 if (flags & OEP_LEXICOGRAPHIC)
3428 return OP_SAME (0) && OP_SAME (1);
3429 return false;
3431 case CLEANUP_POINT_EXPR:
3432 case EXPR_STMT:
3433 case SAVE_EXPR:
3434 if (flags & OEP_LEXICOGRAPHIC)
3435 return OP_SAME (0);
3436 return false;
3438 case OBJ_TYPE_REF:
3439 /* Virtual table reference. */
3440 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3441 OBJ_TYPE_REF_EXPR (arg1), flags))
3442 return false;
3443 flags &= ~OEP_ADDRESS_OF;
3444 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3445 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3446 return false;
3447 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3448 OBJ_TYPE_REF_OBJECT (arg1), flags))
3449 return false;
3450 if (virtual_method_call_p (arg0))
3452 if (!virtual_method_call_p (arg1))
3453 return false;
3454 return types_same_for_odr (obj_type_ref_class (arg0),
3455 obj_type_ref_class (arg1));
3457 return false;
3459 default:
3460 return false;
3463 case tcc_vl_exp:
3464 switch (TREE_CODE (arg0))
3466 case CALL_EXPR:
3467 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3468 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3469 /* If the CALL_EXPRs are not both internal or both normal function
3470 calls, then they are not equal. */
3471 return false;
3472 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3474 /* If the CALL_EXPRs call different internal functions, then they
3475 are not equal. */
3476 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3477 return false;
3479 else
3481 /* If the CALL_EXPRs call different functions, then they are not
3482 equal. */
3483 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3484 flags))
3485 return false;
3488 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3490 unsigned int cef = call_expr_flags (arg0);
3491 if (flags & OEP_PURE_SAME)
3492 cef &= ECF_CONST | ECF_PURE;
3493 else
3494 cef &= ECF_CONST;
3495 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3496 return false;
3499 /* Now see if all the arguments are the same. */
3501 const_call_expr_arg_iterator iter0, iter1;
3502 const_tree a0, a1;
3503 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3504 a1 = first_const_call_expr_arg (arg1, &iter1);
3505 a0 && a1;
3506 a0 = next_const_call_expr_arg (&iter0),
3507 a1 = next_const_call_expr_arg (&iter1))
3508 if (! operand_equal_p (a0, a1, flags))
3509 return false;
3511 /* If we get here and both argument lists are exhausted
3512 then the CALL_EXPRs are equal. */
3513 return ! (a0 || a1);
3515 default:
3516 return false;
3519 case tcc_declaration:
3520 /* Consider __builtin_sqrt equal to sqrt. */
3521 if (TREE_CODE (arg0) == FUNCTION_DECL)
3522 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3523 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3524 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3525 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3527 if (DECL_P (arg0)
3528 && (flags & OEP_DECL_NAME)
3529 && (flags & OEP_LEXICOGRAPHIC))
3531 /* Consider decls with the same name equal. The caller needs
3532 to make sure they refer to the same entity (such as a function
3533 formal parameter). */
3534 tree a0name = DECL_NAME (arg0);
3535 tree a1name = DECL_NAME (arg1);
3536 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3537 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3538 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3540 return false;
3542 case tcc_exceptional:
3543 if (TREE_CODE (arg0) == CONSTRUCTOR)
3545 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3546 return false;
3548 /* In GIMPLE constructors are used only to build vectors from
3549 elements. Individual elements in the constructor must be
3550 indexed in increasing order and form an initial sequence.
3552 We make no effort to compare constructors in GENERIC.
3553 (see sem_variable::equals in ipa-icf which can do so for
3554 constants). */
3555 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3556 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3557 return false;
3559 /* Be sure that the vectors constructed have the same representation.
3560 We have only tested that the element precisions and modes match.
3561 Vectors may be BLKmode, so also check that the numbers of
3562 parts match. */
3563 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3564 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3565 return false;
3567 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3568 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3569 unsigned int len = vec_safe_length (v0);
3571 if (len != vec_safe_length (v1))
3572 return false;
3574 for (unsigned int i = 0; i < len; i++)
3576 constructor_elt *c0 = &(*v0)[i];
3577 constructor_elt *c1 = &(*v1)[i];
3579 if (!operand_equal_p (c0->value, c1->value, flags)
3580 /* In GIMPLE the indexes can be either NULL or matching i.
3581 Double check this so we won't get false
3582 positives for GENERIC. */
3583 || (c0->index
3584 && (TREE_CODE (c0->index) != INTEGER_CST
3585 || compare_tree_int (c0->index, i)))
3586 || (c1->index
3587 && (TREE_CODE (c1->index) != INTEGER_CST
3588 || compare_tree_int (c1->index, i))))
3589 return false;
3591 return true;
3593 else if (TREE_CODE (arg0) == STATEMENT_LIST
3594 && (flags & OEP_LEXICOGRAPHIC))
3596 /* Compare the STATEMENT_LISTs. */
3597 tree_stmt_iterator tsi1, tsi2;
3598 tree body1 = CONST_CAST_TREE (arg0);
3599 tree body2 = CONST_CAST_TREE (arg1);
3600 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3601 tsi_next (&tsi1), tsi_next (&tsi2))
3603 /* The lists don't have the same number of statements. */
3604 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3605 return false;
3606 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3607 return true;
3608 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3609 flags & (OEP_LEXICOGRAPHIC
3610 | OEP_NO_HASH_CHECK)))
3611 return false;
3614 return false;
3616 case tcc_statement:
3617 switch (TREE_CODE (arg0))
3619 case RETURN_EXPR:
3620 if (flags & OEP_LEXICOGRAPHIC)
3621 return OP_SAME_WITH_NULL (0);
3622 return false;
3623 case DEBUG_BEGIN_STMT:
3624 if (flags & OEP_LEXICOGRAPHIC)
3625 return true;
3626 return false;
3627 default:
3628 return false;
3631 default:
3632 return false;
3635 #undef OP_SAME
3636 #undef OP_SAME_WITH_NULL
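/* Behavior sketch (illustrative): operand_equal_p (a + b, b + a, 0)
   returns true because PLUS_EXPR is commutative; 0.0 and -0.0 compare
   equal only when signed zeros are not honored, and adding OEP_BITWISE
   demands bit-for-bit identity, which rejects that pair again.  */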
3639 /* Generate a hash value for an expression. This can be used iteratively
3640 by passing a previous result as the HSTATE argument. */
3642 void
3643 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3644 unsigned int flags)
3646 int i;
3647 enum tree_code code;
3648 enum tree_code_class tclass;
3650 if (t == NULL_TREE || t == error_mark_node)
3652 hstate.merge_hash (0);
3653 return;
3656 STRIP_ANY_LOCATION_WRAPPER (t);
3658 if (!(flags & OEP_ADDRESS_OF))
3659 STRIP_NOPS (t);
3661 code = TREE_CODE (t);
3663 switch (code)
3665 /* Alas, constants aren't shared, so we can't rely on pointer
3666 identity. */
3667 case VOID_CST:
3668 hstate.merge_hash (0);
3669 return;
3670 case INTEGER_CST:
3671 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3672 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3673 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3674 return;
3675 case REAL_CST:
3677 unsigned int val2;
3678 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3679 val2 = rvc_zero;
3680 else
3681 val2 = real_hash (TREE_REAL_CST_PTR (t));
3682 hstate.merge_hash (val2);
3683 return;
3685 case FIXED_CST:
3687 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3688 hstate.merge_hash (val2);
3689 return;
3691 case STRING_CST:
3692 hstate.add ((const void *) TREE_STRING_POINTER (t),
3693 TREE_STRING_LENGTH (t));
3694 return;
3695 case COMPLEX_CST:
3696 hash_operand (TREE_REALPART (t), hstate, flags);
3697 hash_operand (TREE_IMAGPART (t), hstate, flags);
3698 return;
3699 case VECTOR_CST:
3701 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3702 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3703 unsigned int count = vector_cst_encoded_nelts (t);
3704 for (unsigned int i = 0; i < count; ++i)
3705 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3706 return;
3708 case SSA_NAME:
3709 /* We can just compare by pointer. */
3710 hstate.add_hwi (SSA_NAME_VERSION (t));
3711 return;
3712 case PLACEHOLDER_EXPR:
3713 /* The node itself doesn't matter. */
3714 return;
3715 case BLOCK:
3716 case OMP_CLAUSE:
3717 /* Ignore. */
3718 return;
3719 case TREE_LIST:
3720 /* A list of expressions, for a CALL_EXPR or as the elements of a
3721 VECTOR_CST. */
3722 for (; t; t = TREE_CHAIN (t))
3723 hash_operand (TREE_VALUE (t), hstate, flags);
3724 return;
3725 case CONSTRUCTOR:
3727 unsigned HOST_WIDE_INT idx;
3728 tree field, value;
3729 flags &= ~OEP_ADDRESS_OF;
3730 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3731 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3733 /* In GIMPLE the indexes are either NULL or match IDX. */
3734 if (field == NULL_TREE)
3735 field = bitsize_int (idx);
3736 hash_operand (field, hstate, flags);
3737 hash_operand (value, hstate, flags);
3739 return;
3741 case STATEMENT_LIST:
3743 tree_stmt_iterator i;
3744 for (i = tsi_start (CONST_CAST_TREE (t));
3745 !tsi_end_p (i); tsi_next (&i))
3746 hash_operand (tsi_stmt (i), hstate, flags);
3747 return;
3749 case TREE_VEC:
3750 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3751 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3752 return;
3753 case IDENTIFIER_NODE:
3754 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3755 return;
3756 case FUNCTION_DECL:
3757 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3758 Otherwise nodes that compare equal according to operand_equal_p might
3759 get different hash codes. However, don't do this for machine specific
3760 or front end builtins, since the function code is overloaded in those
3761 cases. */
3762 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3763 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3765 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3766 code = TREE_CODE (t);
3768 /* FALL THROUGH */
3769 default:
3770 if (POLY_INT_CST_P (t))
3772 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3773 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3774 return;
3776 tclass = TREE_CODE_CLASS (code);
3778 if (tclass == tcc_declaration)
3780 /* DECLs have a unique ID. */
3781 hstate.add_hwi (DECL_UID (t));
3783 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3785 /* For comparisons that can be swapped, use the lower
3786 tree code. */
3787 enum tree_code ccode = swap_tree_comparison (code);
3788 if (code < ccode)
3789 ccode = code;
3790 hstate.add_object (ccode);
3791 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3792 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3794 else if (CONVERT_EXPR_CODE_P (code))
3796 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3797 operand_equal_p. */
3798 enum tree_code ccode = NOP_EXPR;
3799 hstate.add_object (ccode);
3801 /* Don't hash the type, that can lead to having nodes which
3802 compare equal according to operand_equal_p, but which
3803 have different hash codes. Make sure to include signedness
3804 in the hash computation. */
3805 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3806 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3808 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3809 else if (code == MEM_REF
3810 && (flags & OEP_ADDRESS_OF) != 0
3811 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3812 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3813 && integer_zerop (TREE_OPERAND (t, 1)))
3814 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3815 hstate, flags);
3816 /* Don't ICE on FE specific trees, or their arguments etc.
3817 during operand_equal_p hash verification. */
3818 else if (!IS_EXPR_CODE_CLASS (tclass))
3819 gcc_assert (flags & OEP_HASH_CHECK);
3820 else
3822 unsigned int sflags = flags;
3824 hstate.add_object (code);
3826 switch (code)
3828 case ADDR_EXPR:
3829 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3830 flags |= OEP_ADDRESS_OF;
3831 sflags = flags;
3832 break;
3834 case INDIRECT_REF:
3835 case MEM_REF:
3836 case TARGET_MEM_REF:
3837 flags &= ~OEP_ADDRESS_OF;
3838 sflags = flags;
3839 break;
3841 case COMPONENT_REF:
3842 if (sflags & OEP_ADDRESS_OF)
3844 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3845 if (TREE_OPERAND (t, 2))
3846 hash_operand (TREE_OPERAND (t, 2), hstate,
3847 flags & ~OEP_ADDRESS_OF);
3848 else
3850 tree field = TREE_OPERAND (t, 1);
3851 hash_operand (DECL_FIELD_OFFSET (field),
3852 hstate, flags & ~OEP_ADDRESS_OF);
3853 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3854 hstate, flags & ~OEP_ADDRESS_OF);
3856 return;
3858 break;
3859 case ARRAY_REF:
3860 case ARRAY_RANGE_REF:
3861 case BIT_FIELD_REF:
3862 sflags &= ~OEP_ADDRESS_OF;
3863 break;
3865 case COND_EXPR:
3866 flags &= ~OEP_ADDRESS_OF;
3867 break;
3869 case WIDEN_MULT_PLUS_EXPR:
3870 case WIDEN_MULT_MINUS_EXPR:
3872 /* The multiplication operands are commutative. */
3873 inchash::hash one, two;
3874 hash_operand (TREE_OPERAND (t, 0), one, flags);
3875 hash_operand (TREE_OPERAND (t, 1), two, flags);
3876 hstate.add_commutative (one, two);
3877 hash_operand (TREE_OPERAND (t, 2), two, flags);
3878 return;
3881 case CALL_EXPR:
3882 if (CALL_EXPR_FN (t) == NULL_TREE)
3883 hstate.add_int (CALL_EXPR_IFN (t));
3884 break;
3886 case TARGET_EXPR:
3887 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3888 Usually different TARGET_EXPRs should just use
3889 different temporaries in their slots. */
3890 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3891 return;
3893 case OBJ_TYPE_REF:
3894 /* Virtual table reference. */
3895 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3896 flags &= ~OEP_ADDRESS_OF;
3897 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3898 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3899 if (!virtual_method_call_p (t))
3900 return;
3901 if (tree c = obj_type_ref_class (t))
3903 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3904 /* We compute mangled names only when free_lang_data is run.
3905 In that case we can hash precisely. */
3906 if (TREE_CODE (c) == TYPE_DECL
3907 && DECL_ASSEMBLER_NAME_SET_P (c))
3908 hstate.add_object
3909 (IDENTIFIER_HASH_VALUE
3910 (DECL_ASSEMBLER_NAME (c)));
3912 return;
3913 default:
3914 break;
3917 /* Don't hash the type; that can lead to having nodes which
3918 compare equal according to operand_equal_p, but which
3919 have different hash codes. */
3920 if (code == NON_LVALUE_EXPR)
3922 /* Make sure to include signedness in the hash computation. */
3923 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3924 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3927 else if (commutative_tree_code (code))
3929 /* It's a commutative expression. We want to hash it the same
3930 however it appears. We do this by first hashing both operands
3931 and then rehashing based on the order of their independent
3932 hashes. */
3933 inchash::hash one, two;
3934 hash_operand (TREE_OPERAND (t, 0), one, flags);
3935 hash_operand (TREE_OPERAND (t, 1), two, flags);
3936 hstate.add_commutative (one, two);
3938 else
3939 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3940 hash_operand (TREE_OPERAND (t, i), hstate,
3941 i == 0 ? flags : sflags);
3943 return;
3947 bool
3948 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3949 unsigned int flags, bool *ret)
3951 /* When checking and unless comparing DECL names, verify that if
3952 the outermost operand_equal_p call returns non-zero then ARG0
3953 and ARG1 have the same hash value. */
3954 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3956 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3958 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3960 inchash::hash hstate0 (0), hstate1 (0);
3961 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3962 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3963 hashval_t h0 = hstate0.end ();
3964 hashval_t h1 = hstate1.end ();
3965 gcc_assert (h0 == h1);
3967 *ret = true;
3969 else
3970 *ret = false;
3972 return true;
3975 return false;
3979 static operand_compare default_compare_instance;
3981 /* Convenience wrapper around the operand_compare class, because usually we do
3982 not need to play with the valueizer. */
3984 bool
3985 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3987 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3990 namespace inchash
3993 /* Generate a hash value for an expression. This can be used iteratively
3994 by passing a previous result as the HSTATE argument.
3996 This function is intended to produce the same hash for expressions which
3997 would compare equal using operand_equal_p. */
3998 void
3999 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4001 default_compare_instance.hash_operand (t, hstate, flags);
4006 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4007 with a different signedness or a narrower precision. */
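/* For example, assuming 32-bit int and 64-bit long long: comparing
   "(unsigned) x" with "x" succeeds here, since STRIP_NOPS removes the
   sign-changing but mode-preserving conversion and the inner operands
   match; "x" likewise matches "(long long) x", a single widening
   conversion of the same value. */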
4009 static bool
4010 operand_equal_for_comparison_p (tree arg0, tree arg1)
4012 if (operand_equal_p (arg0, arg1, 0))
4013 return true;
4015 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4016 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4017 return false;
4019 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4020 and see if the inner values are the same. This removes any
4021 signedness comparison, which doesn't matter here. */
4022 tree op0 = arg0;
4023 tree op1 = arg1;
4024 STRIP_NOPS (op0);
4025 STRIP_NOPS (op1);
4026 if (operand_equal_p (op0, op1, 0))
4027 return true;
4029 /* Discard a single widening conversion from ARG1 and see if the inner
4030 value is the same as ARG0. */
4031 if (CONVERT_EXPR_P (arg1)
4032 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4033 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4034 < TYPE_PRECISION (TREE_TYPE (arg1))
4035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4036 return true;
4038 return false;
4041 /* See if ARG is an expression that is either a comparison or is performing
4042 arithmetic on comparisons. The comparisons must only be comparing
4043 two different values, which will be stored in *CVAL1 and *CVAL2; if
4044 they are nonzero it means that some operands have already been found.
4045 No variables may be used anywhere else in the expression except in the
4046 comparisons.
4048 If this is true, return true; otherwise, return false. */
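/* For example, for ARG == "(a < b) || (a == b)" this returns true
   with *CVAL1 == a and *CVAL2 == b, whereas "(a < b) || (a < c)"
   fails because a third value, c, appears in the comparisons. */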
4050 static bool
4051 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4053 enum tree_code code = TREE_CODE (arg);
4054 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4056 /* We can handle some of the tcc_expression cases here. */
4057 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4058 tclass = tcc_unary;
4059 else if (tclass == tcc_expression
4060 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4061 || code == COMPOUND_EXPR))
4062 tclass = tcc_binary;
4064 switch (tclass)
4066 case tcc_unary:
4067 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4069 case tcc_binary:
4070 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4071 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4073 case tcc_constant:
4074 return true;
4076 case tcc_expression:
4077 if (code == COND_EXPR)
4078 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4079 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4080 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4081 return false;
4083 case tcc_comparison:
4084 /* First see if we can handle the first operand, then the second. For
4085 the second operand, we know *CVAL1 can't be zero. It must be that
4086 one side of the comparison is each of the values; test for the
4087 case where this isn't true by failing if the two operands
4088 are the same. */
4090 if (operand_equal_p (TREE_OPERAND (arg, 0),
4091 TREE_OPERAND (arg, 1), 0))
4092 return false;
4094 if (*cval1 == 0)
4095 *cval1 = TREE_OPERAND (arg, 0);
4096 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4098 else if (*cval2 == 0)
4099 *cval2 = TREE_OPERAND (arg, 0);
4100 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4102 else
4103 return false;
4105 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4107 else if (*cval2 == 0)
4108 *cval2 = TREE_OPERAND (arg, 1);
4109 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4111 else
4112 return false;
4114 return true;
4116 default:
4117 return false;
4121 /* ARG is a tree that is known to contain just arithmetic operations and
4122 comparisons. Evaluate the operations in the tree substituting NEW0 for
4123 any occurrence of OLD0 as an operand of a comparison and likewise for
4124 NEW1 and OLD1. */
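/* For example, with OLD0 == a, NEW0 == x, OLD1 == b and NEW1 == y,
   the tree "a < b && a < c" is rebuilt as "x < y && x < c"; the
   operand c matches neither OLD0 nor OLD1 and is left alone. */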
4126 static tree
4127 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4128 tree old1, tree new1)
4130 tree type = TREE_TYPE (arg);
4131 enum tree_code code = TREE_CODE (arg);
4132 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4134 /* We can handle some of the tcc_expression cases here. */
4135 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4136 tclass = tcc_unary;
4137 else if (tclass == tcc_expression
4138 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4139 tclass = tcc_binary;
4141 switch (tclass)
4143 case tcc_unary:
4144 return fold_build1_loc (loc, code, type,
4145 eval_subst (loc, TREE_OPERAND (arg, 0),
4146 old0, new0, old1, new1));
4148 case tcc_binary:
4149 return fold_build2_loc (loc, code, type,
4150 eval_subst (loc, TREE_OPERAND (arg, 0),
4151 old0, new0, old1, new1),
4152 eval_subst (loc, TREE_OPERAND (arg, 1),
4153 old0, new0, old1, new1));
4155 case tcc_expression:
4156 switch (code)
4158 case SAVE_EXPR:
4159 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4160 old1, new1);
4162 case COMPOUND_EXPR:
4163 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4164 old1, new1);
4166 case COND_EXPR:
4167 return fold_build3_loc (loc, code, type,
4168 eval_subst (loc, TREE_OPERAND (arg, 0),
4169 old0, new0, old1, new1),
4170 eval_subst (loc, TREE_OPERAND (arg, 1),
4171 old0, new0, old1, new1),
4172 eval_subst (loc, TREE_OPERAND (arg, 2),
4173 old0, new0, old1, new1));
4174 default:
4175 break;
4177 /* Fall through - ??? */
4179 case tcc_comparison:
4181 tree arg0 = TREE_OPERAND (arg, 0);
4182 tree arg1 = TREE_OPERAND (arg, 1);
4184 /* We need to check both for exact equality and tree equality. The
4185 former will be true if the operand has a side-effect. In that
4186 case, we know the operand occurred exactly once. */
4188 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4189 arg0 = new0;
4190 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4191 arg0 = new1;
4193 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4194 arg1 = new0;
4195 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4196 arg1 = new1;
4198 return fold_build2_loc (loc, code, type, arg0, arg1);
4201 default:
4202 return arg;
4206 /* Return a tree for the case when the result of an expression is RESULT
4207 converted to TYPE and OMITTED was previously an operand of the expression
4208 but is now not needed (e.g., we folded OMITTED * 0).
4210 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4211 the conversion of RESULT to TYPE. */
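/* For example, when "f () * 0" is folded, RESULT is 0 and OMITTED is
   the call "f ()"; the call has side effects, so the fold yields the
   COMPOUND_EXPR "(f (), 0)" rather than plain 0. */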
4213 tree
4214 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4216 tree t = fold_convert_loc (loc, type, result);
4218 /* If the resulting operand is an empty statement, just return the omitted
4219 statement cast to void. */
4220 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4221 return build1_loc (loc, NOP_EXPR, void_type_node,
4222 fold_ignored_result (omitted));
4224 if (TREE_SIDE_EFFECTS (omitted))
4225 return build2_loc (loc, COMPOUND_EXPR, type,
4226 fold_ignored_result (omitted), t);
4228 return non_lvalue_loc (loc, t);
4231 /* Return a tree for the case when the result of an expression is RESULT
4232 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4233 of the expression but are now not needed.
4235 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4236 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4237 evaluated before OMITTED2. Otherwise, if neither has side effects,
4238 just do the conversion of RESULT to TYPE. */
4240 tree
4241 omit_two_operands_loc (location_t loc, tree type, tree result,
4242 tree omitted1, tree omitted2)
4244 tree t = fold_convert_loc (loc, type, result);
4246 if (TREE_SIDE_EFFECTS (omitted2))
4247 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4248 if (TREE_SIDE_EFFECTS (omitted1))
4249 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4251 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4255 /* Return a simplified tree node for the truth-negation of ARG. This
4256 never alters ARG itself. We assume that ARG is an operation that
4257 returns a truth value (0 or 1).
4259 FIXME: one would think we would fold the result, but it causes
4260 problems with the dominator optimizer. */
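/* For example, an integer "a < b" inverts to "a >= b", and "a && b"
   inverts to "!a || !b". A trapping floating-point "a < b" is left
   alone here, and the caller wraps it in a TRUTH_NOT_EXPR instead. */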
4262 static tree
4263 fold_truth_not_expr (location_t loc, tree arg)
4265 tree type = TREE_TYPE (arg);
4266 enum tree_code code = TREE_CODE (arg);
4267 location_t loc1, loc2;
4269 /* If this is a comparison, we can simply invert it, except for
4270 floating-point non-equality comparisons, in which case we just
4271 enclose a TRUTH_NOT_EXPR around what we have. */
4273 if (TREE_CODE_CLASS (code) == tcc_comparison)
4275 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4276 if (FLOAT_TYPE_P (op_type)
4277 && flag_trapping_math
4278 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4279 && code != NE_EXPR && code != EQ_EXPR)
4280 return NULL_TREE;
4282 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4283 if (code == ERROR_MARK)
4284 return NULL_TREE;
4286 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4287 TREE_OPERAND (arg, 1));
4288 copy_warning (ret, arg);
4289 return ret;
4292 switch (code)
4294 case INTEGER_CST:
4295 return constant_boolean_node (integer_zerop (arg), type);
4297 case TRUTH_AND_EXPR:
4298 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4299 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4300 return build2_loc (loc, TRUTH_OR_EXPR, type,
4301 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4302 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4304 case TRUTH_OR_EXPR:
4305 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4306 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4307 return build2_loc (loc, TRUTH_AND_EXPR, type,
4308 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4309 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4311 case TRUTH_XOR_EXPR:
4312 /* Here we can invert either operand. We invert the first operand
4313 unless the second operand is a TRUTH_NOT_EXPR in which case our
4314 result is the XOR of the first operand with the inside of the
4315 negation of the second operand. */
4317 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4318 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4319 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4320 else
4321 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4322 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4323 TREE_OPERAND (arg, 1));
4325 case TRUTH_ANDIF_EXPR:
4326 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4327 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4328 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4329 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4330 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4332 case TRUTH_ORIF_EXPR:
4333 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4334 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4335 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4336 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4337 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4339 case TRUTH_NOT_EXPR:
4340 return TREE_OPERAND (arg, 0);
4342 case COND_EXPR:
4344 tree arg1 = TREE_OPERAND (arg, 1);
4345 tree arg2 = TREE_OPERAND (arg, 2);
4347 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4348 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4350 /* A COND_EXPR may have a throw as one operand, which
4351 then has void type. Just leave void operands
4352 as they are. */
4353 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4354 VOID_TYPE_P (TREE_TYPE (arg1))
4355 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4356 VOID_TYPE_P (TREE_TYPE (arg2))
4357 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4360 case COMPOUND_EXPR:
4361 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4362 return build2_loc (loc, COMPOUND_EXPR, type,
4363 TREE_OPERAND (arg, 0),
4364 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4366 case NON_LVALUE_EXPR:
4367 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4368 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4370 CASE_CONVERT:
4371 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4372 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4374 /* fall through */
4376 case FLOAT_EXPR:
4377 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4378 return build1_loc (loc, TREE_CODE (arg), type,
4379 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4381 case BIT_AND_EXPR:
4382 if (!integer_onep (TREE_OPERAND (arg, 1)))
4383 return NULL_TREE;
4384 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4386 case SAVE_EXPR:
4387 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4389 case CLEANUP_POINT_EXPR:
4390 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4391 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4392 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4394 default:
4395 return NULL_TREE;
4399 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4400 assume that ARG is an operation that returns a truth value (0 or 1
4401 for scalars, 0 or -1 for vectors). Return the folded expression if
4402 folding is successful. Otherwise, return NULL_TREE. */
4404 static tree
4405 fold_invert_truthvalue (location_t loc, tree arg)
4407 tree type = TREE_TYPE (arg);
4408 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4409 ? BIT_NOT_EXPR
4410 : TRUTH_NOT_EXPR,
4411 type, arg);
4414 /* Return a simplified tree node for the truth-negation of ARG. This
4415 never alters ARG itself. We assume that ARG is an operation that
4416 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4418 tree
4419 invert_truthvalue_loc (location_t loc, tree arg)
4421 if (TREE_CODE (arg) == ERROR_MARK)
4422 return arg;
4424 tree type = TREE_TYPE (arg);
4425 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4426 ? BIT_NOT_EXPR
4427 : TRUTH_NOT_EXPR,
4428 type, arg);
4431 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4432 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4433 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4434 is the original memory reference used to preserve the alias set of
4435 the access. */
4437 static tree
4438 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4439 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4440 int unsignedp, int reversep)
4442 tree result, bftype;
4444 /* Attempt not to lose the access path if possible. */
4445 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4447 tree ninner = TREE_OPERAND (orig_inner, 0);
4448 machine_mode nmode;
4449 poly_int64 nbitsize, nbitpos;
4450 tree noffset;
4451 int nunsignedp, nreversep, nvolatilep = 0;
4452 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4453 &noffset, &nmode, &nunsignedp,
4454 &nreversep, &nvolatilep);
4455 if (base == inner
4456 && noffset == NULL_TREE
4457 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4458 && !reversep
4459 && !nreversep
4460 && !nvolatilep)
4462 inner = ninner;
4463 bitpos -= nbitpos;
4467 alias_set_type iset = get_alias_set (orig_inner);
4468 if (iset == 0 && get_alias_set (inner) != iset)
4469 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4470 build_fold_addr_expr (inner),
4471 build_int_cst (ptr_type_node, 0));
4473 if (known_eq (bitpos, 0) && !reversep)
4475 tree size = TYPE_SIZE (TREE_TYPE (inner));
4476 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4477 || POINTER_TYPE_P (TREE_TYPE (inner)))
4478 && tree_fits_shwi_p (size)
4479 && tree_to_shwi (size) == bitsize)
4480 return fold_convert_loc (loc, type, inner);
4483 bftype = type;
4484 if (TYPE_PRECISION (bftype) != bitsize
4485 || TYPE_UNSIGNED (bftype) == !unsignedp)
4486 bftype = build_nonstandard_integer_type (bitsize, 0);
4488 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4489 bitsize_int (bitsize), bitsize_int (bitpos));
4490 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4492 if (bftype != type)
4493 result = fold_convert_loc (loc, type, result);
4495 return result;
4498 /* Optimize a bit-field compare.
4500 There are two cases: the first is a compare against a constant and the
4501 second is a comparison of two items where the fields are at the same
4502 bit position relative to the start of a chunk (byte, halfword, word)
4503 large enough to contain it. In these cases we can avoid the shift
4504 implicit in bitfield extractions.
4506 For constants, we emit a compare of the shifted constant with the
4507 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4508 compared. For two fields at the same position, we do the ANDs with the
4509 similar mask and compare the result of the ANDs.
4511 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4512 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4513 are the left and right operands of the comparison, respectively.
4515 If the optimization described above can be done, we return the resulting
4516 tree. Otherwise we return zero. */
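/* As an illustrative sketch, assuming a little-endian target and

     struct S { unsigned a : 3; unsigned b : 5; } s;

   the test "s.b == 7" loads the byte containing both fields and
   becomes "(byte & 0xf8) == 0x38": the constant is shifted to the
   field's position and masked, instead of shifting the field down.
   A constant that cannot fit in the field, such as "s.b == 40", is
   caught below and warned about as always-false. */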
4518 static tree
4519 optimize_bit_field_compare (location_t loc, enum tree_code code,
4520 tree compare_type, tree lhs, tree rhs)
4522 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4523 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4524 tree type = TREE_TYPE (lhs);
4525 tree unsigned_type;
4526 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4527 machine_mode lmode, rmode;
4528 scalar_int_mode nmode;
4529 int lunsignedp, runsignedp;
4530 int lreversep, rreversep;
4531 int lvolatilep = 0, rvolatilep = 0;
4532 tree linner, rinner = NULL_TREE;
4533 tree mask;
4534 tree offset;
4536 /* Get all the information about the extractions being done. If the bit size
4537 is the same as the size of the underlying object, we aren't doing an
4538 extraction at all and so can do nothing. We also don't want to
4539 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4540 then will no longer be able to replace it. */
4541 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4542 &lunsignedp, &lreversep, &lvolatilep);
4543 if (linner == lhs
4544 || !known_size_p (plbitsize)
4545 || !plbitsize.is_constant (&lbitsize)
4546 || !plbitpos.is_constant (&lbitpos)
4547 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4548 || offset != 0
4549 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4550 || lvolatilep)
4551 return 0;
4553 if (const_p)
4554 rreversep = lreversep;
4555 else
4557 /* If this is not a constant, we can only do something if bit positions,
4558 sizes, signedness and storage order are the same. */
4559 rinner
4560 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4561 &runsignedp, &rreversep, &rvolatilep);
4563 if (rinner == rhs
4564 || maybe_ne (lbitpos, rbitpos)
4565 || maybe_ne (lbitsize, rbitsize)
4566 || lunsignedp != runsignedp
4567 || lreversep != rreversep
4568 || offset != 0
4569 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4570 || rvolatilep)
4571 return 0;
4574 /* Honor the C++ memory model and mimic what RTL expansion does. */
4575 poly_uint64 bitstart = 0;
4576 poly_uint64 bitend = 0;
4577 if (TREE_CODE (lhs) == COMPONENT_REF)
4579 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4580 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4581 return 0;
4584 /* See if we can find a mode to refer to this field. We should be able to,
4585 but fail if we can't. */
4586 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4587 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4588 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4589 TYPE_ALIGN (TREE_TYPE (rinner))),
4590 BITS_PER_WORD, false, &nmode))
4591 return 0;
4593 /* Set signed and unsigned types of the precision of this mode for the
4594 shifts below. */
4595 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4597 /* Compute the bit position and size for the new reference and our offset
4598 within it. If the new reference is the same size as the original, we
4599 won't optimize anything, so return zero. */
4600 nbitsize = GET_MODE_BITSIZE (nmode);
4601 nbitpos = lbitpos & ~ (nbitsize - 1);
4602 lbitpos -= nbitpos;
4603 if (nbitsize == lbitsize)
4604 return 0;
4606 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4607 lbitpos = nbitsize - lbitsize - lbitpos;
4609 /* Make the mask to be used against the extracted field. */
4610 mask = build_int_cst_type (unsigned_type, -1);
4611 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4612 mask = const_binop (RSHIFT_EXPR, mask,
4613 size_int (nbitsize - lbitsize - lbitpos));
4615 if (! const_p)
4617 if (nbitpos < 0)
4618 return 0;
4620 /* If not comparing with constant, just rework the comparison
4621 and return. */
4622 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4623 nbitsize, nbitpos, 1, lreversep);
4624 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4625 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4626 nbitsize, nbitpos, 1, rreversep);
4627 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4628 return fold_build2_loc (loc, code, compare_type, t1, t2);
4631 /* Otherwise, we are handling the constant case. See if the constant is too
4632 big for the field. Warn and return a tree for 0 (false) if so. We do
4633 this not only for its own sake, but to avoid having to test for this
4634 error case below. If we didn't, we might generate wrong code.
4636 For unsigned fields, the constant shifted right by the field length should
4637 be all zero. For signed fields, the high-order bits should agree with
4638 the sign bit. */
4640 if (lunsignedp)
4642 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4644 warning (0, "comparison is always %d due to width of bit-field",
4645 code == NE_EXPR);
4646 return constant_boolean_node (code == NE_EXPR, compare_type);
4649 else
4651 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4652 if (tem != 0 && tem != -1)
4654 warning (0, "comparison is always %d due to width of bit-field",
4655 code == NE_EXPR);
4656 return constant_boolean_node (code == NE_EXPR, compare_type);
4660 if (nbitpos < 0)
4661 return 0;
4663 /* Single-bit compares should always be against zero. */
4664 if (lbitsize == 1 && ! integer_zerop (rhs))
4666 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4667 rhs = build_int_cst (type, 0);
4670 /* Make a new bitfield reference, shift the constant over the
4671 appropriate number of bits and mask it with the computed mask
4672 (in case this was a signed field). If we changed it, make a new one. */
4673 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4674 nbitsize, nbitpos, 1, lreversep);
4676 rhs = const_binop (BIT_AND_EXPR,
4677 const_binop (LSHIFT_EXPR,
4678 fold_convert_loc (loc, unsigned_type, rhs),
4679 size_int (lbitpos)),
4680 mask);
4682 lhs = build2_loc (loc, code, compare_type,
4683 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4684 return lhs;
4687 /* Subroutine for fold_truth_andor_1: decode a field reference.
4689 If EXP is a comparison reference, we return the innermost reference.
4691 *PBITSIZE is set to the number of bits in the reference; *PBITPOS is
4692 set to the starting bit number.
4694 If the innermost field can be completely contained in a mode-sized
4695 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4697 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4698 otherwise it is not changed.
4700 *PUNSIGNEDP is set to the signedness of the field.
4702 *PREVERSEP is set to the storage order of the field.
4704 *PMASK is set to the mask used. This is either contained in a
4705 BIT_AND_EXPR or derived from the width of the field.
4707 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4709 Return 0 if this is not a component reference or is one that we can't
4710 do anything with. */
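/* For example, given "struct S { unsigned f : 5; } s;" (an
   illustrative layout), "s.f" decodes with *PBITSIZE == 5 and
   *PMASK == 0x1f, while "s.f & 3" additionally sets *PAND_MASK to 3
   and narrows *PMASK to 3. */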
4712 static tree
4713 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4714 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4715 int *punsignedp, int *preversep, int *pvolatilep,
4716 tree *pmask, tree *pand_mask)
4718 tree exp = *exp_;
4719 tree outer_type = 0;
4720 tree and_mask = 0;
4721 tree mask, inner, offset;
4722 tree unsigned_type;
4723 unsigned int precision;
4725 /* All the optimizations using this function assume integer fields.
4726 There are problems with FP fields since the type_for_size call
4727 below can fail for, e.g., XFmode. */
4728 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4729 return NULL_TREE;
4731 /* We are interested in the bare arrangement of bits, so strip everything
4732 that doesn't affect the machine mode. However, record the type of the
4733 outermost expression if it may matter below. */
4734 if (CONVERT_EXPR_P (exp)
4735 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4736 outer_type = TREE_TYPE (exp);
4737 STRIP_NOPS (exp);
4739 if (TREE_CODE (exp) == BIT_AND_EXPR)
4741 and_mask = TREE_OPERAND (exp, 1);
4742 exp = TREE_OPERAND (exp, 0);
4743 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4744 if (TREE_CODE (and_mask) != INTEGER_CST)
4745 return NULL_TREE;
4748 poly_int64 poly_bitsize, poly_bitpos;
4749 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4750 pmode, punsignedp, preversep, pvolatilep);
4751 if ((inner == exp && and_mask == 0)
4752 || !poly_bitsize.is_constant (pbitsize)
4753 || !poly_bitpos.is_constant (pbitpos)
4754 || *pbitsize < 0
4755 || offset != 0
4756 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4757 /* Reject out-of-bound accesses (PR79731). */
4758 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4759 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4760 *pbitpos + *pbitsize) < 0))
4761 return NULL_TREE;
4763 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4764 if (unsigned_type == NULL_TREE)
4765 return NULL_TREE;
4767 *exp_ = exp;
4769 /* If the number of bits in the reference is the same as the bitsize of
4770 the outer type, then the outer type gives the signedness. Otherwise
4771 (in case of a small bitfield) the signedness is unchanged. */
4772 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4773 *punsignedp = TYPE_UNSIGNED (outer_type);
4775 /* Compute the mask to access the bitfield. */
4776 precision = TYPE_PRECISION (unsigned_type);
4778 mask = build_int_cst_type (unsigned_type, -1);
4780 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4781 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4783 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4784 if (and_mask != 0)
4785 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4786 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4788 *pmask = mask;
4789 *pand_mask = and_mask;
4790 return inner;
4793 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4794 bit positions and MASK is SIGNED. */
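/* For example, with SIZE == 4 a signed-typed MASK of value 0xf is
   accepted, while 0x17 (ones not contiguous from bit 0) or any mask
   of unsigned type is rejected. */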
4796 static bool
4797 all_ones_mask_p (const_tree mask, unsigned int size)
4799 tree type = TREE_TYPE (mask);
4800 unsigned int precision = TYPE_PRECISION (type);
4802 /* If this function returns true when the type of the mask is
4803 UNSIGNED, then there will be errors. In particular see
4804 gcc.c-torture/execute/990326-1.c. There does not appear to be
4805 any documentation paper trail as to why this is so. But the pre
4806 wide-int worked with that restriction and it has been preserved
4807 here. */
4808 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4809 return false;
4811 return wi::mask (size, false, precision) == wi::to_wide (mask);
4814 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4815 represents the sign bit of EXP's type. If EXP represents a sign
4816 or zero extension, also test VAL against the unextended type.
4817 The return value is the (sub)expression whose sign bit is VAL,
4818 or NULL_TREE otherwise. */
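/* For example, assuming 32-bit int, VAL == -2147483648 (bit pattern
   0x80000000) is the sign bit of "int x" and x is returned; for
   "(int) c" with "signed char c", VAL == 128 (0x80) matches the sign
   bit of the unextended 8-bit type and c is returned. */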
4820 tree
4821 sign_bit_p (tree exp, const_tree val)
4823 int width;
4824 tree t;
4826 /* Tree EXP must have an integral type. */
4827 t = TREE_TYPE (exp);
4828 if (! INTEGRAL_TYPE_P (t))
4829 return NULL_TREE;
4831 /* Tree VAL must be an integer constant. */
4832 if (TREE_CODE (val) != INTEGER_CST
4833 || TREE_OVERFLOW (val))
4834 return NULL_TREE;
4836 width = TYPE_PRECISION (t);
4837 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4838 return exp;
4840 /* Handle extension from a narrower type. */
4841 if (TREE_CODE (exp) == NOP_EXPR
4842 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4843 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4845 return NULL_TREE;
4848 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4849 to be evaluated unconditionally. */
4851 static bool
4852 simple_operand_p (const_tree exp)
4854 /* Strip any conversions that don't change the machine mode. */
4855 STRIP_NOPS (exp);
4857 return (CONSTANT_CLASS_P (exp)
4858 || TREE_CODE (exp) == SSA_NAME
4859 || (DECL_P (exp)
4860 && ! TREE_ADDRESSABLE (exp)
4861 && ! TREE_THIS_VOLATILE (exp)
4862 && ! DECL_NONLOCAL (exp)
4863 /* Don't regard global variables as simple. They may be
4864 allocated in ways unknown to the compiler (shared memory,
4865 #pragma weak, etc.). */
4866 && ! TREE_PUBLIC (exp)
4867 && ! DECL_EXTERNAL (exp)
4868 /* Weakrefs are not safe to be read, since they can be NULL.
4869 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4870 have DECL_WEAK flag set. */
4871 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4872 /* Loading a static variable is unduly expensive, but global
4873 registers aren't expensive. */
4874 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4877 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4878 to be evaluated unconditionally.
4879 In addition to simple_operand_p, we consider comparisons, conversions,
4880 and logical-not operations simple if their operands are simple, too. */
4882 static bool
4883 simple_operand_p_2 (tree exp)
4885 enum tree_code code;
4887 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4888 return false;
4890 while (CONVERT_EXPR_P (exp))
4891 exp = TREE_OPERAND (exp, 0);
4893 code = TREE_CODE (exp);
4895 if (TREE_CODE_CLASS (code) == tcc_comparison)
4896 return (simple_operand_p (TREE_OPERAND (exp, 0))
4897 && simple_operand_p (TREE_OPERAND (exp, 1)));
4899 if (code == TRUTH_NOT_EXPR)
4900 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4902 return simple_operand_p (exp);
4906 /* The following functions are subroutines to fold_range_test and allow it to
4907 try to change a logical combination of comparisons into a range test.
4909 For example, both
4910 X == 2 || X == 3 || X == 4 || X == 5
4911 and
4912 X >= 2 && X <= 5
4913 are converted to
4914 (unsigned) (X - 2) <= 3
4916 We describe each set of comparisons as being either inside or outside
4917 a range, using a variable named like IN_P, and then describe the
4918 range with a lower and upper bound. If one of the bounds is omitted,
4919 it represents either the highest or lowest value of the type.
4921 In the comments below, we represent a range by two numbers in brackets
4922 preceded by a "+" to designate being inside that range, or a "-" to
4923 designate being outside that range, so the condition can be inverted by
4924 flipping the prefix. An omitted bound is represented by a "-". For
4925 example, "- [-, 10]" means being outside the range starting at the lowest
4926 possible value and ending at 10, in other words, being greater than 10.
4927 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4928 always false.
4930 We set up things so that the missing bounds are handled in a consistent
4931 manner so neither a missing bound nor "true" and "false" need to be
4932 handled using a special case. */
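/* For example, "X >= 2 && X <= 5" is the range "+ [2, 5]" and, for a
   32-bit X, is emitted as "(unsigned) (X - 2) <= 3": the subtraction
   maps [2, 5] onto [0, 3], while any other X wraps around to a value
   above 3 (X == 1 yields 0xffffffff). "X > 10" is "- [-, 10]", and
   flipping the prefix to "+ [-, 10]" expresses "X <= 10". */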
4934 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4935 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4936 and UPPER1_P are nonzero if the respective argument is an upper bound
4937 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4938 must be specified for a comparison. ARG1 will be converted to ARG0's
4939 type if both are specified. */
4941 static tree
4942 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4943 tree arg1, int upper1_p)
4945 tree tem;
4946 int result;
4947 int sgn0, sgn1;
4949 /* If neither arg represents infinity, do the normal operation.
4950 Else, if not a comparison, return infinity. Else handle the special
4951 comparison rules. Note that most of the cases below won't occur, but
4952 are handled for consistency. */
4954 if (arg0 != 0 && arg1 != 0)
4956 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4957 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4958 STRIP_NOPS (tem);
4959 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4962 if (TREE_CODE_CLASS (code) != tcc_comparison)
4963 return 0;
4965 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4966 for neither. In real maths, we cannot assume open ended ranges are
4967 the same. But, this is computer arithmetic, where numbers are finite.
4968 We can therefore make the transformation of any unbounded range with
4969 the value Z, Z being greater than any representable number. This permits
4970 us to treat unbounded ranges as equal. */
4971 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4972 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4973 switch (code)
4975 case EQ_EXPR:
4976 result = sgn0 == sgn1;
4977 break;
4978 case NE_EXPR:
4979 result = sgn0 != sgn1;
4980 break;
4981 case LT_EXPR:
4982 result = sgn0 < sgn1;
4983 break;
4984 case LE_EXPR:
4985 result = sgn0 <= sgn1;
4986 break;
4987 case GT_EXPR:
4988 result = sgn0 > sgn1;
4989 break;
4990 case GE_EXPR:
4991 result = sgn0 >= sgn1;
4992 break;
4993 default:
4994 gcc_unreachable ();
4997 return constant_boolean_node (result, type);
5000 /* Helper routine for make_range. Perform one step for it; return
5001 new expression if the loop should continue or NULL_TREE if it should
5002 stop. */
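/* For example, starting from "X + 10 < 20" with 8-bit unsigned X,
   the comparison step yields the range + [0, 19] of X + 10, and the
   following PLUS_EXPR step subtracts 10 from both bounds; the wrapped
   result is normalized to - [10, 245], i.e. X <= 9 || X >= 246. */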
5004 tree
5005 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5006 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5007 bool *strict_overflow_p)
5009 tree arg0_type = TREE_TYPE (arg0);
5010 tree n_low, n_high, low = *p_low, high = *p_high;
5011 int in_p = *p_in_p, n_in_p;
5013 switch (code)
5015 case TRUTH_NOT_EXPR:
5016 /* We can only do something if the range is testing for zero. */
5017 if (low == NULL_TREE || high == NULL_TREE
5018 || ! integer_zerop (low) || ! integer_zerop (high))
5019 return NULL_TREE;
5020 *p_in_p = ! in_p;
5021 return arg0;
5023 case EQ_EXPR: case NE_EXPR:
5024 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5025 /* We can only do something if the range is testing for zero
5026 and if the second operand is an integer constant. Note that
5027 saying something is "in" the range we make is done by
5028 complementing IN_P since it will set in the initial case of
5029 being not equal to zero; "out" is leaving it alone. */
5030 if (low == NULL_TREE || high == NULL_TREE
5031 || ! integer_zerop (low) || ! integer_zerop (high)
5032 || TREE_CODE (arg1) != INTEGER_CST)
5033 return NULL_TREE;
5035 switch (code)
5037 case NE_EXPR: /* - [c, c] */
5038 low = high = arg1;
5039 break;
5040 case EQ_EXPR: /* + [c, c] */
5041 in_p = ! in_p, low = high = arg1;
5042 break;
5043 case GT_EXPR: /* - [-, c] */
5044 low = 0, high = arg1;
5045 break;
5046 case GE_EXPR: /* + [c, -] */
5047 in_p = ! in_p, low = arg1, high = 0;
5048 break;
5049 case LT_EXPR: /* - [c, -] */
5050 low = arg1, high = 0;
5051 break;
5052 case LE_EXPR: /* + [-, c] */
5053 in_p = ! in_p, low = 0, high = arg1;
5054 break;
5055 default:
5056 gcc_unreachable ();
5059 /* If this is an unsigned comparison, we also know that EXP is
5060 greater than or equal to zero. We base the range tests we make
5061 on that fact, so we record it here so we can parse existing
5062 range tests. We test arg0_type since often the return type
5063 of, e.g. EQ_EXPR, is boolean. */
5064 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5066 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5067 in_p, low, high, 1,
5068 build_int_cst (arg0_type, 0),
5069 NULL_TREE))
5070 return NULL_TREE;
5072 in_p = n_in_p, low = n_low, high = n_high;
5074 /* If the high bound is missing, but we have a nonzero low
5075 bound, reverse the range so it goes from zero to the low bound
5076 minus 1. */
5077 if (high == 0 && low && ! integer_zerop (low))
5079 in_p = ! in_p;
5080 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5081 build_int_cst (TREE_TYPE (low), 1), 0);
5082 low = build_int_cst (arg0_type, 0);
5086 *p_low = low;
5087 *p_high = high;
5088 *p_in_p = in_p;
5089 return arg0;
5091 case NEGATE_EXPR:
5092 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5093 low and high are non-NULL; the normalize step will then do the right thing. */
5094 if (!TYPE_UNSIGNED (arg0_type)
5095 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5097 if (low == NULL_TREE)
5098 low = TYPE_MIN_VALUE (arg0_type);
5099 if (high == NULL_TREE)
5100 high = TYPE_MAX_VALUE (arg0_type);
5103 /* (-x) IN [a,b] -> x in [-b, -a] */
5104 n_low = range_binop (MINUS_EXPR, exp_type,
5105 build_int_cst (exp_type, 0),
5106 0, high, 1);
5107 n_high = range_binop (MINUS_EXPR, exp_type,
5108 build_int_cst (exp_type, 0),
5109 0, low, 0);
5110 if (n_high != 0 && TREE_OVERFLOW (n_high))
5111 return NULL_TREE;
5112 goto normalize;
5114 case BIT_NOT_EXPR:
5115 /* ~ X -> -X - 1 */
5116 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5117 build_int_cst (exp_type, 1));
5119 case PLUS_EXPR:
5120 case MINUS_EXPR:
5121 if (TREE_CODE (arg1) != INTEGER_CST)
5122 return NULL_TREE;
5124 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5125 move a constant to the other side. */
5126 if (!TYPE_UNSIGNED (arg0_type)
5127 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5128 return NULL_TREE;
5130 /* If EXP is signed, any overflow in the computation is undefined,
5131 so we don't worry about it so long as our computations on
5132 the bounds don't overflow. For unsigned, overflow is defined
5133 and this is exactly the right thing. */
5134 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5135 arg0_type, low, 0, arg1, 0);
5136 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5137 arg0_type, high, 1, arg1, 0);
5138 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5139 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5140 return NULL_TREE;
5142 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5143 *strict_overflow_p = true;
5145 normalize:
5146 /* Check for an unsigned range which has wrapped around the maximum
5147 value thus making n_high < n_low, and normalize it. */
5148 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5150 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5151 build_int_cst (TREE_TYPE (n_high), 1), 0);
5152 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5153 build_int_cst (TREE_TYPE (n_low), 1), 0);
5155 /* If the range is of the form +/- [ x+1, x ], we won't
5156 be able to normalize it. But then, it represents the
5157 whole range or the empty set, so make it
5158 +/- [ -, - ]. */
5159 if (tree_int_cst_equal (n_low, low)
5160 && tree_int_cst_equal (n_high, high))
5161 low = high = 0;
5162 else
5163 in_p = ! in_p;
5165 else
5166 low = n_low, high = n_high;
5168 *p_low = low;
5169 *p_high = high;
5170 *p_in_p = in_p;
5171 return arg0;
5173 CASE_CONVERT:
5174 case NON_LVALUE_EXPR:
5175 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5176 return NULL_TREE;
5178 if (! INTEGRAL_TYPE_P (arg0_type)
5179 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5180 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5181 return NULL_TREE;
5183 n_low = low, n_high = high;
5185 if (n_low != 0)
5186 n_low = fold_convert_loc (loc, arg0_type, n_low);
5188 if (n_high != 0)
5189 n_high = fold_convert_loc (loc, arg0_type, n_high);
5191 /* If we're converting arg0 from an unsigned type to exp's
5192 signed type, we will be doing the comparison as unsigned.
5193 The tests above have already verified that LOW and HIGH
5194 are both positive.
5196 So we have to ensure that we will handle large unsigned
5197 values the same way that the current signed bounds treat
5198 negative values. */
5200 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5202 tree high_positive;
5203 tree equiv_type;
5204 /* For fixed-point modes, we need to pass the saturating flag
5205 as the 2nd parameter. */
5206 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5207 equiv_type
5208 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5209 TYPE_SATURATING (arg0_type));
5210 else
5211 equiv_type
5212 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5214 /* A range without an upper bound is, naturally, unbounded.
5215 Since convert would have cropped a very large value, use
5216 the max value for the destination type. */
5217 high_positive
5218 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5219 : TYPE_MAX_VALUE (arg0_type);
5221 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5222 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5223 fold_convert_loc (loc, arg0_type,
5224 high_positive),
5225 build_int_cst (arg0_type, 1));
5227 /* If the low bound is specified, "and" the range with the
5228 range for which the original unsigned value will be
5229 positive. */
5230 if (low != 0)
5232 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5233 1, fold_convert_loc (loc, arg0_type,
5234 integer_zero_node),
5235 high_positive))
5236 return NULL_TREE;
5238 in_p = (n_in_p == in_p);
5240 else
5242 /* Otherwise, "or" the range with the range of the input
5243 that will be interpreted as negative. */
5244 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5245 1, fold_convert_loc (loc, arg0_type,
5246 integer_zero_node),
5247 high_positive))
5248 return NULL_TREE;
5250 in_p = (in_p != n_in_p);
5254 *p_low = n_low;
5255 *p_high = n_high;
5256 *p_in_p = in_p;
5257 return arg0;
5259 default:
5260 return NULL_TREE;
5264 /* Given EXP, a logical expression, set the range it is testing into
5265 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5266 actually being tested. *PLOW and *PHIGH will be made of the same
5267 type as the returned expression. If EXP is not a comparison, we
5268 will most likely not be returning a useful value and range. Set
5269 *STRICT_OVERFLOW_P to true if the return value is only valid
5270 because signed overflow is undefined; otherwise, do not change
5271 *STRICT_OVERFLOW_P. */
5273 tree
5274 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5275 bool *strict_overflow_p)
5277 enum tree_code code;
5278 tree arg0, arg1 = NULL_TREE;
5279 tree exp_type, nexp;
5280 int in_p;
5281 tree low, high;
5282 location_t loc = EXPR_LOCATION (exp);
5284 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5285 and see if we can refine the range. Some of the cases below may not
5286 happen, but it doesn't seem worth worrying about this. We "continue"
5287 the outer loop when we've changed something; otherwise we "break"
5288 the switch, which will "break" the while. */
5290 in_p = 0;
5291 low = high = build_int_cst (TREE_TYPE (exp), 0);
5293 while (1)
5295 code = TREE_CODE (exp);
5296 exp_type = TREE_TYPE (exp);
5297 arg0 = NULL_TREE;
5299 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5301 if (TREE_OPERAND_LENGTH (exp) > 0)
5302 arg0 = TREE_OPERAND (exp, 0);
5303 if (TREE_CODE_CLASS (code) == tcc_binary
5304 || TREE_CODE_CLASS (code) == tcc_comparison
5305 || (TREE_CODE_CLASS (code) == tcc_expression
5306 && TREE_OPERAND_LENGTH (exp) > 1))
5307 arg1 = TREE_OPERAND (exp, 1);
5309 if (arg0 == NULL_TREE)
5310 break;
5312 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5313 &high, &in_p, strict_overflow_p);
5314 if (nexp == NULL_TREE)
5315 break;
5316 exp = nexp;
5319 /* If EXP is a constant, we can evaluate whether this is true or false. */
5320 if (TREE_CODE (exp) == INTEGER_CST)
5322 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5323 exp, 0, low, 0))
5324 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5325 exp, 1, high, 1)));
5326 low = high = 0;
5327 exp = 0;
5330 *pin_p = in_p, *plow = low, *phigh = high;
5331 return exp;
5334 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5335 a bitwise check, i.e. when
5336 LOW == 0xXX...X00...0
5337 HIGH == 0xXX...X11...1
5338 Return corresponding mask in MASK and stem in VALUE. */
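/* For example, LOW == 0x1200 and HIGH == 0x12ff qualify: the XOR of
   the bounds is 0x00ff, a contiguous run of low-order ones, so the
   range check becomes "(X & 0xff00) == 0x1200". A range such as
   [0x1200, 0x12fe] is rejected, since 0x00fe is not of the form
   2**N - 1. */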
5340 static bool
5341 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5342 tree *value)
5344 if (TREE_CODE (low) != INTEGER_CST
5345 || TREE_CODE (high) != INTEGER_CST)
5346 return false;
5348 unsigned prec = TYPE_PRECISION (type);
5349 wide_int lo = wi::to_wide (low, prec);
5350 wide_int hi = wi::to_wide (high, prec);
5352 wide_int end_mask = lo ^ hi;
5353 if ((end_mask & (end_mask + 1)) != 0
5354 || (lo & end_mask) != 0)
5355 return false;
5357 wide_int stem_mask = ~end_mask;
5358 wide_int stem = lo & stem_mask;
5359 if (stem != (hi & stem_mask))
5360 return false;
5362 *mask = wide_int_to_tree (type, stem_mask);
5363 *value = wide_int_to_tree (type, stem);
5365 return true;
5368 /* Helper routine for build_range_check and match.pd. Return the type to
5369 perform the check or NULL if it shouldn't be optimized. */
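/* For example, assuming 32-bit int, this returns "unsigned int":
   (unsigned) INT_MAX + 1 equals (unsigned) INT_MIN and unsigned
   arithmetic wraps. An enumeral type is first replaced by the
   integer type of the same precision. */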
5371 tree
5372 range_check_type (tree etype)
5374 /* First make sure that arithmetic in this type is valid, then make sure
5375 that it wraps around. */
5376 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5377 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5379 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5381 tree utype, minv, maxv;
5383 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5384 for the type in question, as we rely on this here. */
5385 utype = unsigned_type_for (etype);
5386 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5387 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5388 build_int_cst (TREE_TYPE (maxv), 1), 1);
5389 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5391 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5392 minv, 1, maxv, 1)))
5393 etype = utype;
5394 else
5395 return NULL_TREE;
5397 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5398 etype = unsigned_type_for (etype);
5399 return etype;
5402 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5403 type, TYPE, return an expression to test if EXP is in (or out of, depending
5404 on IN_P) the range. Return 0 if the test couldn't be created. */
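/* For example, a check of EXP against the range [3, 10] ends up as
   "(EXP - 3) <= 7" in the wrap-around check type computed below,
   and a degenerate range such as [5, 5] simply becomes "EXP == 5". */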
5406 tree
5407 build_range_check (location_t loc, tree type, tree exp, int in_p,
5408 tree low, tree high)
5410 tree etype = TREE_TYPE (exp), mask, value;
5412 /* Disable this optimization for function pointer expressions
5413 on targets that require function pointer canonicalization. */
5414 if (targetm.have_canonicalize_funcptr_for_compare ()
5415 && POINTER_TYPE_P (etype)
5416 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5417 return NULL_TREE;
5419 if (! in_p)
5421 value = build_range_check (loc, type, exp, 1, low, high);
5422 if (value != 0)
5423 return invert_truthvalue_loc (loc, value);
5425 return 0;
5428 if (low == 0 && high == 0)
5429 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5431 if (low == 0)
5432 return fold_build2_loc (loc, LE_EXPR, type, exp,
5433 fold_convert_loc (loc, etype, high));
5435 if (high == 0)
5436 return fold_build2_loc (loc, GE_EXPR, type, exp,
5437 fold_convert_loc (loc, etype, low));
5439 if (operand_equal_p (low, high, 0))
5440 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5441 fold_convert_loc (loc, etype, low));
5443 if (TREE_CODE (exp) == BIT_AND_EXPR
5444 && maskable_range_p (low, high, etype, &mask, &value))
5445 return fold_build2_loc (loc, EQ_EXPR, type,
5446 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5447 exp, mask),
5448 value);
5450 if (integer_zerop (low))
5452 if (! TYPE_UNSIGNED (etype))
5454 etype = unsigned_type_for (etype);
5455 high = fold_convert_loc (loc, etype, high);
5456 exp = fold_convert_loc (loc, etype, exp);
5458 return build_range_check (loc, type, exp, 1, 0, high);
5461 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5462 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5464 int prec = TYPE_PRECISION (etype);
5466 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5468 if (TYPE_UNSIGNED (etype))
5470 tree signed_etype = signed_type_for (etype);
5471 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5472 etype
5473 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5474 else
5475 etype = signed_etype;
5476 exp = fold_convert_loc (loc, etype, exp);
5478 return fold_build2_loc (loc, GT_EXPR, type, exp,
5479 build_int_cst (etype, 0));
5483 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5484 This requires wrap-around arithmetic for the type of the expression. */
5485 etype = range_check_type (etype);
5486 if (etype == NULL_TREE)
5487 return NULL_TREE;
5489 high = fold_convert_loc (loc, etype, high);
5490 low = fold_convert_loc (loc, etype, low);
5491 exp = fold_convert_loc (loc, etype, exp);
5493 value = const_binop (MINUS_EXPR, high, low);
5495 if (value != 0 && !TREE_OVERFLOW (value))
5496 return build_range_check (loc, type,
5497 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5498 1, build_int_cst (etype, 0), value);
5500 return 0;
5503 /* Return the predecessor of VAL in its type, handling the infinite case. */
5505 static tree
5506 range_predecessor (tree val)
5508 tree type = TREE_TYPE (val);
5510 if (INTEGRAL_TYPE_P (type)
5511 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5512 return 0;
5513 else
5514 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5515 build_int_cst (TREE_TYPE (val), 1), 0);
5518 /* Return the successor of VAL in its type, handling the infinite case. */
5520 static tree
5521 range_successor (tree val)
5523 tree type = TREE_TYPE (val);
5525 if (INTEGRAL_TYPE_P (type)
5526 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5527 return 0;
5528 else
5529 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5530 build_int_cst (TREE_TYPE (val), 1), 0);
5533 /* Given two ranges, see if we can merge them into one. Return 1 if we
5534 can, 0 if we can't. Set the output range into the specified parameters. */
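/* For example, merging "+ [2, 5]" with "+ [4, 9]" gives "+ [4, 5]",
   and merging the adjacent exclusions "- [2, 5]" with "- [6, 9]"
   gives "- [2, 9]". */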
5536 bool
5537 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5538 tree high0, int in1_p, tree low1, tree high1)
5540 int no_overlap;
5541 int subset;
5542 int temp;
5543 tree tem;
5544 int in_p;
5545 tree low, high;
5546 int lowequal = ((low0 == 0 && low1 == 0)
5547 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5548 low0, 0, low1, 0)));
5549 int highequal = ((high0 == 0 && high1 == 0)
5550 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5551 high0, 1, high1, 1)));
5553 /* Make range 0 be the range that starts first, or ends last if they
5554 start at the same value. Swap them if it isn't. */
5555 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5556 low0, 0, low1, 0))
5557 || (lowequal
5558 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5559 high1, 1, high0, 1))))
5561 temp = in0_p, in0_p = in1_p, in1_p = temp;
5562 tem = low0, low0 = low1, low1 = tem;
5563 tem = high0, high0 = high1, high1 = tem;
5566 /* If the second range is != high1 where high1 is the maximum value of
5567 the type, try first merging with the < high1 range. */
5568 if (low1
5569 && high1
5570 && TREE_CODE (low1) == INTEGER_CST
5571 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5572 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5573 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5574 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5575 && operand_equal_p (low1, high1, 0))
5577 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5578 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5579 !in1_p, NULL_TREE, range_predecessor (low1)))
5580 return true;
5581 /* Similarly, for the second range != low1 where low1 is the minimum value
5582 of the type, try first merging with the > low1 range. */
5583 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5584 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5585 !in1_p, range_successor (low1), NULL_TREE))
5586 return true;
5589 /* Now flag two cases, whether the ranges are disjoint or whether the
5590 second range is totally subsumed in the first. Note that the tests
5591 below are simplified by the ones above. */
5592 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5593 high0, 1, low1, 0));
5594 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5595 high1, 1, high0, 1));
5597 /* We now have four cases, depending on whether we are including or
5598 excluding the two ranges. */
5599 if (in0_p && in1_p)
5601 /* If they don't overlap, the result is false. If the second range
5602 is a subset it is the result. Otherwise, the range is from the start
5603 of the second to the end of the first. */
5604 if (no_overlap)
5605 in_p = 0, low = high = 0;
5606 else if (subset)
5607 in_p = 1, low = low1, high = high1;
5608 else
5609 in_p = 1, low = low1, high = high0;
5612 else if (in0_p && ! in1_p)
5614 /* If they don't overlap, the result is the first range. If they are
5615 equal, the result is false. If the second range is a subset of the
5616 first, and the ranges begin at the same place, we go from just after
5617 the end of the second range to the end of the first. If the second
5618 range is not a subset of the first, or if it is a subset and both
5619 ranges end at the same place, the range starts at the start of the
5620 first range and ends just before the second range.
5621 Otherwise, we can't describe this as a single range. */
5622 if (no_overlap)
5623 in_p = 1, low = low0, high = high0;
5624 else if (lowequal && highequal)
5625 in_p = 0, low = high = 0;
5626 else if (subset && lowequal)
5628 low = range_successor (high1);
5629 high = high0;
5630 in_p = 1;
5631 if (low == 0)
5633 /* We are in the weird situation where high0 > high1 but
5634 high1 has no successor. Punt. */
5635 return 0;
5638 else if (! subset || highequal)
5640 low = low0;
5641 high = range_predecessor (low1);
5642 in_p = 1;
5643 if (high == 0)
5645 /* low0 < low1 but low1 has no predecessor. Punt. */
5646 return 0;
5649 else
5650 return 0;
5653 else if (! in0_p && in1_p)
5655 /* If they don't overlap, the result is the second range. If the second
5656 is a subset of the first, the result is false. Otherwise,
5657 the range starts just after the first range and ends at the
5658 end of the second. */
5659 if (no_overlap)
5660 in_p = 1, low = low1, high = high1;
5661 else if (subset || highequal)
5662 in_p = 0, low = high = 0;
5663 else
5665 low = range_successor (high0);
5666 high = high1;
5667 in_p = 1;
5668 if (low == 0)
5670 /* high1 > high0 but high0 has no successor. Punt. */
5671 return 0;
5676 else
5678 /* The case where we are excluding both ranges. Here the complex case
5679 is if they don't overlap. In that case, the only time we have a
5680 range is if they are adjacent. If the second is a subset of the
5681 first, the result is the first. Otherwise, the range to exclude
5682 starts at the beginning of the first range and ends at the end of the
5683 second. */
5684 if (no_overlap)
5686 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5687 range_successor (high0),
5688 1, low1, 0)))
5689 in_p = 0, low = low0, high = high1;
5690 else
5692 /* Canonicalize - [min, x] into - [-, x]. */
5693 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5694 switch (TREE_CODE (TREE_TYPE (low0)))
5696 case ENUMERAL_TYPE:
5697 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5698 GET_MODE_BITSIZE
5699 (TYPE_MODE (TREE_TYPE (low0)))))
5700 break;
5701 /* FALLTHROUGH */
5702 case INTEGER_TYPE:
5703 if (tree_int_cst_equal (low0,
5704 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5705 low0 = 0;
5706 break;
5707 case POINTER_TYPE:
5708 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5709 && integer_zerop (low0))
5710 low0 = 0;
5711 break;
5712 default:
5713 break;
5716 /* Canonicalize - [x, max] into - [x, -]. */
5717 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5718 switch (TREE_CODE (TREE_TYPE (high1)))
5720 case ENUMERAL_TYPE:
5721 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5722 GET_MODE_BITSIZE
5723 (TYPE_MODE (TREE_TYPE (high1)))))
5724 break;
5725 /* FALLTHROUGH */
5726 case INTEGER_TYPE:
5727 if (tree_int_cst_equal (high1,
5728 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5729 high1 = 0;
5730 break;
5731 case POINTER_TYPE:
5732 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5733 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5734 high1, 1,
5735 build_int_cst (TREE_TYPE (high1), 1),
5736 1)))
5737 high1 = 0;
5738 break;
5739 default:
5740 break;
5743 /* The excluded ranges might also sit against the minimum and
5744 maximum values of the given type. For
5745 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5746 return + [x + 1, y - 1]. */
5747 if (low0 == 0 && high1 == 0)
5749 low = range_successor (high0);
5750 high = range_predecessor (low1);
5751 if (low == 0 || high == 0)
5752 return 0;
5754 in_p = 1;
5756 else
5757 return 0;
5760 else if (subset)
5761 in_p = 0, low = low0, high = high0;
5762 else
5763 in_p = 0, low = low0, high = high1;
5766 *pin_p = in_p, *plow = low, *phigh = high;
5767 return 1;
5771 /* Subroutine of fold, looking inside expressions of the form
5772 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5773 are the three operands of the COND_EXPR. This function is
5774 being used also to optimize A op B ? C : A, by reversing the
5775 comparison first.
5777 Return a folded expression whose code is not a COND_EXPR
5778 anymore, or NULL_TREE if no folding opportunity is found. */
5780 static tree
5781 fold_cond_expr_with_comparison (location_t loc, tree type,
5782 enum tree_code comp_code,
5783 tree arg00, tree arg01, tree arg1, tree arg2)
5785 tree arg1_type = TREE_TYPE (arg1);
5786 tree tem;
5788 STRIP_NOPS (arg1);
5789 STRIP_NOPS (arg2);
5791 /* If we have A op 0 ? A : -A, consider applying the following
5792 transformations:
5794 A == 0? A : -A same as -A
5795 A != 0? A : -A same as A
5796 A >= 0? A : -A same as abs (A)
5797 A > 0? A : -A same as abs (A)
5798 A <= 0? A : -A same as -abs (A)
5799 A < 0? A : -A same as -abs (A)
5801 None of these transformations work for modes with signed
5802 zeros. If A is +/-0, the first two transformations will
5803 change the sign of the result (from +0 to -0, or vice
5804 versa). The last four will fix the sign of the result,
5805 even though the original expressions could be positive or
5806 negative, depending on the sign of A.
5808 Note that all these transformations are correct if A is
5809 NaN, since the two alternatives (A and -A) are also NaNs. */
5810 if (!HONOR_SIGNED_ZEROS (type)
5811 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5812 ? real_zerop (arg01)
5813 : integer_zerop (arg01))
5814 && ((TREE_CODE (arg2) == NEGATE_EXPR
5815 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5816 /* In the case that A is of the form X-Y, '-A' (arg2) may
5817 have already been folded to Y-X, check for that. */
5818 || (TREE_CODE (arg1) == MINUS_EXPR
5819 && TREE_CODE (arg2) == MINUS_EXPR
5820 && operand_equal_p (TREE_OPERAND (arg1, 0),
5821 TREE_OPERAND (arg2, 1), 0)
5822 && operand_equal_p (TREE_OPERAND (arg1, 1),
5823 TREE_OPERAND (arg2, 0), 0))))
5824 switch (comp_code)
5826 case EQ_EXPR:
5827 case UNEQ_EXPR:
5828 tem = fold_convert_loc (loc, arg1_type, arg1);
5829 return fold_convert_loc (loc, type, negate_expr (tem));
5830 case NE_EXPR:
5831 case LTGT_EXPR:
5832 return fold_convert_loc (loc, type, arg1);
5833 case UNGE_EXPR:
5834 case UNGT_EXPR:
5835 if (flag_trapping_math)
5836 break;
5837 /* Fall through. */
5838 case GE_EXPR:
5839 case GT_EXPR:
5840 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5841 break;
5842 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5843 return fold_convert_loc (loc, type, tem);
5844 case UNLE_EXPR:
5845 case UNLT_EXPR:
5846 if (flag_trapping_math)
5847 break;
5848 /* FALLTHRU */
5849 case LE_EXPR:
5850 case LT_EXPR:
5851 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5852 break;
5853 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5854 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5856 /* A <= 0 ? A : -A for A being INT_MIN is valid, but -abs(INT_MIN)
5857 is not: it invokes UB both in abs and in the negation of it.
5858 So, use ABSU_EXPR instead. */
5859 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5860 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5861 tem = negate_expr (tem);
5862 return fold_convert_loc (loc, type, tem);
5864 else
5866 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5867 return negate_expr (fold_convert_loc (loc, type, tem));
5869 default:
5870 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5871 break;
5874 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5875 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5876 both transformations are correct when A is NaN: A != 0
5877 is then true, and A == 0 is false. */
5879 if (!HONOR_SIGNED_ZEROS (type)
5880 && integer_zerop (arg01) && integer_zerop (arg2))
5882 if (comp_code == NE_EXPR)
5883 return fold_convert_loc (loc, type, arg1);
5884 else if (comp_code == EQ_EXPR)
5885 return build_zero_cst (type);
5888 /* Try some transformations of A op B ? A : B.
5890 A == B? A : B same as B
5891 A != B? A : B same as A
5892 A >= B? A : B same as max (A, B)
5893 A > B? A : B same as max (B, A)
5894 A <= B? A : B same as min (A, B)
5895 A < B? A : B same as min (B, A)
5897 As above, these transformations don't work in the presence
5898 of signed zeros. For example, if A and B are zeros of
5899 opposite sign, the first two transformations will change
5900 the sign of the result. In the last four, the original
5901 expressions give different results for (A=+0, B=-0) and
5902 (A=-0, B=+0), but the transformed expressions do not.
5904 The first two transformations are correct if either A or B
5905 is a NaN. In the first transformation, the condition will
5906 be false, and B will indeed be chosen. In the case of the
5907 second transformation, the condition A != B will be true,
5908 and A will be chosen.
5910 The conversions to max() and min() are not correct if B is
5911 a number and A is not. The conditions in the original
5912 expressions will be false, so all four give B. The min()
5913 and max() versions would give a NaN instead. */
5914 if (!HONOR_SIGNED_ZEROS (type)
5915 && operand_equal_for_comparison_p (arg01, arg2)
5916 /* Avoid these transformations if the COND_EXPR may be used
5917 as an lvalue in the C++ front-end. PR c++/19199. */
5918 && (in_gimple_form
5919 || VECTOR_TYPE_P (type)
5920 || (! lang_GNU_CXX ()
5921 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5922 || ! maybe_lvalue_p (arg1)
5923 || ! maybe_lvalue_p (arg2)))
5925 tree comp_op0 = arg00;
5926 tree comp_op1 = arg01;
5927 tree comp_type = TREE_TYPE (comp_op0);
5929 switch (comp_code)
5931 case EQ_EXPR:
5932 return fold_convert_loc (loc, type, arg2);
5933 case NE_EXPR:
5934 return fold_convert_loc (loc, type, arg1);
5935 case LE_EXPR:
5936 case LT_EXPR:
5937 case UNLE_EXPR:
5938 case UNLT_EXPR:
5939 /* In C++ a ?: expression can be an lvalue, so put the
5940 operand which will be used if they are equal first
5941 so that we can convert this back to the
5942 corresponding COND_EXPR. */
5943 if (!HONOR_NANS (arg1))
5945 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5946 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5947 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5948 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5949 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5950 comp_op1, comp_op0);
5951 return fold_convert_loc (loc, type, tem);
5953 break;
5954 case GE_EXPR:
5955 case GT_EXPR:
5956 case UNGE_EXPR:
5957 case UNGT_EXPR:
5958 if (!HONOR_NANS (arg1))
5960 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5961 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5962 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5963 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5964 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5965 comp_op1, comp_op0);
5966 return fold_convert_loc (loc, type, tem);
5968 break;
5969 case UNEQ_EXPR:
5970 if (!HONOR_NANS (arg1))
5971 return fold_convert_loc (loc, type, arg2);
5972 break;
5973 case LTGT_EXPR:
5974 if (!HONOR_NANS (arg1))
5975 return fold_convert_loc (loc, type, arg1);
5976 break;
5977 default:
5978 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5979 break;
5983 return NULL_TREE;
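/* Sketches in plain C of folds performed above (illustrative helpers
   only; the integral cases avoid the signed-zero and NaN caveats
   spelled out in the comments):  */

static int abs_before (int a) { return a >= 0 ? a : -a; }       /* -> ABS_EXPR  */
static int min_before (int a, int b) { return a <= b ? a : b; } /* -> MIN_EXPR  */
static int max_before (int a, int b) { return a > b ? a : b; }  /* -> MAX_EXPR (operands commuted) */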
5988 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5989 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5990 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5991 false) >= 2)
5992 #endif
5994 /* EXP is some logical combination of boolean tests. See if we can
5995 merge it into some range test. Return the new tree if so. */
5997 static tree
5998 fold_range_test (location_t loc, enum tree_code code, tree type,
5999 tree op0, tree op1)
6001 int or_op = (code == TRUTH_ORIF_EXPR
6002 || code == TRUTH_OR_EXPR);
6003 int in0_p, in1_p, in_p;
6004 tree low0, low1, low, high0, high1, high;
6005 bool strict_overflow_p = false;
6006 tree tem, lhs, rhs;
6007 const char * const warnmsg = G_("assuming signed overflow does not occur "
6008 "when simplifying range test");
6010 if (!INTEGRAL_TYPE_P (type))
6011 return 0;
6013 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6014 /* If op0 is known true or false and this is a short-circuiting
6015 operation we must not merge with op1 since that makes side-effects
6016 unconditional. So special-case this. */
6017 if (!lhs
6018 && ((code == TRUTH_ORIF_EXPR && in0_p)
6019 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6020 return op0;
6021 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6023 /* If this is an OR operation, invert both sides; we will invert
6024 again at the end. */
6025 if (or_op)
6026 in0_p = ! in0_p, in1_p = ! in1_p;
6028 /* If both expressions are the same, if we can merge the ranges, and we
6029 can build the range test, return it or it inverted. If one of the
6030 ranges is always true or always false, consider it to be the same
6031 expression as the other. */
6032 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6033 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6034 in1_p, low1, high1)
6035 && (tem = (build_range_check (loc, type,
6036 lhs != 0 ? lhs
6037 : rhs != 0 ? rhs : integer_zero_node,
6038 in_p, low, high))) != 0)
6040 if (strict_overflow_p)
6041 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6042 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6045 /* On machines where the branch cost is expensive, if this is a
6046 short-circuited branch and the underlying object on both sides
6047 is the same, make a non-short-circuit operation. */
6048 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6049 if (param_logical_op_non_short_circuit != -1)
6050 logical_op_non_short_circuit
6051 = param_logical_op_non_short_circuit;
6052 if (logical_op_non_short_circuit
6053 && !sanitize_coverage_p ()
6054 && lhs != 0 && rhs != 0
6055 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6056 && operand_equal_p (lhs, rhs, 0))
6058 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6059 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6060 which cases we can't do this. */
6061 if (simple_operand_p (lhs))
6062 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6063 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6064 type, op0, op1);
6066 else if (!lang_hooks.decls.global_bindings_p ()
6067 && !CONTAINS_PLACEHOLDER_P (lhs))
6069 tree common = save_expr (lhs);
6071 if ((lhs = build_range_check (loc, type, common,
6072 or_op ? ! in0_p : in0_p,
6073 low0, high0)) != 0
6074 && (rhs = build_range_check (loc, type, common,
6075 or_op ? ! in1_p : in1_p,
6076 low1, high1)) != 0)
6078 if (strict_overflow_p)
6079 fold_overflow_warning (warnmsg,
6080 WARN_STRICT_OVERFLOW_COMPARISON);
6081 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6082 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6083 type, lhs, rhs);
6088 return 0;
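/* Plain-C sketch of the two rewrites above (hypothetical helpers).
   First, the two operands may merge into a single range test; second,
   on targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds and both sides
   test the same object, the short-circuit form drops its branch.  */

static int rt_before  (int x) { return x == 0 || x == 1; }  /* -> (unsigned) x <= 1  */
static int nsc_before (int x) { return x != 1 && x != 3; }  /* -> (x != 1) & (x != 3) */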
6091 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
6092 P-bit value. Arrange things so the extra bits will be set to zero if and
6093 only if C is sign-extended to its full width. If MASK is nonzero,
6094 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6096 static tree
6097 unextend (tree c, int p, int unsignedp, tree mask)
6099 tree type = TREE_TYPE (c);
6100 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6101 tree temp;
6103 if (p == modesize || unsignedp)
6104 return c;
6106 /* We work by getting just the sign bit into the low-order bit, then
6107 into the high-order bit, then sign-extend. We then XOR that value
6108 with C. */
6109 temp = build_int_cst (TREE_TYPE (c),
6110 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6112 /* We must use a signed type in order to get an arithmetic right shift.
6113 However, we must also avoid introducing accidental overflows, so that
6114 a subsequent call to integer_zerop will work. Hence we must
6115 do the type conversion here. At this point, the constant is either
6116 zero or one, and the conversion to a signed type can never overflow.
6117 We could get an overflow if this conversion is done anywhere else. */
6118 if (TYPE_UNSIGNED (type))
6119 temp = fold_convert (signed_type_for (type), temp);
6121 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6122 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6123 if (mask != 0)
6124 temp = const_binop (BIT_AND_EXPR, temp,
6125 fold_convert (TREE_TYPE (c), mask));
6126 /* If necessary, convert the type back to match the type of C. */
6127 if (TYPE_UNSIGNED (type))
6128 temp = fold_convert (type, temp);
6130 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
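/* The shift trick above in plain C, for a P-bit field held in a
   32-bit word (a sketch under that assumption, not the GCC routine):
   the XOR clears bits P..31 exactly when C was sign-extended.  */

#include <stdint.h>

static uint32_t unextend_sketch (uint32_t c, int p)  /* 1 <= p <= 31 */
{
  uint32_t sign = (c >> (p - 1)) & 1;     /* the field's sign bit         */
  uint32_t ext = sign ? ~0u << p : 0;     /* that bit copied into P..31   */
  return c ^ ext;                         /* zero there iff sign-extended */
}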
6133 /* For an expression that has the form
6134 (A && B) || ~B
6135 or
6136 (A || B) && ~B,
6137 we can drop one of the inner expressions and simplify to
6138 A || ~B
6139 or
6140 A && ~B
6141 LOC is the location of the resulting expression. OP is the inner
6142 logical operation (the left-hand side in the examples above) and CMPOP
6143 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6144 removing a condition that guards another, as in
6145 (A != NULL && A->...) || A == NULL
6146 which we must not transform. If RHS_ONLY is true, only eliminate the
6147 right-most operand of the inner logical operation. */
6149 static tree
6150 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6151 bool rhs_only)
6153 tree type = TREE_TYPE (cmpop);
6154 enum tree_code code = TREE_CODE (cmpop);
6155 enum tree_code truthop_code = TREE_CODE (op);
6156 tree lhs = TREE_OPERAND (op, 0);
6157 tree rhs = TREE_OPERAND (op, 1);
6158 tree orig_lhs = lhs, orig_rhs = rhs;
6159 enum tree_code rhs_code = TREE_CODE (rhs);
6160 enum tree_code lhs_code = TREE_CODE (lhs);
6161 enum tree_code inv_code;
6163 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6164 return NULL_TREE;
6166 if (TREE_CODE_CLASS (code) != tcc_comparison)
6167 return NULL_TREE;
6169 if (rhs_code == truthop_code)
6171 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6172 if (newrhs != NULL_TREE)
6174 rhs = newrhs;
6175 rhs_code = TREE_CODE (rhs);
6178 if (lhs_code == truthop_code && !rhs_only)
6180 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6181 if (newlhs != NULL_TREE)
6183 lhs = newlhs;
6184 lhs_code = TREE_CODE (lhs);
6188 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6189 if (inv_code == rhs_code
6190 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6191 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6192 return lhs;
6193 if (!rhs_only && inv_code == lhs_code
6194 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6195 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6196 return rhs;
6197 if (rhs != orig_rhs || lhs != orig_lhs)
6198 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6199 lhs, rhs);
6200 return NULL_TREE;
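/* Plain-C sketch of the simplification above (illustrative helpers):
   the arm of the inner operation that is the exact inverse of CMPOP
   is redundant and can be dropped.  */

static int arm_before (int a, int b, int c) { return (a < b && c != 0) || a >= b; }
static int arm_after  (int a, int b, int c) { return c != 0 || a >= b; }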
6203 /* Find ways of folding logical expressions of LHS and RHS:
6204 Try to merge two comparisons to the same innermost item.
6205 Look for range tests like "ch >= '0' && ch <= '9'".
6206 Look for combinations of simple terms on machines with expensive branches
6207 and evaluate the RHS unconditionally.
6209 For example, if we have p->a == 2 && p->b == 4 and we can make an
6210 object large enough to span both A and B, we can do this with a comparison
6211 against the object ANDed with the a mask.
6213 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6214 operations to do this with one comparison.
6216 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6217 function and the one above.
6219 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6220 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6222 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6223 two operands.
6225 We return the simplified tree or 0 if no optimization is possible. */
6227 static tree
6228 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6229 tree lhs, tree rhs)
6231 /* If this is the "or" of two comparisons, we can do something if
6232 the comparisons are NE_EXPR. If this is the "and", we can do something
6233 if the comparisons are EQ_EXPR. I.e.,
6234 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6236 WANTED_CODE is this operation code. For single bit fields, we can
6237 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6238 comparison for one-bit fields. */
6240 enum tree_code wanted_code;
6241 enum tree_code lcode, rcode;
6242 tree ll_arg, lr_arg, rl_arg, rr_arg;
6243 tree ll_inner, lr_inner, rl_inner, rr_inner;
6244 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6245 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6246 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6247 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6248 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6249 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6250 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6251 scalar_int_mode lnmode, rnmode;
6252 tree ll_mask, lr_mask, rl_mask, rr_mask;
6253 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6254 tree l_const, r_const;
6255 tree lntype, rntype, result;
6256 HOST_WIDE_INT first_bit, end_bit;
6257 int volatilep;
6259 /* Start by getting the comparison codes. Fail if anything is volatile.
6260 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6261 it were surrounded with a NE_EXPR. */
6263 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6264 return 0;
6266 lcode = TREE_CODE (lhs);
6267 rcode = TREE_CODE (rhs);
6269 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6271 lhs = build2 (NE_EXPR, truth_type, lhs,
6272 build_int_cst (TREE_TYPE (lhs), 0));
6273 lcode = NE_EXPR;
6276 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6278 rhs = build2 (NE_EXPR, truth_type, rhs,
6279 build_int_cst (TREE_TYPE (rhs), 0));
6280 rcode = NE_EXPR;
6283 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6284 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6285 return 0;
6287 ll_arg = TREE_OPERAND (lhs, 0);
6288 lr_arg = TREE_OPERAND (lhs, 1);
6289 rl_arg = TREE_OPERAND (rhs, 0);
6290 rr_arg = TREE_OPERAND (rhs, 1);
6292 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6293 if (simple_operand_p (ll_arg)
6294 && simple_operand_p (lr_arg))
6296 if (operand_equal_p (ll_arg, rl_arg, 0)
6297 && operand_equal_p (lr_arg, rr_arg, 0))
6299 result = combine_comparisons (loc, code, lcode, rcode,
6300 truth_type, ll_arg, lr_arg);
6301 if (result)
6302 return result;
6304 else if (operand_equal_p (ll_arg, rr_arg, 0)
6305 && operand_equal_p (lr_arg, rl_arg, 0))
6307 result = combine_comparisons (loc, code, lcode,
6308 swap_tree_comparison (rcode),
6309 truth_type, ll_arg, lr_arg);
6310 if (result)
6311 return result;
6315 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6316 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6318 /* If the RHS can be evaluated unconditionally and its operands are
6319 simple, it wins to evaluate the RHS unconditionally on machines
6320 with expensive branches. In this case, this isn't a comparison
6321 that can be merged. */
6323 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6324 false) >= 2
6325 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6326 && simple_operand_p (rl_arg)
6327 && simple_operand_p (rr_arg))
6329 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6330 if (code == TRUTH_OR_EXPR
6331 && lcode == NE_EXPR && integer_zerop (lr_arg)
6332 && rcode == NE_EXPR && integer_zerop (rr_arg)
6333 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6334 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6335 return build2_loc (loc, NE_EXPR, truth_type,
6336 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6337 ll_arg, rl_arg),
6338 build_int_cst (TREE_TYPE (ll_arg), 0));
6340 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6341 if (code == TRUTH_AND_EXPR
6342 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6343 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6344 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6345 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6346 return build2_loc (loc, EQ_EXPR, truth_type,
6347 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6348 ll_arg, rl_arg),
6349 build_int_cst (TREE_TYPE (ll_arg), 0));
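      /* Illustration of the two rewrites above in plain C:
	   (a != 0) || (b != 0)  ->  (a | b) != 0
	   (a == 0) && (b == 0)  ->  (a | b) == 0
	 since a | b is zero exactly when both a and b are zero; this
	 trades a conditional branch for one cheap bitwise OR.  */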
6352 /* See if the comparisons can be merged. Then get all the parameters for
6353 each side. */
6355 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6356 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6357 return 0;
6359 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6360 volatilep = 0;
6361 ll_inner = decode_field_reference (loc, &ll_arg,
6362 &ll_bitsize, &ll_bitpos, &ll_mode,
6363 &ll_unsignedp, &ll_reversep, &volatilep,
6364 &ll_mask, &ll_and_mask);
6365 lr_inner = decode_field_reference (loc, &lr_arg,
6366 &lr_bitsize, &lr_bitpos, &lr_mode,
6367 &lr_unsignedp, &lr_reversep, &volatilep,
6368 &lr_mask, &lr_and_mask);
6369 rl_inner = decode_field_reference (loc, &rl_arg,
6370 &rl_bitsize, &rl_bitpos, &rl_mode,
6371 &rl_unsignedp, &rl_reversep, &volatilep,
6372 &rl_mask, &rl_and_mask);
6373 rr_inner = decode_field_reference (loc, &rr_arg,
6374 &rr_bitsize, &rr_bitpos, &rr_mode,
6375 &rr_unsignedp, &rr_reversep, &volatilep,
6376 &rr_mask, &rr_and_mask);
6378 /* The inner operation on the lhs of each comparison must be the
6379 same if we are to be able to do anything.
6380 Then see if we have constants. If not, the same must be true for
6381 the rhs's. */
6382 if (volatilep
6383 || ll_reversep != rl_reversep
6384 || ll_inner == 0 || rl_inner == 0
6385 || ! operand_equal_p (ll_inner, rl_inner, 0))
6386 return 0;
6388 if (TREE_CODE (lr_arg) == INTEGER_CST
6389 && TREE_CODE (rr_arg) == INTEGER_CST)
6391 l_const = lr_arg, r_const = rr_arg;
6392 lr_reversep = ll_reversep;
6394 else if (lr_reversep != rr_reversep
6395 || lr_inner == 0 || rr_inner == 0
6396 || ! operand_equal_p (lr_inner, rr_inner, 0))
6397 return 0;
6398 else
6399 l_const = r_const = 0;
6401 /* If either comparison code is not correct for our logical operation,
6402 fail. However, we can convert a one-bit comparison against zero into
6403 the opposite comparison against that bit being set in the field. */
6405 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6406 if (lcode != wanted_code)
6408 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6410 /* Make the left operand unsigned, since we are only interested
6411 in the value of one bit. Otherwise we are doing the wrong
6412 thing below. */
6413 ll_unsignedp = 1;
6414 l_const = ll_mask;
6416 else
6417 return 0;
6420 /* This is analogous to the code for l_const above. */
6421 if (rcode != wanted_code)
6423 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6425 rl_unsignedp = 1;
6426 r_const = rl_mask;
6428 else
6429 return 0;
6432 /* See if we can find a mode that contains both fields being compared on
6433 the left. If we can't, fail. Otherwise, update all constants and masks
6434 to be relative to a field of that size. */
6435 first_bit = MIN (ll_bitpos, rl_bitpos);
6436 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6437 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6438 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6439 volatilep, &lnmode))
6440 return 0;
6442 lnbitsize = GET_MODE_BITSIZE (lnmode);
6443 lnbitpos = first_bit & ~ (lnbitsize - 1);
6444 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6445 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6447 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6449 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6450 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6453 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6454 size_int (xll_bitpos));
6455 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6456 size_int (xrl_bitpos));
6457 if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6458 return 0;
6460 if (l_const)
6462 l_const = fold_convert_loc (loc, lntype, l_const);
6463 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6464 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6465 if (l_const == NULL_TREE)
6466 return 0;
6467 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6468 fold_build1_loc (loc, BIT_NOT_EXPR,
6469 lntype, ll_mask))))
6471 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6473 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6476 if (r_const)
6478 r_const = fold_convert_loc (loc, lntype, r_const);
6479 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6480 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6481 if (r_const == NULL_TREE)
6482 return 0;
6483 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6484 fold_build1_loc (loc, BIT_NOT_EXPR,
6485 lntype, rl_mask))))
6487 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6489 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6493 /* If the right sides are not constant, do the same for it. Also,
6494 disallow this optimization if a size, signedness or storage order
6495 mismatch occurs between the left and right sides. */
6496 if (l_const == 0)
6498 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6499 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6500 || ll_reversep != lr_reversep
6501 /* Make sure the two fields on the right
6502 correspond to the left without being swapped. */
6503 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6504 return 0;
6506 first_bit = MIN (lr_bitpos, rr_bitpos);
6507 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6508 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6509 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6510 volatilep, &rnmode))
6511 return 0;
6513 rnbitsize = GET_MODE_BITSIZE (rnmode);
6514 rnbitpos = first_bit & ~ (rnbitsize - 1);
6515 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6516 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6518 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6520 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6521 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6524 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6525 rntype, lr_mask),
6526 size_int (xlr_bitpos));
6527 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6528 rntype, rr_mask),
6529 size_int (xrr_bitpos));
6530 if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6531 return 0;
6533 /* Make a mask that corresponds to both fields being compared.
6534 Do this for both items being compared. If the operands are the
6535 same size and the bits being compared are in the same position
6536 then we can do this by masking both and comparing the masked
6537 results. */
6538 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6539 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6540 if (lnbitsize == rnbitsize
6541 && xll_bitpos == xlr_bitpos
6542 && lnbitpos >= 0
6543 && rnbitpos >= 0)
6545 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6546 lntype, lnbitsize, lnbitpos,
6547 ll_unsignedp || rl_unsignedp, ll_reversep);
6548 if (! all_ones_mask_p (ll_mask, lnbitsize))
6549 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6551 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6552 rntype, rnbitsize, rnbitpos,
6553 lr_unsignedp || rr_unsignedp, lr_reversep);
6554 if (! all_ones_mask_p (lr_mask, rnbitsize))
6555 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6557 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6560 /* There is still another way we can do something: If both pairs of
6561 fields being compared are adjacent, we may be able to make a wider
6562 field containing them both.
6564 Note that we still must mask the lhs/rhs expressions. Furthermore,
6565 the mask must be shifted to account for the shift done by
6566 make_bit_field_ref. */
6567 if (((ll_bitsize + ll_bitpos == rl_bitpos
6568 && lr_bitsize + lr_bitpos == rr_bitpos)
6569 || (ll_bitpos == rl_bitpos + rl_bitsize
6570 && lr_bitpos == rr_bitpos + rr_bitsize))
6571 && ll_bitpos >= 0
6572 && rl_bitpos >= 0
6573 && lr_bitpos >= 0
6574 && rr_bitpos >= 0)
6576 tree type;
6578 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6579 ll_bitsize + rl_bitsize,
6580 MIN (ll_bitpos, rl_bitpos),
6581 ll_unsignedp, ll_reversep);
6582 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6583 lr_bitsize + rr_bitsize,
6584 MIN (lr_bitpos, rr_bitpos),
6585 lr_unsignedp, lr_reversep);
6587 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6588 size_int (MIN (xll_bitpos, xrl_bitpos)));
6589 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6590 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6591 if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6592 return 0;
6594 /* Convert to the smaller type before masking out unwanted bits. */
6595 type = lntype;
6596 if (lntype != rntype)
6598 if (lnbitsize > rnbitsize)
6600 lhs = fold_convert_loc (loc, rntype, lhs);
6601 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6602 type = rntype;
6604 else if (lnbitsize < rnbitsize)
6606 rhs = fold_convert_loc (loc, lntype, rhs);
6607 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6608 type = lntype;
6612 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6613 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6615 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6616 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6618 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6621 return 0;
6624 /* Handle the case of comparisons with constants. If there is something in
6625 common between the masks, those bits of the constants must be the same.
6626 If not, the combined condition is known at compile time. Test for this
6627 to avoid generating incorrect code below. */
6628 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6629 if (! integer_zerop (result)
6630 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6631 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6633 if (wanted_code == NE_EXPR)
6635 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6636 return constant_boolean_node (true, truth_type);
6638 else
6640 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6641 return constant_boolean_node (false, truth_type);
6645 if (lnbitpos < 0)
6646 return 0;
6648 /* Construct the expression we will return. First get the component
6649 reference we will make. Unless the mask is all ones the width of
6650 that field, perform the mask operation. Then compare with the
6651 merged constant. */
6652 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6653 lntype, lnbitsize, lnbitpos,
6654 ll_unsignedp || rl_unsignedp, ll_reversep);
6656 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6657 if (! all_ones_mask_p (ll_mask, lnbitsize))
6658 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6660 return build2_loc (loc, wanted_code, truth_type, result,
6661 const_binop (BIT_IOR_EXPR, l_const, r_const));
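/* Sketch in plain C of the field-merging fold above (hypothetical
   struct layout; the merged mask and constant depend on endianness
   and the ABI): both bitfield tests become one load, one mask and
   one compare against the merged constant.  */

struct two_fields { unsigned a : 4; unsigned b : 4; };
static int tf_before (struct two_fields s) { return s.a == 2 && s.b == 4; }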
6664 /* T is an integer expression that is being multiplied, divided, or taken a
6665 modulus (CODE says which and what kind of divide or modulus) by a
6666 constant C. See if we can eliminate that operation by folding it with
6667 other operations already in T. WIDE_TYPE, if non-null, is a type that
6668 should be used for the computation if wider than our type.
6670 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6671 (X * 2) + (Y * 4). We must, however, be assured that either the original
6672 expression would not overflow or that overflow is undefined for the type
6673 in the language in question.
6675 If we return a non-null expression, it is an equivalent form of the
6676 original computation, but need not be in the original type.
6678 We set *STRICT_OVERFLOW_P to true if the return value depends on
6679 signed overflow being undefined. Otherwise we do not change
6680 *STRICT_OVERFLOW_P. */
6682 static tree
6683 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6684 bool *strict_overflow_p)
6686 /* To avoid exponential search depth, refuse to allow recursion past
6687 three levels. Beyond that (1) it's highly unlikely that we'll find
6688 something interesting and (2) we've probably processed it before
6689 when we built the inner expression. */
6691 static int depth;
6692 tree ret;
6694 if (depth > 3)
6695 return NULL;
6697 depth++;
6698 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6699 depth--;
6701 return ret;
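/* Worked example in plain C of what extract_muldiv achieves (the
   example from the comment above; valid because signed overflow is
   undefined here, so the division may be distributed):  */

static long em_before (long x, long y) { return (x * 8 + y * 16) / 4; }
static long em_after  (long x, long y) { return x * 2 + y * 4; }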
6704 static tree
6705 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6706 bool *strict_overflow_p)
6708 tree type = TREE_TYPE (t);
6709 enum tree_code tcode = TREE_CODE (t);
6710 tree ctype = (wide_type != 0
6711 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6712 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6713 ? wide_type : type);
6714 tree t1, t2;
6715 int same_p = tcode == code;
6716 tree op0 = NULL_TREE, op1 = NULL_TREE;
6717 bool sub_strict_overflow_p;
6719 /* Don't deal with constants of zero here; they confuse the code below. */
6720 if (integer_zerop (c))
6721 return NULL_TREE;
6723 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6724 op0 = TREE_OPERAND (t, 0);
6726 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6727 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6729 /* Note that we need not handle conditional operations here since fold
6730 already handles those cases. So just do arithmetic here. */
6731 switch (tcode)
6733 case INTEGER_CST:
6734 /* For a constant, we can always simplify if we are a multiply
6735 or (for divide and modulus) if it is a multiple of our constant. */
6736 if (code == MULT_EXPR
6737 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6738 TYPE_SIGN (type)))
6740 tree tem = const_binop (code, fold_convert (ctype, t),
6741 fold_convert (ctype, c));
6742 /* If the multiplication overflowed, we lost information on it.
6743 See PR68142 and PR69845. */
6744 if (TREE_OVERFLOW (tem))
6745 return NULL_TREE;
6746 return tem;
6748 break;
6750 CASE_CONVERT: case NON_LVALUE_EXPR:
6751 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6752 break;
6753 /* If op0 is an expression ... */
6754 if ((COMPARISON_CLASS_P (op0)
6755 || UNARY_CLASS_P (op0)
6756 || BINARY_CLASS_P (op0)
6757 || VL_EXP_CLASS_P (op0)
6758 || EXPRESSION_CLASS_P (op0))
6759 /* ... and has wrapping overflow, and its type is smaller
6760 than ctype, then we cannot pass through as widening. */
6761 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6762 && (TYPE_PRECISION (ctype)
6763 > TYPE_PRECISION (TREE_TYPE (op0))))
6764 /* ... or this is a truncation (t is narrower than op0),
6765 then we cannot pass through this narrowing. */
6766 || (TYPE_PRECISION (type)
6767 < TYPE_PRECISION (TREE_TYPE (op0)))
6768 /* ... or signedness changes for division or modulus,
6769 then we cannot pass through this conversion. */
6770 || (code != MULT_EXPR
6771 && (TYPE_UNSIGNED (ctype)
6772 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6773 /* ... or has undefined overflow while the converted to
6774 type has not, we cannot do the operation in the inner type
6775 as that would introduce undefined overflow. */
6776 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6777 && !TYPE_OVERFLOW_UNDEFINED (type))))
6778 break;
6780 /* Pass the constant down and see if we can make a simplification. If
6781 we can, replace this expression with the inner simplification for
6782 possible later conversion to our or some other type. */
6783 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6784 && TREE_CODE (t2) == INTEGER_CST
6785 && !TREE_OVERFLOW (t2)
6786 && (t1 = extract_muldiv (op0, t2, code,
6787 code == MULT_EXPR ? ctype : NULL_TREE,
6788 strict_overflow_p)) != 0)
6789 return t1;
6790 break;
6792 case ABS_EXPR:
6793 /* If widening the type changes it from signed to unsigned, then we
6794 must avoid building ABS_EXPR itself as unsigned. */
6795 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6797 tree cstype = (*signed_type_for) (ctype);
6798 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6799 != 0)
6801 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6802 return fold_convert (ctype, t1);
6804 break;
6806 /* If the constant is negative, we cannot simplify this. */
6807 if (tree_int_cst_sgn (c) == -1)
6808 break;
6809 /* FALLTHROUGH */
6810 case NEGATE_EXPR:
6811 /* For division and modulus, type can't be unsigned, as e.g.
6812 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6813 For signed types, even with wrapping overflow, this is fine. */
6814 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6815 break;
6816 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6817 != 0)
6818 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6819 break;
6821 case MIN_EXPR: case MAX_EXPR:
6822 /* If widening the type changes the signedness, then we can't perform
6823 this optimization as that changes the result. */
6824 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6825 break;
6827 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6828 sub_strict_overflow_p = false;
6829 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6830 &sub_strict_overflow_p)) != 0
6831 && (t2 = extract_muldiv (op1, c, code, wide_type,
6832 &sub_strict_overflow_p)) != 0)
6834 if (tree_int_cst_sgn (c) < 0)
6835 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6836 if (sub_strict_overflow_p)
6837 *strict_overflow_p = true;
6838 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6839 fold_convert (ctype, t2));
6841 break;
6843 case LSHIFT_EXPR: case RSHIFT_EXPR:
6844 /* If the second operand is constant, this is a multiplication
6845 or floor division, by a power of two, so we can treat it that
6846 way unless the multiplier or divisor overflows. Signed
6847 left-shift overflow is implementation-defined rather than
6848 undefined in C90, so do not convert signed left shift into
6849 multiplication. */
6850 if (TREE_CODE (op1) == INTEGER_CST
6851 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6852 /* const_binop may not detect overflow correctly,
6853 so check for it explicitly here. */
6854 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6855 wi::to_wide (op1))
6856 && (t1 = fold_convert (ctype,
6857 const_binop (LSHIFT_EXPR, size_one_node,
6858 op1))) != 0
6859 && !TREE_OVERFLOW (t1))
6860 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6861 ? MULT_EXPR : FLOOR_DIV_EXPR,
6862 ctype,
6863 fold_convert (ctype, op0),
6864 t1),
6865 c, code, wide_type, strict_overflow_p);
6866 break;
6868 case PLUS_EXPR: case MINUS_EXPR:
6869 /* See if we can eliminate the operation on both sides. If we can, we
6870 can return a new PLUS or MINUS. If we can't, the only remaining
6871 cases where we can do anything are if the second operand is a
6872 constant. */
6873 sub_strict_overflow_p = false;
6874 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6875 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6876 if (t1 != 0 && t2 != 0
6877 && TYPE_OVERFLOW_WRAPS (ctype)
6878 && (code == MULT_EXPR
6879 /* If not multiplication, we can only do this if both operands
6880 are divisible by c. */
6881 || (multiple_of_p (ctype, op0, c)
6882 && multiple_of_p (ctype, op1, c))))
6884 if (sub_strict_overflow_p)
6885 *strict_overflow_p = true;
6886 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6887 fold_convert (ctype, t2));
6890 /* If this was a subtraction, negate OP1 and set it to be an addition.
6891 This simplifies the logic below. */
6892 if (tcode == MINUS_EXPR)
6894 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6895 /* If OP1 was not easily negatable, the constant may be OP0. */
6896 if (TREE_CODE (op0) == INTEGER_CST)
6898 std::swap (op0, op1);
6899 std::swap (t1, t2);
6903 if (TREE_CODE (op1) != INTEGER_CST)
6904 break;
6906 /* If either OP1 or C are negative, this optimization is not safe for
6907 some of the division and remainder types while for others we need
6908 to change the code. */
6909 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6911 if (code == CEIL_DIV_EXPR)
6912 code = FLOOR_DIV_EXPR;
6913 else if (code == FLOOR_DIV_EXPR)
6914 code = CEIL_DIV_EXPR;
6915 else if (code != MULT_EXPR
6916 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6917 break;
6920 /* If it's a multiply or a division/modulus operation of a multiple
6921 of our constant, do the operation and verify it doesn't overflow. */
6922 if (code == MULT_EXPR
6923 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6924 TYPE_SIGN (type)))
6926 op1 = const_binop (code, fold_convert (ctype, op1),
6927 fold_convert (ctype, c));
6928 /* We allow the constant to overflow with wrapping semantics. */
6929 if (op1 == 0
6930 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6931 break;
6933 else
6934 break;
6936 /* If we have an unsigned type, we cannot widen the operation since it
6937 will change the result if the original computation overflowed. */
6938 if (TYPE_UNSIGNED (ctype) && ctype != type)
6939 break;
6941 /* The last case is if we are a multiply. In that case, we can
6942 apply the distributive law to commute the multiply and addition
6943 if the multiplication of the constants doesn't overflow
6944 and overflow is defined. With undefined overflow
6945 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6946 But fold_plusminus_mult_expr would factor back any power-of-two
6947 value so do not distribute in the first place in this case. */
6948 if (code == MULT_EXPR
6949 && TYPE_OVERFLOW_WRAPS (ctype)
6950 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6951 return fold_build2 (tcode, ctype,
6952 fold_build2 (code, ctype,
6953 fold_convert (ctype, op0),
6954 fold_convert (ctype, c)),
6955 op1);
6957 break;
6959 case MULT_EXPR:
6960 /* We have a special case here if we are doing something like
6961 (C * 8) % 4 since we know that's zero. */
6962 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6963 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6964 /* If the multiplication can overflow we cannot optimize this. */
6965 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6966 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6967 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6968 TYPE_SIGN (type)))
6970 *strict_overflow_p = true;
6971 return omit_one_operand (type, integer_zero_node, op0);
6974 /* ... fall through ... */
6976 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6977 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6978 /* If we can extract our operation from the LHS, do so and return a
6979 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6980 do something only if the second operand is a constant. */
6981 if (same_p
6982 && TYPE_OVERFLOW_WRAPS (ctype)
6983 && (t1 = extract_muldiv (op0, c, code, wide_type,
6984 strict_overflow_p)) != 0)
6985 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6986 fold_convert (ctype, op1));
6987 else if (tcode == MULT_EXPR && code == MULT_EXPR
6988 && TYPE_OVERFLOW_WRAPS (ctype)
6989 && (t1 = extract_muldiv (op1, c, code, wide_type,
6990 strict_overflow_p)) != 0)
6991 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6992 fold_convert (ctype, t1));
6993 else if (TREE_CODE (op1) != INTEGER_CST)
6994 return 0;
6996 /* If these are the same operation types, we can associate them
6997 assuming no overflow. */
6998 if (tcode == code)
7000 bool overflow_p = false;
7001 wi::overflow_type overflow_mul;
7002 signop sign = TYPE_SIGN (ctype);
7003 unsigned prec = TYPE_PRECISION (ctype);
7004 wide_int mul = wi::mul (wi::to_wide (op1, prec),
7005 wi::to_wide (c, prec),
7006 sign, &overflow_mul);
7007 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7008 if (overflow_mul
7009 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7010 overflow_p = true;
7011 if (!overflow_p)
7012 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7013 wide_int_to_tree (ctype, mul));
7016 /* If these operations "cancel" each other, we have the main
7017 optimizations of this pass, which occur when either constant is a
7018 multiple of the other, in which case we replace this with an
7019 operation of CODE or TCODE.
7021 If we have an unsigned type, we cannot do this since it will change
7022 the result if the original computation overflowed. */
7023 if (TYPE_OVERFLOW_UNDEFINED (ctype)
7024 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7025 || (tcode == MULT_EXPR
7026 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7027 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7028 && code != MULT_EXPR)))
7030 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7031 TYPE_SIGN (type)))
7033 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7034 *strict_overflow_p = true;
7035 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7036 fold_convert (ctype,
7037 const_binop (TRUNC_DIV_EXPR,
7038 op1, c)));
7040 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7041 TYPE_SIGN (type)))
7043 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7044 *strict_overflow_p = true;
7045 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7046 fold_convert (ctype,
7047 const_binop (TRUNC_DIV_EXPR,
7048 c, op1)));
7051 break;
7053 default:
7054 break;
7057 return 0;
7060 /* Return a node which has the indicated constant VALUE (either 0 or
7061 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7062 and is of the indicated TYPE. */
7064 tree
7065 constant_boolean_node (bool value, tree type)
7067 if (type == integer_type_node)
7068 return value ? integer_one_node : integer_zero_node;
7069 else if (type == boolean_type_node)
7070 return value ? boolean_true_node : boolean_false_node;
7071 else if (TREE_CODE (type) == VECTOR_TYPE)
7072 return build_vector_from_val (type,
7073 build_int_cst (TREE_TYPE (type),
7074 value ? -1 : 0));
7075 else
7076 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7080 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7081 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7082 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7083 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7084 COND is the first argument to CODE; otherwise (as in the example
7085 given here), it is the second argument. TYPE is the type of the
7086 original expression. Return NULL_TREE if no simplification is
7087 possible. */
7089 static tree
7090 fold_binary_op_with_conditional_arg (location_t loc,
7091 enum tree_code code,
7092 tree type, tree op0, tree op1,
7093 tree cond, tree arg, int cond_first_p)
7095 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7096 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7097 tree test, true_value, false_value;
7098 tree lhs = NULL_TREE;
7099 tree rhs = NULL_TREE;
7100 enum tree_code cond_code = COND_EXPR;
7102 /* Do not move possibly trapping operations into the conditional as this
7103 pessimizes code and causes gimplification issues when applied late. */
7104 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7105 ANY_INTEGRAL_TYPE_P (type)
7106 && TYPE_OVERFLOW_TRAPS (type), op1))
7107 return NULL_TREE;
7109 if (TREE_CODE (cond) == COND_EXPR
7110 || TREE_CODE (cond) == VEC_COND_EXPR)
7112 test = TREE_OPERAND (cond, 0);
7113 true_value = TREE_OPERAND (cond, 1);
7114 false_value = TREE_OPERAND (cond, 2);
7115 /* If this operand throws an expression, then it does not make
7116 sense to try to perform a logical or arithmetic operation
7117 involving it. */
7118 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7119 lhs = true_value;
7120 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7121 rhs = false_value;
7123 else if (!(TREE_CODE (type) != VECTOR_TYPE
7124 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7126 tree testtype = TREE_TYPE (cond);
7127 test = cond;
7128 true_value = constant_boolean_node (true, testtype);
7129 false_value = constant_boolean_node (false, testtype);
7131 else
7132 /* Detect the case of mixing vector and scalar types - bail out. */
7133 return NULL_TREE;
7135 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7136 cond_code = VEC_COND_EXPR;
7138 /* This transformation is only worthwhile if we don't have to wrap ARG
7139 in a SAVE_EXPR and the operation can be simplified without recursing
7140 on at least one of the branches once its pushed inside the COND_EXPR. */
7141 if (!TREE_CONSTANT (arg)
7142 && (TREE_SIDE_EFFECTS (arg)
7143 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7144 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7145 return NULL_TREE;
7147 arg = fold_convert_loc (loc, arg_type, arg);
7148 if (lhs == 0)
7150 true_value = fold_convert_loc (loc, cond_type, true_value);
7151 if (cond_first_p)
7152 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7153 else
7154 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7156 if (rhs == 0)
7158 false_value = fold_convert_loc (loc, cond_type, false_value);
7159 if (cond_first_p)
7160 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7161 else
7162 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7165 /* Check that we have simplified at least one of the branches. */
7166 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7167 return NULL_TREE;
7169 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
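/* Plain-C sketch of the distribution above (illustrative helpers):
   pushing the addition into the arms is a win here because one arm
   then folds away entirely.  */

static int dist_before (int a, int b) { return a + (b ? 1 : 0); }
static int dist_after  (int a, int b) { return b ? a + 1 : a; }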
7173 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7175 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7176 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7177 if ARG - ZERO_ARG is the same as ARG.
7179 If ARG is NULL, check for any value of type TYPE.
7181 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7182 and finite. The problematic cases are when X is zero, and its mode
7183 has signed zeros. In the case of rounding towards -infinity,
7184 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7185 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7187 bool
7188 fold_real_zero_addition_p (const_tree type, const_tree arg,
7189 const_tree zero_arg, int negate)
7191 if (!real_zerop (zero_arg))
7192 return false;
7194 /* Don't allow the fold with -fsignaling-nans. */
7195 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7196 return false;
7198 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7199 if (!HONOR_SIGNED_ZEROS (type))
7200 return true;
7202 /* There is no case that is safe for all rounding modes. */
7203 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7204 return false;
7206 /* In a vector or complex, we would need to check the sign of all zeros. */
7207 if (TREE_CODE (zero_arg) == VECTOR_CST)
7208 zero_arg = uniform_vector_p (zero_arg);
7209 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7210 return false;
7212 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7213 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7214 negate = !negate;
7216 /* The mode has signed zeros, and we have to honor their sign.
7217 In this situation, there are only two cases we can return true for.
7218 (i) X - 0 is the same as X with default rounding.
7219 (ii) X + 0 is X when X can't possibly be -0.0. */
7220 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
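/* Demonstration of the signed-zero hazard above (IEEE 754 double,
   default rounding): X + 0.0 loses the sign of a negative zero,
   while X - 0.0 preserves it.  */

#include <math.h>

static int plus_keeps_sign (void)  { return signbit (-0.0 + 0.0); } /* 0: sign lost       */
static int minus_keeps_sign (void) { return signbit (-0.0 - 0.0); } /* nonzero: sign kept */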
7223 /* Subroutine of match.pd that optimizes comparisons of a division by
7224 a nonzero integer constant against an integer constant, i.e.
7225 X/C1 op C2.
7227 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7228 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7230 enum tree_code
7231 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7232 tree *hi, bool *neg_overflow)
7234 tree prod, tmp, type = TREE_TYPE (c1);
7235 signop sign = TYPE_SIGN (type);
7236 wi::overflow_type overflow;
7238 /* We have to do this the hard way to detect unsigned overflow.
7239 prod = int_const_binop (MULT_EXPR, c1, c2); */
7240 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7241 prod = force_fit_type (type, val, -1, overflow);
7242 *neg_overflow = false;
7244 if (sign == UNSIGNED)
7246 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7247 *lo = prod;
7249 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7250 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7251 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7253 else if (tree_int_cst_sgn (c1) >= 0)
7255 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7256 switch (tree_int_cst_sgn (c2))
7258 case -1:
7259 *neg_overflow = true;
7260 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7261 *hi = prod;
7262 break;
7264 case 0:
7265 *lo = fold_negate_const (tmp, type);
7266 *hi = tmp;
7267 break;
7269 case 1:
7270 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7271 *lo = prod;
7272 break;
7274 default:
7275 gcc_unreachable ();
7278 else
7280 /* A negative divisor reverses the relational operators. */
7281 code = swap_tree_comparison (code);
7283 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7284 switch (tree_int_cst_sgn (c2))
7286 case -1:
7287 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7288 *lo = prod;
7289 break;
7291 case 0:
7292 *hi = fold_negate_const (tmp, type);
7293 *lo = tmp;
7294 break;
7296 case 1:
7297 *neg_overflow = true;
7298 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7299 *hi = prod;
7300 break;
7302 default:
7303 gcc_unreachable ();
7307 if (code != EQ_EXPR && code != NE_EXPR)
7308 return code;
7310 if (TREE_OVERFLOW (*lo)
7311 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7312 *lo = NULL_TREE;
7313 if (TREE_OVERFLOW (*hi)
7314 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7315 *hi = NULL_TREE;
7317 return code;
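/* A worked example of the range derivation above: for unsigned X,
   "X / 3 == 2" gives prod = 6 and tmp = 3 - 1 = 2, hence *lo = 6 and
   *hi = 8, i.e. the comparison holds exactly for X in [6, 8].  The
   match.pd callers can then emit a range test such as X - 6 <= 2.  */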
7321 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7322 equality/inequality test, then return a simplified form of the test
7323 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
7324 result type. */
7326 static tree
7327 fold_single_bit_test_into_sign_test (location_t loc,
7328 enum tree_code code, tree arg0, tree arg1,
7329 tree result_type)
7331 /* If this is testing a single bit, we can optimize the test. */
7332 if ((code == NE_EXPR || code == EQ_EXPR)
7333 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7334 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7336 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7337 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7338 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7340 if (arg00 != NULL_TREE
7341 /* This is only a win if casting to a signed type is cheap,
7342 i.e. when arg00's type is not a partial mode. */
7343 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7345 tree stype = signed_type_for (TREE_TYPE (arg00));
7346 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7347 result_type,
7348 fold_convert_loc (loc, stype, arg00),
7349 build_int_cst (stype, 0));
7353 return NULL_TREE;
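/* For instance, for a 32-bit int x the test "(x & 0x80000000) != 0"
   inspects exactly the sign bit, so the routine above rewrites it as
   "x < 0", and "(x & 0x80000000) == 0" as "x >= 0".  */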
7356 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7357 equality/inequality test, then return a simplified form of
7358 the test using shifts and logical operations. Otherwise return
7359 NULL. RESULT_TYPE is the desired result type. */
7361 tree
7362 fold_single_bit_test (location_t loc, enum tree_code code,
7363 tree arg0, tree arg1, tree result_type)
7365 /* If this is testing a single bit, we can optimize the test. */
7366 if ((code == NE_EXPR || code == EQ_EXPR)
7367 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7368 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7370 tree inner = TREE_OPERAND (arg0, 0);
7371 tree type = TREE_TYPE (arg0);
7372 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7373 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7374 int ops_unsigned;
7375 tree signed_type, unsigned_type, intermediate_type;
7376 tree tem, one;
7378 /* First, see if we can fold the single bit test into a sign-bit
7379 test. */
7380 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7381 result_type);
7382 if (tem)
7383 return tem;
7385 /* Otherwise we have (A & C) != 0 where C is a single bit,
7386 convert that into ((A >> C2) & 1), where C2 = log2(C).
7387 Similarly for (A & C) == 0. */
7389 /* If INNER is a right shift of a constant and it plus BITNUM does
7390 not overflow, adjust BITNUM and INNER. */
7391 if (TREE_CODE (inner) == RSHIFT_EXPR
7392 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7393 && bitnum < TYPE_PRECISION (type)
7394 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7395 TYPE_PRECISION (type) - bitnum))
7397 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7398 inner = TREE_OPERAND (inner, 0);
7401 /* If we are going to be able to omit the AND below, we must do our
7402 operations as unsigned. If we must use the AND, we have a choice.
7403 Normally unsigned is faster, but for some machines signed is. */
7404 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7405 && !flag_syntax_only) ? 0 : 1;
7407 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7408 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7409 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7410 inner = fold_convert_loc (loc, intermediate_type, inner);
7412 if (bitnum != 0)
7413 inner = build2 (RSHIFT_EXPR, intermediate_type,
7414 inner, size_int (bitnum));
7416 one = build_int_cst (intermediate_type, 1);
7418 if (code == EQ_EXPR)
7419 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7421 /* Put the AND last so it can combine with more things. */
7422 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7424 /* Make sure to return the proper type. */
7425 inner = fold_convert_loc (loc, result_type, inner);
7427 return inner;
7429 return NULL_TREE;
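/* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", while "(x & 8) == 0"
   becomes "((x >> 3) ^ 1) & 1"; the XOR is emitted before the final
   AND so that only the EQ_EXPR form pays for the inversion.  */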
7432 /* Test whether it is preferable to swap two operands, ARG0 and
7433 ARG1, for example because ARG0 is an integer constant and ARG1
7434 isn't. */
7436 bool
7437 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7439 if (CONSTANT_CLASS_P (arg1))
7440 return 0;
7441 if (CONSTANT_CLASS_P (arg0))
7442 return 1;
7444 STRIP_NOPS (arg0);
7445 STRIP_NOPS (arg1);
7447 if (TREE_CONSTANT (arg1))
7448 return 0;
7449 if (TREE_CONSTANT (arg0))
7450 return 1;
7452 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7453 for commutative and comparison operators. Ensuring a canonical
7454 form allows the optimizers to find additional redundancies without
7455 having to explicitly check for both orderings. */
7456 if (TREE_CODE (arg0) == SSA_NAME
7457 && TREE_CODE (arg1) == SSA_NAME
7458 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7459 return 1;
7461 /* Put SSA_NAMEs last. */
7462 if (TREE_CODE (arg1) == SSA_NAME)
7463 return 0;
7464 if (TREE_CODE (arg0) == SSA_NAME)
7465 return 1;
7467 /* Put variables last. */
7468 if (DECL_P (arg1))
7469 return 0;
7470 if (DECL_P (arg0))
7471 return 1;
7473 return 0;
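/* The net effect is a canonical operand order for commutative codes:
   general expressions first, then DECLs, then SSA_NAMEs (lower
   version number first), with constants always last; e.g. "3 + b"
   is swapped to "b + 3".  */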
7477 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7478 means A >= Y && A != MAX, but in this case we know that
7479 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7481 static tree
7482 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7484 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7486 if (TREE_CODE (bound) == LT_EXPR)
7487 a = TREE_OPERAND (bound, 0);
7488 else if (TREE_CODE (bound) == GT_EXPR)
7489 a = TREE_OPERAND (bound, 1);
7490 else
7491 return NULL_TREE;
7493 typea = TREE_TYPE (a);
7494 if (!INTEGRAL_TYPE_P (typea)
7495 && !POINTER_TYPE_P (typea))
7496 return NULL_TREE;
7498 if (TREE_CODE (ineq) == LT_EXPR)
7500 a1 = TREE_OPERAND (ineq, 1);
7501 y = TREE_OPERAND (ineq, 0);
7503 else if (TREE_CODE (ineq) == GT_EXPR)
7505 a1 = TREE_OPERAND (ineq, 0);
7506 y = TREE_OPERAND (ineq, 1);
7508 else
7509 return NULL_TREE;
7511 if (TREE_TYPE (a1) != typea)
7512 return NULL_TREE;
7514 if (POINTER_TYPE_P (typea))
7516 /* Convert the pointers to integers before taking the difference. */
7517 tree ta = fold_convert_loc (loc, ssizetype, a);
7518 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7519 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7521 else
7522 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7524 if (!diff || !integer_onep (diff))
7525 return NULL_TREE;
7527 return fold_build2_loc (loc, GE_EXPR, type, a, y);
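/* E.g. for "a < n && a + 1 > b" this returns "a >= b": the bound
   a < n guarantees that a + 1 does not wrap, so a + 1 > b is
   equivalent to the nonsharp inequality a >= b.  */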
7530 /* Fold a sum or difference of at least one multiplication.
7531 Returns the folded tree or NULL if no simplification could be made. */
7533 static tree
7534 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7535 tree arg0, tree arg1)
7537 tree arg00, arg01, arg10, arg11;
7538 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7540 /* (A * C) +- (B * C) -> (A+-B) * C.
7541 (A * C) +- A -> A * (C+-1).
7542 We are most concerned about the case where C is a constant,
7543 but other combinations show up during loop reduction. Since
7544 it is not difficult, try all four possibilities. */
7546 if (TREE_CODE (arg0) == MULT_EXPR)
7548 arg00 = TREE_OPERAND (arg0, 0);
7549 arg01 = TREE_OPERAND (arg0, 1);
7551 else if (TREE_CODE (arg0) == INTEGER_CST)
7553 arg00 = build_one_cst (type);
7554 arg01 = arg0;
7556 else
7558 /* We cannot generate constant 1 for fract. */
7559 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7560 return NULL_TREE;
7561 arg00 = arg0;
7562 arg01 = build_one_cst (type);
7564 if (TREE_CODE (arg1) == MULT_EXPR)
7566 arg10 = TREE_OPERAND (arg1, 0);
7567 arg11 = TREE_OPERAND (arg1, 1);
7569 else if (TREE_CODE (arg1) == INTEGER_CST)
7571 arg10 = build_one_cst (type);
7572 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7573 the purpose of this canonicalization. */
7574 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7575 && negate_expr_p (arg1)
7576 && code == PLUS_EXPR)
7578 arg11 = negate_expr (arg1);
7579 code = MINUS_EXPR;
7581 else
7582 arg11 = arg1;
7584 else
7586 /* We cannot generate constant 1 for fract. */
7587 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7588 return NULL_TREE;
7589 arg10 = arg1;
7590 arg11 = build_one_cst (type);
7592 same = NULL_TREE;
7594 /* Prefer factoring a common non-constant. */
7595 if (operand_equal_p (arg00, arg10, 0))
7596 same = arg00, alt0 = arg01, alt1 = arg11;
7597 else if (operand_equal_p (arg01, arg11, 0))
7598 same = arg01, alt0 = arg00, alt1 = arg10;
7599 else if (operand_equal_p (arg00, arg11, 0))
7600 same = arg00, alt0 = arg01, alt1 = arg10;
7601 else if (operand_equal_p (arg01, arg10, 0))
7602 same = arg01, alt0 = arg00, alt1 = arg11;
7604 /* No identical multiplicands; see if we can find a common
7605 power-of-two factor in non-power-of-two multiplies. This
7606 can help in multi-dimensional array access. */
7607 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7609 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7610 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7611 HOST_WIDE_INT tmp;
7612 bool swap = false;
7613 tree maybe_same;
7615 /* Move min of absolute values to int11. */
7616 if (absu_hwi (int01) < absu_hwi (int11))
7618 tmp = int01, int01 = int11, int11 = tmp;
7619 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7620 maybe_same = arg01;
7621 swap = true;
7623 else
7624 maybe_same = arg11;
7626 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7627 if (factor > 1
7628 && pow2p_hwi (factor)
7629 && (int01 & (factor - 1)) == 0
7630 /* The remainder should not be a constant, otherwise we
7631 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7632 increase the number of multiplications necessary. */
7633 && TREE_CODE (arg10) != INTEGER_CST)
7635 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7636 build_int_cst (TREE_TYPE (arg00),
7637 int01 / int11));
7638 alt1 = arg10;
7639 same = maybe_same;
7640 if (swap)
7641 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7645 if (!same)
7646 return NULL_TREE;
7648 if (! ANY_INTEGRAL_TYPE_P (type)
7649 || TYPE_OVERFLOW_WRAPS (type)
7650 /* We are neither factoring zero nor minus one. */
7651 || TREE_CODE (same) == INTEGER_CST)
7652 return fold_build2_loc (loc, MULT_EXPR, type,
7653 fold_build2_loc (loc, code, type,
7654 fold_convert_loc (loc, type, alt0),
7655 fold_convert_loc (loc, type, alt1)),
7656 fold_convert_loc (loc, type, same));
7658 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7659 same may be minus one and thus the multiplication may overflow. Perform
7660 the sum operation in an unsigned type. */
7661 tree utype = unsigned_type_for (type);
7662 tree tem = fold_build2_loc (loc, code, utype,
7663 fold_convert_loc (loc, utype, alt0),
7664 fold_convert_loc (loc, utype, alt1));
7665 /* If the sum evaluated to a constant that is not -INF, the multiplication
7666 cannot overflow. */
7667 if (TREE_CODE (tem) == INTEGER_CST
7668 && (wi::to_wide (tem)
7669 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7670 return fold_build2_loc (loc, MULT_EXPR, type,
7671 fold_convert (type, tem), same);
7673 /* Do not resort to unsigned multiplication because
7674 we lose the no-overflow property of the expression. */
7675 return NULL_TREE;
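/* Two examples of the factorings above: "x * 3 + x" becomes "x * 4"
   via (A * C) +- A -> A * (C+-1), and the power-of-two matching turns
   "i * 8 + i * 2" into "(i * 4 + i) * 2".  When the common factor is
   not a constant and the type has undefined overflow, the sum is
   instead computed in the corresponding unsigned type, as done at the
   end of the function.  */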
7678 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7679 specified by EXPR into the buffer PTR of length LEN bytes.
7680 Return the number of bytes placed in the buffer, or zero
7681 upon failure. */
7683 static int
7684 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7686 tree type = TREE_TYPE (expr);
7687 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7688 int byte, offset, word, words;
7689 unsigned char value;
7691 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7692 return 0;
7693 if (off == -1)
7694 off = 0;
7696 if (ptr == NULL)
7697 /* Dry run. */
7698 return MIN (len, total_bytes - off);
7700 words = total_bytes / UNITS_PER_WORD;
7702 for (byte = 0; byte < total_bytes; byte++)
7704 int bitpos = byte * BITS_PER_UNIT;
7705 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7706 number of bytes. */
7707 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7709 if (total_bytes > UNITS_PER_WORD)
7711 word = byte / UNITS_PER_WORD;
7712 if (WORDS_BIG_ENDIAN)
7713 word = (words - 1) - word;
7714 offset = word * UNITS_PER_WORD;
7715 if (BYTES_BIG_ENDIAN)
7716 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7717 else
7718 offset += byte % UNITS_PER_WORD;
7720 else
7721 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7722 if (offset >= off && offset - off < len)
7723 ptr[offset - off] = value;
7725 return MIN (len, total_bytes - off);
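/* E.g. encoding the 32-bit INTEGER_CST 0x01020304 stores the bytes
   04 03 02 01 into PTR on a little-endian target and 01 02 03 04 on
   a big-endian one; with OFF == 2 and LEN == 1 only the byte at
   offset 2 of that target image is written, to PTR[0].  */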
7729 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7730 specified by EXPR into the buffer PTR of length LEN bytes.
7731 Return the number of bytes placed in the buffer, or zero
7732 upon failure. */
7734 static int
7735 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7737 tree type = TREE_TYPE (expr);
7738 scalar_mode mode = SCALAR_TYPE_MODE (type);
7739 int total_bytes = GET_MODE_SIZE (mode);
7740 FIXED_VALUE_TYPE value;
7741 tree i_value, i_type;
7743 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7744 return 0;
7746 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7748 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7749 return 0;
7751 value = TREE_FIXED_CST (expr);
7752 i_value = double_int_to_tree (i_type, value.data);
7754 return native_encode_int (i_value, ptr, len, off);
7758 /* Subroutine of native_encode_expr. Encode the REAL_CST
7759 specified by EXPR into the buffer PTR of length LEN bytes.
7760 Return the number of bytes placed in the buffer, or zero
7761 upon failure. */
7763 static int
7764 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7766 tree type = TREE_TYPE (expr);
7767 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7768 int byte, offset, word, words, bitpos;
7769 unsigned char value;
7771 /* There are always 32 bits in each long, no matter the size of
7772 the host's long. We handle floating point representations with
7773 up to 192 bits. */
7774 long tmp[6];
7776 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7777 return 0;
7778 if (off == -1)
7779 off = 0;
7781 if (ptr == NULL)
7782 /* Dry run. */
7783 return MIN (len, total_bytes - off);
7785 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7787 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7789 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7790 bitpos += BITS_PER_UNIT)
7792 byte = (bitpos / BITS_PER_UNIT) & 3;
7793 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7795 if (UNITS_PER_WORD < 4)
7797 word = byte / UNITS_PER_WORD;
7798 if (WORDS_BIG_ENDIAN)
7799 word = (words - 1) - word;
7800 offset = word * UNITS_PER_WORD;
7801 if (BYTES_BIG_ENDIAN)
7802 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7803 else
7804 offset += byte % UNITS_PER_WORD;
7806 else
7808 offset = byte;
7809 if (BYTES_BIG_ENDIAN)
7811 /* Reverse bytes within each long, or within the entire float
7812 if it's smaller than a long (for HFmode). */
7813 offset = MIN (3, total_bytes - 1) - offset;
7814 gcc_assert (offset >= 0);
7817 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7818 if (offset >= off
7819 && offset - off < len)
7820 ptr[offset - off] = value;
7822 return MIN (len, total_bytes - off);
7825 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7826 specified by EXPR into the buffer PTR of length LEN bytes.
7827 Return the number of bytes placed in the buffer, or zero
7828 upon failure. */
7830 static int
7831 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7833 int rsize, isize;
7834 tree part;
7836 part = TREE_REALPART (expr);
7837 rsize = native_encode_expr (part, ptr, len, off);
7838 if (off == -1 && rsize == 0)
7839 return 0;
7840 part = TREE_IMAGPART (expr);
7841 if (off != -1)
7842 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7843 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7844 len - rsize, off);
7845 if (off == -1 && isize != rsize)
7846 return 0;
7847 return rsize + isize;
7850 /* Like native_encode_vector, but only encode the first COUNT elements.
7851 The other arguments are as for native_encode_vector. */
7853 static int
7854 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7855 int off, unsigned HOST_WIDE_INT count)
7857 tree itype = TREE_TYPE (TREE_TYPE (expr));
7858 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7859 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7861 /* This is the only case in which elements can be smaller than a byte.
7862 Element 0 is always in the lsb of the containing byte. */
7863 unsigned int elt_bits = TYPE_PRECISION (itype);
7864 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7865 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7866 return 0;
7868 if (off == -1)
7869 off = 0;
7871 /* Zero the buffer and then set bits later where necessary. */
7872 int extract_bytes = MIN (len, total_bytes - off);
7873 if (ptr)
7874 memset (ptr, 0, extract_bytes);
7876 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7877 unsigned int first_elt = off * elts_per_byte;
7878 unsigned int extract_elts = extract_bytes * elts_per_byte;
7879 for (unsigned int i = 0; i < extract_elts; ++i)
7881 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7882 if (TREE_CODE (elt) != INTEGER_CST)
7883 return 0;
7885 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7887 unsigned int bit = i * elt_bits;
7888 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7891 return extract_bytes;
7894 int offset = 0;
7895 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7896 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7898 if (off >= size)
7900 off -= size;
7901 continue;
7903 tree elem = VECTOR_CST_ELT (expr, i);
7904 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7905 len - offset, off);
7906 if ((off == -1 && res != size) || res == 0)
7907 return 0;
7908 offset += res;
7909 if (offset >= len)
7910 return (off == -1 && i < count - 1) ? 0 : offset;
7911 if (off != -1)
7912 off = 0;
7914 return offset;
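/* For boolean vectors with 1-bit elements, eight elements share each
   output byte, element 0 in the least significant bit, so e.g. a
   16-element all-ones mask encodes as the two bytes ff ff.  All other
   vectors are encoded element by element via native_encode_expr.  */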
7917 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7918 specified by EXPR into the buffer PTR of length LEN bytes.
7919 Return the number of bytes placed in the buffer, or zero
7920 upon failure. */
7922 static int
7923 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7925 unsigned HOST_WIDE_INT count;
7926 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7927 return 0;
7928 return native_encode_vector_part (expr, ptr, len, off, count);
7932 /* Subroutine of native_encode_expr. Encode the STRING_CST
7933 specified by EXPR into the buffer PTR of length LEN bytes.
7934 Return the number of bytes placed in the buffer, or zero
7935 upon failure. */
7937 static int
7938 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7940 tree type = TREE_TYPE (expr);
7942 /* Wide-char strings are encoded in target byte order, so encoding
7943 them natively is trivial. */
7944 if (BITS_PER_UNIT != CHAR_BIT
7945 || TREE_CODE (type) != ARRAY_TYPE
7946 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7947 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7948 return 0;
7950 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7951 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7952 return 0;
7953 if (off == -1)
7954 off = 0;
7955 len = MIN (total_bytes - off, len);
7956 if (ptr == NULL)
7957 /* Dry run. */;
7958 else
7960 int written = 0;
7961 if (off < TREE_STRING_LENGTH (expr))
7963 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7964 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7966 memset (ptr + written, 0, len - written);
7968 return len;
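/* E.g. the STRING_CST "ab" of type char[4] encodes as the bytes
   'a' 'b' 0 0: anything past TREE_STRING_LENGTH is zero filled, which
   matches how the trailing part of such an array is initialized.  */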
7972 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7973 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7974 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7975 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7976 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7977 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7978 Return the number of bytes placed in the buffer, or zero upon failure. */
7980 int
7981 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7983 /* We don't support starting at a negative offset, and -1 is special. */
7984 if (off < -1)
7985 return 0;
7987 switch (TREE_CODE (expr))
7989 case INTEGER_CST:
7990 return native_encode_int (expr, ptr, len, off);
7992 case REAL_CST:
7993 return native_encode_real (expr, ptr, len, off);
7995 case FIXED_CST:
7996 return native_encode_fixed (expr, ptr, len, off);
7998 case COMPLEX_CST:
7999 return native_encode_complex (expr, ptr, len, off);
8001 case VECTOR_CST:
8002 return native_encode_vector (expr, ptr, len, off);
8004 case STRING_CST:
8005 return native_encode_string (expr, ptr, len, off);
8007 default:
8008 return 0;
8012 /* Try to find a type whose byte size is at most LEN bytes and at least
8013 FIELDSIZE bytes, with underlying mode precision/size a multiple
8014 of BITS_PER_UNIT. As native_{interpret,encode}_int work in terms of
8015 machine modes, we can't just use build_nonstandard_integer_type. */
8017 tree
8018 find_bitfield_repr_type (int fieldsize, int len)
8020 machine_mode mode;
8021 for (int pass = 0; pass < 2; pass++)
8023 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8024 FOR_EACH_MODE_IN_CLASS (mode, mclass)
8025 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8026 && known_eq (GET_MODE_PRECISION (mode),
8027 GET_MODE_BITSIZE (mode))
8028 && known_le (GET_MODE_SIZE (mode), len))
8030 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8031 if (ret && TYPE_MODE (ret) == mode)
8032 return ret;
8036 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8037 if (int_n_enabled_p[i]
8038 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8039 && int_n_trees[i].unsigned_type)
8041 tree ret = int_n_trees[i].unsigned_type;
8042 mode = TYPE_MODE (ret);
8043 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8044 && known_eq (GET_MODE_PRECISION (mode),
8045 GET_MODE_BITSIZE (mode))
8046 && known_le (GET_MODE_SIZE (mode), len))
8047 return ret;
8050 return NULL_TREE;
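/* E.g. FIELDSIZE == 3 with LEN >= 4 typically yields a 4-byte
   unsigned type (SImode on most targets): the narrowest integral
   mode whose precision equals its bitsize, which covers the field
   and still fits in LEN bytes.  */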
8053 /* Similar to native_encode_expr, but also handles CONSTRUCTORs, VCEs,
8054 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (in which case PTR
8055 must be non-NULL and OFF zero), then in addition to filling the
8056 bytes pointed to by PTR with the value, also clear any bits pointed
8057 to by MASK that are known to be initialized; the rest are kept as is,
8058 e.g. for uninitialized padding bits or uninitialized fields. */
8060 int
8061 native_encode_initializer (tree init, unsigned char *ptr, int len,
8062 int off, unsigned char *mask)
8064 int r;
8066 /* We don't support starting at a negative offset, and -1 is special. */
8067 if (off < -1 || init == NULL_TREE)
8068 return 0;
8070 gcc_assert (mask == NULL || (off == 0 && ptr));
8072 STRIP_NOPS (init);
8073 switch (TREE_CODE (init))
8075 case VIEW_CONVERT_EXPR:
8076 case NON_LVALUE_EXPR:
8077 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8078 mask);
8079 default:
8080 r = native_encode_expr (init, ptr, len, off);
8081 if (mask)
8082 memset (mask, 0, r);
8083 return r;
8084 case CONSTRUCTOR:
8085 tree type = TREE_TYPE (init);
8086 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8087 if (total_bytes < 0)
8088 return 0;
8089 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8090 return 0;
8091 int o = off == -1 ? 0 : off;
8092 if (TREE_CODE (type) == ARRAY_TYPE)
8094 tree min_index;
8095 unsigned HOST_WIDE_INT cnt;
8096 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8097 constructor_elt *ce;
8099 if (!TYPE_DOMAIN (type)
8100 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8101 return 0;
8103 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8104 if (fieldsize <= 0)
8105 return 0;
8107 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8108 if (ptr)
8109 memset (ptr, '\0', MIN (total_bytes - off, len));
8111 for (cnt = 0; ; cnt++)
8113 tree val = NULL_TREE, index = NULL_TREE;
8114 HOST_WIDE_INT pos = curpos, count = 0;
8115 bool full = false;
8116 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8118 val = ce->value;
8119 index = ce->index;
8121 else if (mask == NULL
8122 || CONSTRUCTOR_NO_CLEARING (init)
8123 || curpos >= total_bytes)
8124 break;
8125 else
8126 pos = total_bytes;
8128 if (index && TREE_CODE (index) == RANGE_EXPR)
8130 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8131 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8132 return 0;
8133 offset_int wpos
8134 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8135 - wi::to_offset (min_index),
8136 TYPE_PRECISION (sizetype));
8137 wpos *= fieldsize;
8138 if (!wi::fits_shwi_p (wpos))
8139 return 0;
8140 pos = wpos.to_shwi ();
8141 offset_int wcount
8142 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8143 - wi::to_offset (TREE_OPERAND (index, 0)),
8144 TYPE_PRECISION (sizetype));
8145 if (!wi::fits_shwi_p (wcount))
8146 return 0;
8147 count = wcount.to_shwi ();
8149 else if (index)
8151 if (TREE_CODE (index) != INTEGER_CST)
8152 return 0;
8153 offset_int wpos
8154 = wi::sext (wi::to_offset (index)
8155 - wi::to_offset (min_index),
8156 TYPE_PRECISION (sizetype));
8157 wpos *= fieldsize;
8158 if (!wi::fits_shwi_p (wpos))
8159 return 0;
8160 pos = wpos.to_shwi ();
8163 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8165 if (valueinit == -1)
8167 tree zero = build_zero_cst (TREE_TYPE (type));
8168 r = native_encode_initializer (zero, ptr + curpos,
8169 fieldsize, 0,
8170 mask + curpos);
8171 if (TREE_CODE (zero) == CONSTRUCTOR)
8172 ggc_free (zero);
8173 if (!r)
8174 return 0;
8175 valueinit = curpos;
8176 curpos += fieldsize;
8178 while (curpos != pos)
8180 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8181 memcpy (mask + curpos, mask + valueinit, fieldsize);
8182 curpos += fieldsize;
8186 curpos = pos;
8187 if (val)
8188 do
8190 if (off == -1
8191 || (curpos >= off
8192 && (curpos + fieldsize
8193 <= (HOST_WIDE_INT) off + len)))
8195 if (full)
8197 if (ptr)
8198 memcpy (ptr + (curpos - o), ptr + (pos - o),
8199 fieldsize);
8200 if (mask)
8201 memcpy (mask + curpos, mask + pos, fieldsize);
8203 else if (!native_encode_initializer (val,
8204 ptr
8205 ? ptr + curpos - o
8206 : NULL,
8207 fieldsize,
8208 off == -1 ? -1
8209 : 0,
8210 mask
8211 ? mask + curpos
8212 : NULL))
8213 return 0;
8214 else
8216 full = true;
8217 pos = curpos;
8220 else if (curpos + fieldsize > off
8221 && curpos < (HOST_WIDE_INT) off + len)
8223 /* Partial overlap. */
8224 unsigned char *p = NULL;
8225 int no = 0;
8226 int l;
8227 gcc_assert (mask == NULL);
8228 if (curpos >= off)
8230 if (ptr)
8231 p = ptr + curpos - off;
8232 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8233 fieldsize);
8235 else
8237 p = ptr;
8238 no = off - curpos;
8239 l = len;
8241 if (!native_encode_initializer (val, p, l, no, NULL))
8242 return 0;
8244 curpos += fieldsize;
8246 while (count-- != 0);
8248 return MIN (total_bytes - off, len);
8250 else if (TREE_CODE (type) == RECORD_TYPE
8251 || TREE_CODE (type) == UNION_TYPE)
8253 unsigned HOST_WIDE_INT cnt;
8254 constructor_elt *ce;
8255 tree fld_base = TYPE_FIELDS (type);
8256 tree to_free = NULL_TREE;
8258 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8259 if (ptr != NULL)
8260 memset (ptr, '\0', MIN (total_bytes - o, len));
8261 for (cnt = 0; ; cnt++)
8263 tree val = NULL_TREE, field = NULL_TREE;
8264 HOST_WIDE_INT pos = 0, fieldsize;
8265 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8267 if (to_free)
8269 ggc_free (to_free);
8270 to_free = NULL_TREE;
8273 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8275 val = ce->value;
8276 field = ce->index;
8277 if (field == NULL_TREE)
8278 return 0;
8280 pos = int_byte_position (field);
8281 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8282 continue;
8284 else if (mask == NULL
8285 || CONSTRUCTOR_NO_CLEARING (init))
8286 break;
8287 else
8288 pos = total_bytes;
8290 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8292 tree fld;
8293 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8295 if (TREE_CODE (fld) != FIELD_DECL)
8296 continue;
8297 if (fld == field)
8298 break;
8299 if (DECL_PADDING_P (fld))
8300 continue;
8301 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8302 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8303 return 0;
8304 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8305 continue;
8306 break;
8308 if (fld == NULL_TREE)
8310 if (ce == NULL)
8311 break;
8312 return 0;
8314 fld_base = DECL_CHAIN (fld);
8315 if (fld != field)
8317 cnt--;
8318 field = fld;
8319 pos = int_byte_position (field);
8320 val = build_zero_cst (TREE_TYPE (fld));
8321 if (TREE_CODE (val) == CONSTRUCTOR)
8322 to_free = val;
8326 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8327 && TYPE_DOMAIN (TREE_TYPE (field))
8328 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8330 if (mask || off != -1)
8331 return 0;
8332 if (val == NULL_TREE)
8333 continue;
8334 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8335 return 0;
8336 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8337 if (fieldsize < 0
8338 || (int) fieldsize != fieldsize
8339 || (pos + fieldsize) > INT_MAX)
8340 return 0;
8341 if (pos + fieldsize > total_bytes)
8343 if (ptr != NULL && total_bytes < len)
8344 memset (ptr + total_bytes, '\0',
8345 MIN (pos + fieldsize, len) - total_bytes);
8346 total_bytes = pos + fieldsize;
8349 else
8351 if (DECL_SIZE_UNIT (field) == NULL_TREE
8352 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8353 return 0;
8354 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8356 if (fieldsize == 0)
8357 continue;
8359 if (DECL_BIT_FIELD (field))
8361 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8362 return 0;
8363 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8364 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8365 if (bpos % BITS_PER_UNIT)
8366 bpos %= BITS_PER_UNIT;
8367 else
8368 bpos = 0;
8369 fieldsize += bpos;
8370 epos = fieldsize % BITS_PER_UNIT;
8371 fieldsize += BITS_PER_UNIT - 1;
8372 fieldsize /= BITS_PER_UNIT;
8375 if (off != -1 && pos + fieldsize <= off)
8376 continue;
8378 if (val == NULL_TREE)
8379 continue;
8381 if (DECL_BIT_FIELD (field))
8383 /* FIXME: Handle PDP endian. */
8384 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8385 return 0;
8387 if (TREE_CODE (val) != INTEGER_CST)
8388 return 0;
8390 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8391 tree repr_type = NULL_TREE;
8392 HOST_WIDE_INT rpos = 0;
8393 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8395 rpos = int_byte_position (repr);
8396 repr_type = TREE_TYPE (repr);
8398 else
8400 repr_type = find_bitfield_repr_type (fieldsize, len);
8401 if (repr_type == NULL_TREE)
8402 return 0;
8403 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8404 gcc_assert (repr_size > 0 && repr_size <= len);
8405 if (pos + repr_size <= o + len)
8406 rpos = pos;
8407 else
8409 rpos = o + len - repr_size;
8410 gcc_assert (rpos <= pos);
8414 if (rpos > pos)
8415 return 0;
8416 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8417 int diff = (TYPE_PRECISION (repr_type)
8418 - TYPE_PRECISION (TREE_TYPE (field)));
8419 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8420 if (!BYTES_BIG_ENDIAN)
8421 w = wi::lshift (w, bitoff);
8422 else
8423 w = wi::lshift (w, diff - bitoff);
8424 val = wide_int_to_tree (repr_type, w);
8426 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8427 / BITS_PER_UNIT + 1];
8428 int l = native_encode_int (val, buf, sizeof buf, 0);
8429 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8430 return 0;
8432 if (ptr == NULL)
8433 continue;
8435 /* If the bitfield does not start at a byte boundary, handle
8436 the partial byte at the start. */
8437 if (bpos
8438 && (off == -1 || (pos >= off && len >= 1)))
8440 if (!BYTES_BIG_ENDIAN)
8442 int msk = (1 << bpos) - 1;
8443 buf[pos - rpos] &= ~msk;
8444 buf[pos - rpos] |= ptr[pos - o] & msk;
8445 if (mask)
8447 if (fieldsize > 1 || epos == 0)
8448 mask[pos] &= msk;
8449 else
8450 mask[pos] &= (msk | ~((1 << epos) - 1));
8453 else
8455 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8456 buf[pos - rpos] &= msk;
8457 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8458 if (mask)
8460 if (fieldsize > 1 || epos == 0)
8461 mask[pos] &= ~msk;
8462 else
8463 mask[pos] &= (~msk
8464 | ((1 << (BITS_PER_UNIT - epos))
8465 - 1));
8469 /* If the bitfield does not end at a byte boundary, handle
8470 the partial byte at the end. */
8471 if (epos
8472 && (off == -1
8473 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8475 if (!BYTES_BIG_ENDIAN)
8477 int msk = (1 << epos) - 1;
8478 buf[pos - rpos + fieldsize - 1] &= msk;
8479 buf[pos - rpos + fieldsize - 1]
8480 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8481 if (mask && (fieldsize > 1 || bpos == 0))
8482 mask[pos + fieldsize - 1] &= ~msk;
8484 else
8486 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8487 buf[pos - rpos + fieldsize - 1] &= ~msk;
8488 buf[pos - rpos + fieldsize - 1]
8489 |= ptr[pos + fieldsize - 1 - o] & msk;
8490 if (mask && (fieldsize > 1 || bpos == 0))
8491 mask[pos + fieldsize - 1] &= msk;
8494 if (off == -1
8495 || (pos >= off
8496 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8498 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8499 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8500 memset (mask + pos + (bpos != 0), 0,
8501 fieldsize - (bpos != 0) - (epos != 0));
8503 else
8505 /* Partial overlap. */
8506 HOST_WIDE_INT fsz = fieldsize;
8507 gcc_assert (mask == NULL);
8508 if (pos < off)
8510 fsz -= (off - pos);
8511 pos = off;
8513 if (pos + fsz > (HOST_WIDE_INT) off + len)
8514 fsz = (HOST_WIDE_INT) off + len - pos;
8515 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8517 continue;
8520 if (off == -1
8521 || (pos >= off
8522 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8524 int fldsize = fieldsize;
8525 if (off == -1)
8527 tree fld = DECL_CHAIN (field);
8528 while (fld)
8530 if (TREE_CODE (fld) == FIELD_DECL)
8531 break;
8532 fld = DECL_CHAIN (fld);
8534 if (fld == NULL_TREE)
8535 fldsize = len - pos;
8537 r = native_encode_initializer (val, ptr ? ptr + pos - o
8538 : NULL,
8539 fldsize,
8540 off == -1 ? -1 : 0,
8541 mask ? mask + pos : NULL);
8542 if (!r)
8543 return 0;
8544 if (off == -1
8545 && fldsize != fieldsize
8546 && r > fieldsize
8547 && pos + r > total_bytes)
8548 total_bytes = pos + r;
8550 else
8552 /* Partial overlap. */
8553 unsigned char *p = NULL;
8554 int no = 0;
8555 int l;
8556 gcc_assert (mask == NULL);
8557 if (pos >= off)
8559 if (ptr)
8560 p = ptr + pos - off;
8561 l = MIN ((HOST_WIDE_INT) off + len - pos,
8562 fieldsize);
8564 else
8566 p = ptr;
8567 no = off - pos;
8568 l = len;
8570 if (!native_encode_initializer (val, p, l, no, NULL))
8571 return 0;
8574 return MIN (total_bytes - off, len);
8576 return 0;
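/* A sketch of the MASK protocol, assuming the caller passed an
   all-ones MASK: encoding "struct { char c; int i; } v = { 1, 2 }"
   fills PTR with the byte for c, three zero padding bytes and the
   four bytes of i, and clears the MASK bits for the bytes the fields
   define while leaving the padding bytes' bits set, marking their
   contents as indeterminate.  */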
8581 /* Subroutine of native_interpret_expr. Interpret the contents of
8582 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8583 If the buffer cannot be interpreted, return NULL_TREE. */
8585 static tree
8586 native_interpret_int (tree type, const unsigned char *ptr, int len)
8588 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8590 if (total_bytes > len
8591 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8592 return NULL_TREE;
8594 wide_int result = wi::from_buffer (ptr, total_bytes);
8596 return wide_int_to_tree (type, result);
8600 /* Subroutine of native_interpret_expr. Interpret the contents of
8601 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8602 If the buffer cannot be interpreted, return NULL_TREE. */
8604 static tree
8605 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8607 scalar_mode mode = SCALAR_TYPE_MODE (type);
8608 int total_bytes = GET_MODE_SIZE (mode);
8609 double_int result;
8610 FIXED_VALUE_TYPE fixed_value;
8612 if (total_bytes > len
8613 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8614 return NULL_TREE;
8616 result = double_int::from_buffer (ptr, total_bytes);
8617 fixed_value = fixed_from_double_int (result, mode);
8619 return build_fixed (type, fixed_value);
8623 /* Subroutine of native_interpret_expr. Interpret the contents of
8624 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8625 If the buffer cannot be interpreted, return NULL_TREE. */
8627 static tree
8628 native_interpret_real (tree type, const unsigned char *ptr, int len)
8630 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8631 int total_bytes = GET_MODE_SIZE (mode);
8632 unsigned char value;
8633 /* There are always 32 bits in each long, no matter the size of
8634 the host's long. We handle floating point representations with
8635 up to 192 bits. */
8636 REAL_VALUE_TYPE r;
8637 long tmp[6];
8639 if (total_bytes > len || total_bytes > 24)
8640 return NULL_TREE;
8641 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8643 memset (tmp, 0, sizeof (tmp));
8644 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8645 bitpos += BITS_PER_UNIT)
8647 /* Both OFFSET and BYTE index within a long;
8648 bitpos indexes the whole float. */
8649 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8650 if (UNITS_PER_WORD < 4)
8652 int word = byte / UNITS_PER_WORD;
8653 if (WORDS_BIG_ENDIAN)
8654 word = (words - 1) - word;
8655 offset = word * UNITS_PER_WORD;
8656 if (BYTES_BIG_ENDIAN)
8657 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8658 else
8659 offset += byte % UNITS_PER_WORD;
8661 else
8663 offset = byte;
8664 if (BYTES_BIG_ENDIAN)
8666 /* Reverse bytes within each long, or within the entire float
8667 if it's smaller than a long (for HFmode). */
8668 offset = MIN (3, total_bytes - 1) - offset;
8669 gcc_assert (offset >= 0);
8672 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8674 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8677 real_from_target (&r, tmp, mode);
8678 tree ret = build_real (type, r);
8679 if (MODE_COMPOSITE_P (mode))
8681 /* For floating point values in composite modes, punt if this folding
8682 doesn't preserve bit representation. As the mode doesn't have fixed
8683 precision while GCC pretends it does, there could be valid values that
8684 GCC can't really represent accurately. See PR95450. */
8685 unsigned char buf[24];
8686 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8687 || memcmp (ptr, buf, total_bytes) != 0)
8688 ret = NULL_TREE;
8690 return ret;
8694 /* Subroutine of native_interpret_expr. Interpret the contents of
8695 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8696 If the buffer cannot be interpreted, return NULL_TREE. */
8698 static tree
8699 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8701 tree etype, rpart, ipart;
8702 int size;
8704 etype = TREE_TYPE (type);
8705 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8706 if (size * 2 > len)
8707 return NULL_TREE;
8708 rpart = native_interpret_expr (etype, ptr, size);
8709 if (!rpart)
8710 return NULL_TREE;
8711 ipart = native_interpret_expr (etype, ptr+size, size);
8712 if (!ipart)
8713 return NULL_TREE;
8714 return build_complex (type, rpart, ipart);
8717 /* Read a vector of type TYPE from the target memory image given by BYTES,
8718 which contains LEN bytes. The vector is known to be encodable using
8719 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8721 Return the vector on success, otherwise return null. */
8723 static tree
8724 native_interpret_vector_part (tree type, const unsigned char *bytes,
8725 unsigned int len, unsigned int npatterns,
8726 unsigned int nelts_per_pattern)
8728 tree elt_type = TREE_TYPE (type);
8729 if (VECTOR_BOOLEAN_TYPE_P (type)
8730 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8732 /* This is the only case in which elements can be smaller than a byte.
8733 Element 0 is always in the lsb of the containing byte. */
8734 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8735 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8736 return NULL_TREE;
8738 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8739 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8741 unsigned int bit_index = i * elt_bits;
8742 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8743 unsigned int lsb = bit_index % BITS_PER_UNIT;
8744 builder.quick_push (bytes[byte_index] & (1 << lsb)
8745 ? build_all_ones_cst (elt_type)
8746 : build_zero_cst (elt_type));
8748 return builder.build ();
8751 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8752 if (elt_bytes * npatterns * nelts_per_pattern > len)
8753 return NULL_TREE;
8755 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8756 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8758 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8759 if (!elt)
8760 return NULL_TREE;
8761 builder.quick_push (elt);
8762 bytes += elt_bytes;
8764 return builder.build ();
8767 /* Subroutine of native_interpret_expr. Interpret the contents of
8768 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8769 If the buffer cannot be interpreted, return NULL_TREE. */
8771 static tree
8772 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8774 tree etype;
8775 unsigned int size;
8776 unsigned HOST_WIDE_INT count;
8778 etype = TREE_TYPE (type);
8779 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8780 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8781 || size * count > len)
8782 return NULL_TREE;
8784 return native_interpret_vector_part (type, ptr, len, count, 1);
8788 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8789 the buffer PTR of length LEN as a constant of type TYPE. For
8790 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8791 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8792 return NULL_TREE. */
8794 tree
8795 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8797 switch (TREE_CODE (type))
8799 case INTEGER_TYPE:
8800 case ENUMERAL_TYPE:
8801 case BOOLEAN_TYPE:
8802 case POINTER_TYPE:
8803 case REFERENCE_TYPE:
8804 case OFFSET_TYPE:
8805 return native_interpret_int (type, ptr, len);
8807 case REAL_TYPE:
8808 return native_interpret_real (type, ptr, len);
8810 case FIXED_POINT_TYPE:
8811 return native_interpret_fixed (type, ptr, len);
8813 case COMPLEX_TYPE:
8814 return native_interpret_complex (type, ptr, len);
8816 case VECTOR_TYPE:
8817 return native_interpret_vector (type, ptr, len);
8819 default:
8820 return NULL_TREE;
8824 /* Returns true if we can interpret the contents of a native encoding
8825 as TYPE. */
8827 bool
8828 can_native_interpret_type_p (tree type)
8830 switch (TREE_CODE (type))
8832 case INTEGER_TYPE:
8833 case ENUMERAL_TYPE:
8834 case BOOLEAN_TYPE:
8835 case POINTER_TYPE:
8836 case REFERENCE_TYPE:
8837 case FIXED_POINT_TYPE:
8838 case REAL_TYPE:
8839 case COMPLEX_TYPE:
8840 case VECTOR_TYPE:
8841 case OFFSET_TYPE:
8842 return true;
8843 default:
8844 return false;
8848 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8849 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8851 tree
8852 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8853 int len)
8855 vec<constructor_elt, va_gc> *elts = NULL;
8856 if (TREE_CODE (type) == ARRAY_TYPE)
8858 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8859 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8860 return NULL_TREE;
8862 HOST_WIDE_INT cnt = 0;
8863 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8865 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8866 return NULL_TREE;
8867 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8869 if (eltsz == 0)
8870 cnt = 0;
8871 HOST_WIDE_INT pos = 0;
8872 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8874 tree v = NULL_TREE;
8875 if (pos >= len || pos + eltsz > len)
8876 return NULL_TREE;
8877 if (can_native_interpret_type_p (TREE_TYPE (type)))
8879 v = native_interpret_expr (TREE_TYPE (type),
8880 ptr + off + pos, eltsz);
8881 if (v == NULL_TREE)
8882 return NULL_TREE;
8884 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8885 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8886 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8887 eltsz);
8888 if (v == NULL_TREE)
8889 return NULL_TREE;
8890 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8892 return build_constructor (type, elts);
8894 if (TREE_CODE (type) != RECORD_TYPE)
8895 return NULL_TREE;
8896 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8898 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8899 continue;
8900 tree fld = field;
8901 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8902 int diff = 0;
8903 tree v = NULL_TREE;
8904 if (DECL_BIT_FIELD (field))
8906 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8907 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8909 poly_int64 bitoffset;
8910 poly_uint64 field_offset, fld_offset;
8911 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8912 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8913 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8914 else
8915 bitoffset = 0;
8916 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8917 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8918 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8919 - TYPE_PRECISION (TREE_TYPE (field)));
8920 if (!bitoffset.is_constant (&bitoff)
8921 || bitoff < 0
8922 || bitoff > diff)
8923 return NULL_TREE;
8925 else
8927 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8928 return NULL_TREE;
8929 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8930 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8931 bpos %= BITS_PER_UNIT;
8932 fieldsize += bpos;
8933 fieldsize += BITS_PER_UNIT - 1;
8934 fieldsize /= BITS_PER_UNIT;
8935 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8936 if (repr_type == NULL_TREE)
8937 return NULL_TREE;
8938 sz = int_size_in_bytes (repr_type);
8939 if (sz < 0 || sz > len)
8940 return NULL_TREE;
8941 pos = int_byte_position (field);
8942 if (pos < 0 || pos > len || pos + fieldsize > len)
8943 return NULL_TREE;
8944 HOST_WIDE_INT rpos;
8945 if (pos + sz <= len)
8946 rpos = pos;
8947 else
8949 rpos = len - sz;
8950 gcc_assert (rpos <= pos);
8952 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8953 pos = rpos;
8954 diff = (TYPE_PRECISION (repr_type)
8955 - TYPE_PRECISION (TREE_TYPE (field)));
8956 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8957 if (v == NULL_TREE)
8958 return NULL_TREE;
8959 fld = NULL_TREE;
8963 if (fld)
8965 sz = int_size_in_bytes (TREE_TYPE (fld));
8966 if (sz < 0 || sz > len)
8967 return NULL_TREE;
8968 tree byte_pos = byte_position (fld);
8969 if (!tree_fits_shwi_p (byte_pos))
8970 return NULL_TREE;
8971 pos = tree_to_shwi (byte_pos);
8972 if (pos < 0 || pos > len || pos + sz > len)
8973 return NULL_TREE;
8975 if (fld == NULL_TREE)
8976 /* Already handled above. */;
8977 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8979 v = native_interpret_expr (TREE_TYPE (fld),
8980 ptr + off + pos, sz);
8981 if (v == NULL_TREE)
8982 return NULL_TREE;
8984 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8985 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8986 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8987 if (v == NULL_TREE)
8988 return NULL_TREE;
8989 if (fld != field)
8991 if (TREE_CODE (v) != INTEGER_CST)
8992 return NULL_TREE;
8994 /* FIXME: Figure out how to handle PDP endian bitfields. */
8995 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8996 return NULL_TREE;
8997 if (!BYTES_BIG_ENDIAN)
8998 v = wide_int_to_tree (TREE_TYPE (field),
8999 wi::lrshift (wi::to_wide (v), bitoff));
9000 else
9001 v = wide_int_to_tree (TREE_TYPE (field),
9002 wi::lrshift (wi::to_wide (v),
9003 diff - bitoff));
9005 CONSTRUCTOR_APPEND_ELT (elts, field, v);
9007 return build_constructor (type, elts);
9010 /* Routines for manipulating native_encode_expr encoded data when the encoded
9011 or extracted constant positions and/or sizes aren't byte-aligned. */
9013 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9014 bits between adjacent elements. AMNT should be within
9015 [0, BITS_PER_UNIT).
9016 Example, AMNT = 2:
9017 00011111|11100000 << 2 = 01111111|10000000
9018 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9020 void
9021 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9022 unsigned int amnt)
9024 if (amnt == 0)
9025 return;
9027 unsigned char carry_over = 0U;
9028 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9029 unsigned char clear_mask = (~0U) << amnt;
9031 for (unsigned int i = 0; i < sz; i++)
9033 unsigned prev_carry_over = carry_over;
9034 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9036 ptr[i] <<= amnt;
9037 if (i != 0)
9039 ptr[i] &= clear_mask;
9040 ptr[i] |= prev_carry_over;
9045 /* Like shift_bytes_in_array_left but for big-endian.
9046 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9047 bits between adjacent elements. AMNT should be within
9048 [0, BITS_PER_UNIT).
9049 Example, AMNT = 2:
9050 00011111|11100000 >> 2 = 00000111|11111000
9051 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9053 void
9054 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9055 unsigned int amnt)
9057 if (amnt == 0)
9058 return;
9060 unsigned char carry_over = 0U;
9061 unsigned char carry_mask = ~(~0U << amnt);
9063 for (unsigned int i = 0; i < sz; i++)
9065 unsigned prev_carry_over = carry_over;
9066 carry_over = ptr[i] & carry_mask;
9068 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9069 ptr[i] >>= amnt;
9070 ptr[i] |= prev_carry_over;
9074 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9075 directly on the VECTOR_CST encoding, in a way that works for variable-
9076 length vectors. Return the resulting VECTOR_CST on success or null
9077 on failure. */
9079 static tree
9080 fold_view_convert_vector_encoding (tree type, tree expr)
9082 tree expr_type = TREE_TYPE (expr);
9083 poly_uint64 type_bits, expr_bits;
9084 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9085 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9086 return NULL_TREE;
9088 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9089 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9090 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9091 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9093 /* We can only preserve the semantics of a stepped pattern if the new
9094 vector element is an integer of the same size. */
9095 if (VECTOR_CST_STEPPED_P (expr)
9096 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9097 return NULL_TREE;
9099 /* The number of bits needed to encode one element from every pattern
9100 of the original vector. */
9101 unsigned int expr_sequence_bits
9102 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9104 /* The number of bits needed to encode one element from every pattern
9105 of the result. */
9106 unsigned int type_sequence_bits
9107 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9109 /* Don't try to read more bytes than are available, which can happen
9110 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9111 The general VIEW_CONVERT handling can cope with that case, so there's
9112 no point complicating things here. */
9113 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9114 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9115 BITS_PER_UNIT);
9116 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9117 if (known_gt (buffer_bits, expr_bits))
9118 return NULL_TREE;
9120 /* Get enough bytes of EXPR to form the new encoding. */
9121 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9122 buffer.quick_grow (buffer_bytes);
9123 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9124 buffer_bits / expr_elt_bits)
9125 != (int) buffer_bytes)
9126 return NULL_TREE;
9128 /* Reencode the bytes as TYPE. */
9129 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9130 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9131 type_npatterns, nelts_per_pattern);
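/* E.g. viewing a variable-length VNx4SI duplicate of 1 as VNx16QI:
   the single 4-byte SI pattern is re-encoded as four QI patterns, so
   on a little-endian target the result is the repeating byte pattern
   { 1, 0, 0, 0 }, computed without knowing the runtime number of
   elements.  */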
9134 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9135 TYPE at compile-time. If we're unable to perform the conversion
9136 return NULL_TREE. */
9138 static tree
9139 fold_view_convert_expr (tree type, tree expr)
9141 /* We support up to 512-bit values (for V8DFmode). */
9142 unsigned char buffer[64];
9143 int len;
9145 /* Check that the host and target are sane. */
9146 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9147 return NULL_TREE;
9149 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9150 if (tree res = fold_view_convert_vector_encoding (type, expr))
9151 return res;
9153 len = native_encode_expr (expr, buffer, sizeof (buffer));
9154 if (len == 0)
9155 return NULL_TREE;
9157 return native_interpret_expr (type, buffer, len);
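/* E.g. VIEW_CONVERT_EXPR<int>(1.0f) folds to 0x3f800000 on an IEEE
   target: the REAL_CST is encoded into its target byte image and the
   bytes are then interpreted back as a 32-bit INTEGER_CST.  */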
9160 /* Build an expression for the address of T. Folds away INDIRECT_REF
9161 to avoid confusing the gimplify process. */
9163 tree
9164 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9166 /* The size of the object is not relevant when talking about its address. */
9167 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9168 t = TREE_OPERAND (t, 0);
9170 if (TREE_CODE (t) == INDIRECT_REF)
9172 t = TREE_OPERAND (t, 0);
9174 if (TREE_TYPE (t) != ptrtype)
9175 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9177 else if (TREE_CODE (t) == MEM_REF
9178 && integer_zerop (TREE_OPERAND (t, 1)))
9180 t = TREE_OPERAND (t, 0);
9182 if (TREE_TYPE (t) != ptrtype)
9183 t = fold_convert_loc (loc, ptrtype, t);
9185 else if (TREE_CODE (t) == MEM_REF
9186 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9187 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9188 TREE_OPERAND (t, 0),
9189 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9190 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9192 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9194 if (TREE_TYPE (t) != ptrtype)
9195 t = fold_convert_loc (loc, ptrtype, t);
9197 else
9198 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9200 return t;
9203 /* Build an expression for the address of T. */
9205 tree
9206 build_fold_addr_expr_loc (location_t loc, tree t)
9208 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9210 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
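/* For example (an illustrative editorial sketch, not part of the original
   file): given

     int *f (int *p) { return &*p; }

   the INDIRECT_REF case above folds the address of the dereference
   straight back to P, adding only a NOP_EXPR when the requested pointer
   type differs; no address arithmetic is materialized.  */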
9213 /* Fold a unary expression of code CODE and type TYPE with operand
9214 OP0. Return the folded expression if folding is successful.
9215 Otherwise, return NULL_TREE. */
9217 tree
9218 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9220 tree tem;
9221 tree arg0;
9222 enum tree_code_class kind = TREE_CODE_CLASS (code);
9224 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9225 && TREE_CODE_LENGTH (code) == 1);
9227 arg0 = op0;
9228 if (arg0)
9230 if (CONVERT_EXPR_CODE_P (code)
9231 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9233 /* Don't use STRIP_NOPS, because signedness of argument type
9234 matters. */
9235 STRIP_SIGN_NOPS (arg0);
9237 else
9239 /* Strip any conversions that don't change the mode. This
9240 is safe for every expression, except for a comparison
9241 expression because its signedness is derived from its
9242 operands.
9244 Note that this is done as an internal manipulation within
9245 the constant folder, in order to find the simplest
9246 representation of the arguments so that their form can be
9247 studied. In any case, the appropriate type conversions
9248 should be put back in the tree that will get out of the
9249 constant folder. */
9250 STRIP_NOPS (arg0);
9253 if (CONSTANT_CLASS_P (arg0))
9255 tree tem = const_unop (code, type, arg0);
9256 if (tem)
9258 if (TREE_TYPE (tem) != type)
9259 tem = fold_convert_loc (loc, type, tem);
9260 return tem;
9265 tem = generic_simplify (loc, code, type, op0);
9266 if (tem)
9267 return tem;
9269 if (TREE_CODE_CLASS (code) == tcc_unary)
9271 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9272 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9273 fold_build1_loc (loc, code, type,
9274 fold_convert_loc (loc, TREE_TYPE (op0),
9275 TREE_OPERAND (arg0, 1))));
9276 else if (TREE_CODE (arg0) == COND_EXPR)
9278 tree arg01 = TREE_OPERAND (arg0, 1);
9279 tree arg02 = TREE_OPERAND (arg0, 2);
9280 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9281 arg01 = fold_build1_loc (loc, code, type,
9282 fold_convert_loc (loc,
9283 TREE_TYPE (op0), arg01));
9284 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9285 arg02 = fold_build1_loc (loc, code, type,
9286 fold_convert_loc (loc,
9287 TREE_TYPE (op0), arg02));
9288 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9289 arg01, arg02);
9291 /* If this was a conversion, and all we did was to move it
9292 inside the COND_EXPR, bring it back out. But leave it if
9293 it is a conversion from integer to integer and the
9294 result precision is no wider than a word since such a
9295 conversion is cheap and may be optimized away by combine,
9296 while it couldn't if it were outside the COND_EXPR. Then return
9297 so we don't get into an infinite recursion loop taking the
9298 conversion out and then back in. */
9300 if ((CONVERT_EXPR_CODE_P (code)
9301 || code == NON_LVALUE_EXPR)
9302 && TREE_CODE (tem) == COND_EXPR
9303 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9304 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9305 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9306 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9307 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9308 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9309 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9310 && (INTEGRAL_TYPE_P
9311 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9312 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9313 || flag_syntax_only))
9314 tem = build1_loc (loc, code, type,
9315 build3 (COND_EXPR,
9316 TREE_TYPE (TREE_OPERAND
9317 (TREE_OPERAND (tem, 1), 0)),
9318 TREE_OPERAND (tem, 0),
9319 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9320 TREE_OPERAND (TREE_OPERAND (tem, 2),
9321 0)));
9322 return tem;
9326 switch (code)
9328 case NON_LVALUE_EXPR:
9329 if (!maybe_lvalue_p (op0))
9330 return fold_convert_loc (loc, type, op0);
9331 return NULL_TREE;
9333 CASE_CONVERT:
9334 case FLOAT_EXPR:
9335 case FIX_TRUNC_EXPR:
9336 if (COMPARISON_CLASS_P (op0))
9338 /* If we have (type) (a CMP b) and type is an integral type, return
9339 new expression involving the new type. Canonicalize
9340 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9341 non-integral type.
9342 Do not fold the result, as that would not simplify further;
9343 folding it again would result in infinite recursion. */
9344 if (TREE_CODE (type) == BOOLEAN_TYPE)
9345 return build2_loc (loc, TREE_CODE (op0), type,
9346 TREE_OPERAND (op0, 0),
9347 TREE_OPERAND (op0, 1));
9348 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9349 && TREE_CODE (type) != VECTOR_TYPE)
9350 return build3_loc (loc, COND_EXPR, type, op0,
9351 constant_boolean_node (true, type),
9352 constant_boolean_node (false, type));
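/* For example (an illustrative editorial sketch, not part of the original
   file): a conversion of a comparison to a non-integral type, as in

     float f (int a, int b) { return (float) (a < b); }

   should be canonicalized to the COND_EXPR form a < b ? 1.0f : 0.0f,
   while a conversion to _Bool simply rebuilds the comparison with
   boolean type.  */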
9355 /* Handle (T *)&A.B.C for A being of type T and B and C
9356 living at offset zero. This occurs frequently in
9357 C++ upcasting and then accessing the base. */
9358 if (TREE_CODE (op0) == ADDR_EXPR
9359 && POINTER_TYPE_P (type)
9360 && handled_component_p (TREE_OPERAND (op0, 0)))
9362 poly_int64 bitsize, bitpos;
9363 tree offset;
9364 machine_mode mode;
9365 int unsignedp, reversep, volatilep;
9366 tree base
9367 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9368 &offset, &mode, &unsignedp, &reversep,
9369 &volatilep);
9370 /* If the reference was to a (constant) zero offset, we can use
9371 the address of the base if it has the same base type
9372 as the result type and the pointer type is unqualified. */
9373 if (!offset
9374 && known_eq (bitpos, 0)
9375 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9376 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9377 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9378 return fold_convert_loc (loc, type,
9379 build_fold_addr_expr_loc (loc, base));
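/* For example (an illustrative editorial sketch, not part of the original
   file):

     struct B { int i; };
     struct D { struct B b; int j; };
     struct D *f (struct D *d) { return (struct D *) &d->b; }

   Here the component sits at bit position zero and the base type matches
   the pointed-to type, so the cast address should fold back to plain d
   (the pointer itself).  */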
9382 if (TREE_CODE (op0) == MODIFY_EXPR
9383 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9384 /* Detect assigning a bitfield. */
9385 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9386 && DECL_BIT_FIELD
9387 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9389 /* Don't leave an assignment inside a conversion
9390 unless assigning a bitfield. */
9391 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9392 /* First do the assignment, then return converted constant. */
9393 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9394 suppress_warning (tem /* What warning? */);
9395 TREE_USED (tem) = 1;
9396 return tem;
9399 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9400 constants (if x has signed type, the sign bit cannot be set
9401 in c). This folds extension into the BIT_AND_EXPR.
9402 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9403 very likely don't have maximal range for their precision and this
9404 transformation effectively doesn't preserve non-maximal ranges. */
9405 if (TREE_CODE (type) == INTEGER_TYPE
9406 && TREE_CODE (op0) == BIT_AND_EXPR
9407 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9409 tree and_expr = op0;
9410 tree and0 = TREE_OPERAND (and_expr, 0);
9411 tree and1 = TREE_OPERAND (and_expr, 1);
9412 int change = 0;
9414 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9415 || (TYPE_PRECISION (type)
9416 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9417 change = 1;
9418 else if (TYPE_PRECISION (TREE_TYPE (and1))
9419 <= HOST_BITS_PER_WIDE_INT
9420 && tree_fits_uhwi_p (and1))
9422 unsigned HOST_WIDE_INT cst;
9424 cst = tree_to_uhwi (and1);
9425 cst &= HOST_WIDE_INT_M1U
9426 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9427 change = (cst == 0);
9428 if (change
9429 && !flag_syntax_only
9430 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9431 == ZERO_EXTEND))
9433 tree uns = unsigned_type_for (TREE_TYPE (and0));
9434 and0 = fold_convert_loc (loc, uns, and0);
9435 and1 = fold_convert_loc (loc, uns, and1);
9438 if (change)
9440 tem = force_fit_type (type, wi::to_widest (and1), 0,
9441 TREE_OVERFLOW (and1));
9442 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9443 fold_convert_loc (loc, type, and0), tem);
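/* For example (an illustrative editorial sketch, not part of the original
   file; assumes 32-bit int and 64-bit long):

     long f (int x) { return (long) (x & 0x7f); }

   The mask 0x7f leaves the sign bit of the int operand clear, so the
   widening conversion can be folded into the mask, giving
   (long) x & 0x7f.  */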
9447 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9448 cast (T1)X will fold away. We assume that this happens when X itself
9449 is a cast. */
9450 if (POINTER_TYPE_P (type)
9451 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9452 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9454 tree arg00 = TREE_OPERAND (arg0, 0);
9455 tree arg01 = TREE_OPERAND (arg0, 1);
9457 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9458 when the pointed type needs higher alignment than
9459 the p+ first operand's pointed type. */
9460 if (!in_gimple_form
9461 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9462 && (min_align_of_type (TREE_TYPE (type))
9463 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9464 return NULL_TREE;
9466 arg00 = fold_convert_loc (loc, type, arg00);
9467 return fold_build_pointer_plus_loc (loc, arg00, arg01);
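/* For example (an illustrative editorial sketch, not part of the original
   file): with int *q, the tree for

     (int *) ((char *) q + 4)

   is a conversion of a POINTER_PLUS_EXPR whose first operand is itself
   a cast, so it should become q p+ 4 once the (int *)(char *) round
   trip folds away -- except in GENERIC under -fsanitize=alignment when
   the outer pointed-to type is more strictly aligned, as guarded
   above.  */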
9470 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9471 of the same precision, and X has an integer type not narrower than
9472 T1 or T2, i.e. the cast (T2)X isn't an extension. */
9473 if (INTEGRAL_TYPE_P (type)
9474 && TREE_CODE (op0) == BIT_NOT_EXPR
9475 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9476 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9477 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9479 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9480 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9481 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9482 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9483 fold_convert_loc (loc, type, tem));
9486 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9487 type of X and Y (integer types only). */
9488 if (INTEGRAL_TYPE_P (type)
9489 && TREE_CODE (op0) == MULT_EXPR
9490 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9491 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9493 /* Be careful not to introduce new overflows. */
9494 tree mult_type;
9495 if (TYPE_OVERFLOW_WRAPS (type))
9496 mult_type = type;
9497 else
9498 mult_type = unsigned_type_for (type);
9500 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9502 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9503 fold_convert_loc (loc, mult_type,
9504 TREE_OPERAND (op0, 0)),
9505 fold_convert_loc (loc, mult_type,
9506 TREE_OPERAND (op0, 1)));
9507 return fold_convert_loc (loc, type, tem);
9511 return NULL_TREE;
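/* For example (an illustrative editorial sketch, not part of the original
   file; assumes 32-bit int, 64-bit long and the default -fno-wrapv):
   the multiplication narrowing above turns

     int f (long x, long y) { return (int) (x * y); }

   into (int) ((unsigned int) x * (unsigned int) y), performing the
   narrow multiplication in an unsigned type so that no new signed
   overflow is introduced.  */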
9513 case VIEW_CONVERT_EXPR:
9514 if (TREE_CODE (op0) == MEM_REF)
9516 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9517 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9518 tem = fold_build2_loc (loc, MEM_REF, type,
9519 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9520 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9521 return tem;
9524 return NULL_TREE;
9526 case NEGATE_EXPR:
9527 tem = fold_negate_expr (loc, arg0);
9528 if (tem)
9529 return fold_convert_loc (loc, type, tem);
9530 return NULL_TREE;
9532 case ABS_EXPR:
9533 /* Convert fabs((double)float) into (double)fabsf(float). */
9534 if (TREE_CODE (arg0) == NOP_EXPR
9535 && TREE_CODE (type) == REAL_TYPE)
9537 tree targ0 = strip_float_extensions (arg0);
9538 if (targ0 != arg0)
9539 return fold_convert_loc (loc, type,
9540 fold_build1_loc (loc, ABS_EXPR,
9541 TREE_TYPE (targ0),
9542 targ0));
9544 return NULL_TREE;
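/* For example (an illustrative editorial sketch, not part of the original
   file): once earlier folding has turned the call into an ABS_EXPR,

     double f (float x) { return __builtin_fabs ((double) x); }

   has its float extension stripped so the absolute value is computed
   in float, i.e. the equivalent of (double) __builtin_fabsf (x).  */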
9546 case BIT_NOT_EXPR:
9547 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9548 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9549 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9550 fold_convert_loc (loc, type,
9551 TREE_OPERAND (arg0, 0)))))
9552 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9553 fold_convert_loc (loc, type,
9554 TREE_OPERAND (arg0, 1)));
9555 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9556 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9557 fold_convert_loc (loc, type,
9558 TREE_OPERAND (arg0, 1)))))
9559 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9560 fold_convert_loc (loc, type,
9561 TREE_OPERAND (arg0, 0)), tem);
9563 return NULL_TREE;
9565 case TRUTH_NOT_EXPR:
9566 /* Note that the operand of this must be an int
9567 and its values must be 0 or 1.
9568 ("true" is a fixed value perhaps depending on the language,
9569 but we don't handle values other than 1 correctly yet.) */
9570 tem = fold_truth_not_expr (loc, arg0);
9571 if (!tem)
9572 return NULL_TREE;
9573 return fold_convert_loc (loc, type, tem);
9575 case INDIRECT_REF:
9576 /* Fold *&X to X if X is an lvalue. */
9577 if (TREE_CODE (op0) == ADDR_EXPR)
9579 tree op00 = TREE_OPERAND (op0, 0);
9580 if ((VAR_P (op00)
9581 || TREE_CODE (op00) == PARM_DECL
9582 || TREE_CODE (op00) == RESULT_DECL)
9583 && !TREE_READONLY (op00))
9584 return op00;
9586 return NULL_TREE;
9588 default:
9589 return NULL_TREE;
9590 } /* switch (code) */
9594 /* If the operation was a conversion do _not_ mark a resulting constant
9595 with TREE_OVERFLOW if the original constant was not. These conversions
9596 have implementation defined behavior and retaining the TREE_OVERFLOW
9597 flag here would confuse later passes such as VRP. */
9598 tree
9599 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9600 tree type, tree op0)
9602 tree res = fold_unary_loc (loc, code, type, op0);
9603 if (res
9604 && TREE_CODE (res) == INTEGER_CST
9605 && TREE_CODE (op0) == INTEGER_CST
9606 && CONVERT_EXPR_CODE_P (code))
9607 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9609 return res;
9612 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9613 operands OP0 and OP1. LOC is the location of the resulting expression.
9614 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9615 Return the folded expression if folding is successful. Otherwise,
9616 return NULL_TREE. */
9617 static tree
9618 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9619 tree arg0, tree arg1, tree op0, tree op1)
9621 tree tem;
9623 /* We only do these simplifications if we are optimizing. */
9624 if (!optimize)
9625 return NULL_TREE;
9627 /* Check for things like (A || B) && (A || C). We can convert this
9628 to A || (B && C). Note that either operator can be any of the four
9629 truth and/or operations and the transformation will still be
9630 valid. Also note that we only care about order for the
9631 ANDIF and ORIF operators. If B contains side effects, this
9632 might change the truth-value of A. */
9633 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9634 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9635 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9636 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9637 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9638 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9640 tree a00 = TREE_OPERAND (arg0, 0);
9641 tree a01 = TREE_OPERAND (arg0, 1);
9642 tree a10 = TREE_OPERAND (arg1, 0);
9643 tree a11 = TREE_OPERAND (arg1, 1);
9644 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9645 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9646 && (code == TRUTH_AND_EXPR
9647 || code == TRUTH_OR_EXPR));
9649 if (operand_equal_p (a00, a10, 0))
9650 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9651 fold_build2_loc (loc, code, type, a01, a11));
9652 else if (commutative && operand_equal_p (a00, a11, 0))
9653 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9654 fold_build2_loc (loc, code, type, a01, a10));
9655 else if (commutative && operand_equal_p (a01, a10, 0))
9656 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9657 fold_build2_loc (loc, code, type, a00, a11));
9659 /* This case is tricky because we must either have commutative
9660 operators or else A10 must not have side-effects. */
9662 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9663 && operand_equal_p (a01, a11, 0))
9664 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9665 fold_build2_loc (loc, code, type, a00, a10),
9666 a01);
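/* For example (an illustrative editorial sketch, not part of the original
   file):

     int f (int a, int b, int c) { return (a || b) && (a || c); }

   Here A00 and A10 are both A and B has no side effects, so the first
   branch above rewrites the expression as a || (b && c).  */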
9669 /* See if we can build a range comparison. */
9670 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9671 return tem;
9673 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9674 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9676 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9677 if (tem)
9678 return fold_build2_loc (loc, code, type, tem, arg1);
9681 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9682 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9684 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9685 if (tem)
9686 return fold_build2_loc (loc, code, type, arg0, tem);
9689 /* Check for the possibility of merging component references. If our
9690 lhs is another similar operation, try to merge its rhs with our
9691 rhs. Then try to merge our lhs and rhs. */
9692 if (TREE_CODE (arg0) == code
9693 && (tem = fold_truth_andor_1 (loc, code, type,
9694 TREE_OPERAND (arg0, 1), arg1)) != 0)
9695 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9697 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9698 return tem;
9700 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9701 if (param_logical_op_non_short_circuit != -1)
9702 logical_op_non_short_circuit
9703 = param_logical_op_non_short_circuit;
9704 if (logical_op_non_short_circuit
9705 && !sanitize_coverage_p ()
9706 && (code == TRUTH_AND_EXPR
9707 || code == TRUTH_ANDIF_EXPR
9708 || code == TRUTH_OR_EXPR
9709 || code == TRUTH_ORIF_EXPR))
9711 enum tree_code ncode, icode;
9713 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9714 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9715 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9717 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9718 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9719 We don't want to pack more than two leaves into a non-IF AND/OR
9720 expression.
9721 If the tree code of the left-hand operand isn't an AND/OR-IF code
9722 and isn't equal to IF-CODE, then we don't want to add the
9723 right-hand operand. If the inner right-hand side of the
9724 left-hand operand has side-effects, or isn't simple, then we
9725 can't add to it, as otherwise we might destroy the if-sequence. */
9726 if (TREE_CODE (arg0) == icode
9727 && simple_operand_p_2 (arg1)
9728 /* Needed for sequence points to handle trappings, and
9729 side-effects. */
9730 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9732 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9733 arg1);
9734 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9735 tem);
9737 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9738 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9739 else if (TREE_CODE (arg1) == icode
9740 && simple_operand_p_2 (arg0)
9741 /* Needed for sequence points to handle trappings, and
9742 side-effects. */
9743 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9745 tem = fold_build2_loc (loc, ncode, type,
9746 arg0, TREE_OPERAND (arg1, 0));
9747 return fold_build2_loc (loc, icode, type, tem,
9748 TREE_OPERAND (arg1, 1));
9750 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9751 into (A OR B).
9752 For sequence point consistency, we need to check for trapping
9753 and side-effects. */
9754 else if (code == icode && simple_operand_p_2 (arg0)
9755 && simple_operand_p_2 (arg1))
9756 return fold_build2_loc (loc, ncode, type, arg0, arg1);
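/* For example (an illustrative editorial sketch, not part of the original
   file): on targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds (or with
   the corresponding --param), a condition such as

     if (x > 0 && y > 0) ...

   whose operands are simple and free of side effects and traps can be
   rewritten branchlessly as the TRUTH_AND_EXPR (x > 0) & (y > 0).  */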
9759 return NULL_TREE;
9762 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9763 by changing CODE to reduce the magnitude of constants involved in
9764 ARG0 of the comparison.
9765 Returns a canonicalized comparison tree if a simplification was
9766 possible, otherwise returns NULL_TREE.
9767 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9768 valid if signed overflow is undefined. */
9770 static tree
9771 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9772 tree arg0, tree arg1,
9773 bool *strict_overflow_p)
9775 enum tree_code code0 = TREE_CODE (arg0);
9776 tree t, cst0 = NULL_TREE;
9777 int sgn0;
9779 /* Match A +- CST code arg1. We can change this only if overflow
9780 is undefined. */
9781 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9782 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9783 /* In principle pointers also have undefined overflow behavior,
9784 but that causes problems elsewhere. */
9785 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9786 && (code0 == MINUS_EXPR
9787 || code0 == PLUS_EXPR)
9788 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9789 return NULL_TREE;
9791 /* Identify the constant in arg0 and its sign. */
9792 cst0 = TREE_OPERAND (arg0, 1);
9793 sgn0 = tree_int_cst_sgn (cst0);
9795 /* Overflowed constants and zero will cause problems. */
9796 if (integer_zerop (cst0)
9797 || TREE_OVERFLOW (cst0))
9798 return NULL_TREE;
9800 /* See if we can reduce the magnitude of the constant in
9801 arg0 by changing the comparison code. */
9802 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9803 if (code == LT_EXPR
9804 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9805 code = LE_EXPR;
9806 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9807 else if (code == GT_EXPR
9808 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9809 code = GE_EXPR;
9810 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9811 else if (code == LE_EXPR
9812 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9813 code = LT_EXPR;
9814 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9815 else if (code == GE_EXPR
9816 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9817 code = GT_EXPR;
9818 else
9819 return NULL_TREE;
9820 *strict_overflow_p = true;
9822 /* Now build the constant reduced in magnitude. But not if that
9823 would produce one outside of its type's range. */
9824 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9825 && ((sgn0 == 1
9826 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9827 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9828 || (sgn0 == -1
9829 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9830 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9831 return NULL_TREE;
9833 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9834 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9835 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9836 t = fold_convert (TREE_TYPE (arg1), t);
9838 return fold_build2_loc (loc, code, type, t, arg1);
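/* For example (an illustrative editorial sketch, not part of the original
   file; valid only because signed overflow is assumed undefined):

     x + 2 > y    is canonicalized to    x + 1 >= y

   The constant shrinks in magnitude while keeping its sign, and
   *STRICT_OVERFLOW_P is set so the caller can warn under
   -Wstrict-overflow.  */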
9841 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9842 overflow further. Try to decrease the magnitude of constants involved
9843 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9844 and put sole constants at the second argument position.
9845 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9847 static tree
9848 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9849 tree arg0, tree arg1)
9851 tree t;
9852 bool strict_overflow_p;
9853 const char * const warnmsg = G_("assuming signed overflow does not occur "
9854 "when reducing constant in comparison");
9856 /* Try canonicalization by simplifying arg0. */
9857 strict_overflow_p = false;
9858 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9859 &strict_overflow_p);
9860 if (t)
9862 if (strict_overflow_p)
9863 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9864 return t;
9867 /* Try canonicalization by simplifying arg1 using the swapped
9868 comparison. */
9869 code = swap_tree_comparison (code);
9870 strict_overflow_p = false;
9871 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9872 &strict_overflow_p);
9873 if (t && strict_overflow_p)
9874 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9875 return t;
9878 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9879 space. This is used to avoid issuing overflow warnings for
9880 expressions like &p->x which cannot wrap. */
9882 static bool
9883 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9885 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9886 return true;
9888 if (maybe_lt (bitpos, 0))
9889 return true;
9891 poly_wide_int wi_offset;
9892 int precision = TYPE_PRECISION (TREE_TYPE (base));
9893 if (offset == NULL_TREE)
9894 wi_offset = wi::zero (precision);
9895 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9896 return true;
9897 else
9898 wi_offset = wi::to_poly_wide (offset);
9900 wi::overflow_type overflow;
9901 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9902 precision);
9903 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9904 if (overflow)
9905 return true;
9907 poly_uint64 total_hwi, size;
9908 if (!total.to_uhwi (&total_hwi)
9909 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9910 &size)
9911 || known_eq (size, 0U))
9912 return true;
9914 if (known_le (total_hwi, size))
9915 return false;
9917 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9918 array. */
9919 if (TREE_CODE (base) == ADDR_EXPR
9920 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9921 &size)
9922 && maybe_ne (size, 0U)
9923 && known_le (total_hwi, size))
9924 return false;
9926 return true;
9929 /* Return a positive integer when the symbol DECL is known to have
9930 a nonzero address, zero when it's known not to (e.g., it's a weak
9931 symbol), and a negative integer when the symbol is not yet in the
9932 symbol table and so whether or not its address is zero is unknown.
9933 For function-local objects, always return a positive integer. */
9934 static int
9935 maybe_nonzero_address (tree decl)
9937 /* Normally, don't do anything for variables and functions before symtab is
9938 built; it is quite possible that DECL will be declared weak later.
9939 But if folding_initializer, we need a constant answer now, so create
9940 the symtab entry and prevent later weak declaration. */
9941 if (DECL_P (decl) && decl_in_symtab_p (decl))
9942 if (struct symtab_node *symbol
9943 = (folding_initializer
9944 ? symtab_node::get_create (decl)
9945 : symtab_node::get (decl)))
9946 return symbol->nonzero_address ();
9948 /* Function local objects are never NULL. */
9949 if (DECL_P (decl)
9950 && (DECL_CONTEXT (decl)
9951 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9952 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9953 return 1;
9955 return -1;
9958 /* Subroutine of fold_binary. This routine performs all of the
9959 transformations that are common to the equality/inequality
9960 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9961 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9962 fold_binary should call fold_binary. Fold a comparison with
9963 tree code CODE and type TYPE with operands OP0 and OP1. Return
9964 the folded comparison or NULL_TREE. */
9966 static tree
9967 fold_comparison (location_t loc, enum tree_code code, tree type,
9968 tree op0, tree op1)
9970 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9971 tree arg0, arg1, tem;
9973 arg0 = op0;
9974 arg1 = op1;
9976 STRIP_SIGN_NOPS (arg0);
9977 STRIP_SIGN_NOPS (arg1);
9979 /* For comparisons of pointers we can decompose it to a compile time
9980 comparison of the base objects and the offsets into the object.
9981 This requires at least one operand being an ADDR_EXPR or a
9982 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9983 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9984 && (TREE_CODE (arg0) == ADDR_EXPR
9985 || TREE_CODE (arg1) == ADDR_EXPR
9986 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9987 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9989 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9990 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9991 machine_mode mode;
9992 int volatilep, reversep, unsignedp;
9993 bool indirect_base0 = false, indirect_base1 = false;
9995 /* Get base and offset for the access. Strip ADDR_EXPR for
9996 get_inner_reference, but put it back by stripping INDIRECT_REF
9997 off the base object if possible. indirect_baseN will be true
9998 if baseN is not an address but refers to the object itself. */
9999 base0 = arg0;
10000 if (TREE_CODE (arg0) == ADDR_EXPR)
10002 base0
10003 = get_inner_reference (TREE_OPERAND (arg0, 0),
10004 &bitsize, &bitpos0, &offset0, &mode,
10005 &unsignedp, &reversep, &volatilep);
10006 if (TREE_CODE (base0) == INDIRECT_REF)
10007 base0 = TREE_OPERAND (base0, 0);
10008 else
10009 indirect_base0 = true;
10011 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10013 base0 = TREE_OPERAND (arg0, 0);
10014 STRIP_SIGN_NOPS (base0);
10015 if (TREE_CODE (base0) == ADDR_EXPR)
10017 base0
10018 = get_inner_reference (TREE_OPERAND (base0, 0),
10019 &bitsize, &bitpos0, &offset0, &mode,
10020 &unsignedp, &reversep, &volatilep);
10021 if (TREE_CODE (base0) == INDIRECT_REF)
10022 base0 = TREE_OPERAND (base0, 0);
10023 else
10024 indirect_base0 = true;
10026 if (offset0 == NULL_TREE || integer_zerop (offset0))
10027 offset0 = TREE_OPERAND (arg0, 1);
10028 else
10029 offset0 = size_binop (PLUS_EXPR, offset0,
10030 TREE_OPERAND (arg0, 1));
10031 if (poly_int_tree_p (offset0))
10033 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10034 TYPE_PRECISION (sizetype));
10035 tem <<= LOG2_BITS_PER_UNIT;
10036 tem += bitpos0;
10037 if (tem.to_shwi (&bitpos0))
10038 offset0 = NULL_TREE;
10042 base1 = arg1;
10043 if (TREE_CODE (arg1) == ADDR_EXPR)
10045 base1
10046 = get_inner_reference (TREE_OPERAND (arg1, 0),
10047 &bitsize, &bitpos1, &offset1, &mode,
10048 &unsignedp, &reversep, &volatilep);
10049 if (TREE_CODE (base1) == INDIRECT_REF)
10050 base1 = TREE_OPERAND (base1, 0);
10051 else
10052 indirect_base1 = true;
10054 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10056 base1 = TREE_OPERAND (arg1, 0);
10057 STRIP_SIGN_NOPS (base1);
10058 if (TREE_CODE (base1) == ADDR_EXPR)
10060 base1
10061 = get_inner_reference (TREE_OPERAND (base1, 0),
10062 &bitsize, &bitpos1, &offset1, &mode,
10063 &unsignedp, &reversep, &volatilep);
10064 if (TREE_CODE (base1) == INDIRECT_REF)
10065 base1 = TREE_OPERAND (base1, 0);
10066 else
10067 indirect_base1 = true;
10069 if (offset1 == NULL_TREE || integer_zerop (offset1))
10070 offset1 = TREE_OPERAND (arg1, 1);
10071 else
10072 offset1 = size_binop (PLUS_EXPR, offset1,
10073 TREE_OPERAND (arg1, 1));
10074 if (poly_int_tree_p (offset1))
10076 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10077 TYPE_PRECISION (sizetype));
10078 tem <<= LOG2_BITS_PER_UNIT;
10079 tem += bitpos1;
10080 if (tem.to_shwi (&bitpos1))
10081 offset1 = NULL_TREE;
10085 /* If we have equivalent bases we might be able to simplify. */
10086 if (indirect_base0 == indirect_base1
10087 && operand_equal_p (base0, base1,
10088 indirect_base0 ? OEP_ADDRESS_OF : 0))
10090 /* We can fold this expression to a constant if the non-constant
10091 offset parts are equal. */
10092 if ((offset0 == offset1
10093 || (offset0 && offset1
10094 && operand_equal_p (offset0, offset1, 0)))
10095 && (equality_code
10096 || (indirect_base0
10097 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10098 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10100 if (!equality_code
10101 && maybe_ne (bitpos0, bitpos1)
10102 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10103 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10104 fold_overflow_warning (("assuming pointer wraparound does not "
10105 "occur when comparing P +- C1 with "
10106 "P +- C2"),
10107 WARN_STRICT_OVERFLOW_CONDITIONAL);
10109 switch (code)
10111 case EQ_EXPR:
10112 if (known_eq (bitpos0, bitpos1))
10113 return constant_boolean_node (true, type);
10114 if (known_ne (bitpos0, bitpos1))
10115 return constant_boolean_node (false, type);
10116 break;
10117 case NE_EXPR:
10118 if (known_ne (bitpos0, bitpos1))
10119 return constant_boolean_node (true, type);
10120 if (known_eq (bitpos0, bitpos1))
10121 return constant_boolean_node (false, type);
10122 break;
10123 case LT_EXPR:
10124 if (known_lt (bitpos0, bitpos1))
10125 return constant_boolean_node (true, type);
10126 if (known_ge (bitpos0, bitpos1))
10127 return constant_boolean_node (false, type);
10128 break;
10129 case LE_EXPR:
10130 if (known_le (bitpos0, bitpos1))
10131 return constant_boolean_node (true, type);
10132 if (known_gt (bitpos0, bitpos1))
10133 return constant_boolean_node (false, type);
10134 break;
10135 case GE_EXPR:
10136 if (known_ge (bitpos0, bitpos1))
10137 return constant_boolean_node (true, type);
10138 if (known_lt (bitpos0, bitpos1))
10139 return constant_boolean_node (false, type);
10140 break;
10141 case GT_EXPR:
10142 if (known_gt (bitpos0, bitpos1))
10143 return constant_boolean_node (true, type);
10144 if (known_le (bitpos0, bitpos1))
10145 return constant_boolean_node (false, type);
10146 break;
10147 default:;
10150 /* We can simplify the comparison to a comparison of the variable
10151 offset parts if the constant offset parts are equal.
10152 Be careful to use signed sizetype here because otherwise we
10153 mess with array offsets in the wrong way. This is possible
10154 because pointer arithmetic is restricted to remain within an
10155 object and overflow on pointer differences is undefined as of
10156 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10157 else if (known_eq (bitpos0, bitpos1)
10158 && (equality_code
10159 || (indirect_base0
10160 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10161 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10163 /* By converting to signed sizetype we cover middle-end pointer
10164 arithmetic which operates on unsigned pointer types of size
10165 type size and ARRAY_REF offsets which are properly sign or
10166 zero extended from their type in case it is narrower than
10167 sizetype. */
10168 if (offset0 == NULL_TREE)
10169 offset0 = build_int_cst (ssizetype, 0);
10170 else
10171 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10172 if (offset1 == NULL_TREE)
10173 offset1 = build_int_cst (ssizetype, 0);
10174 else
10175 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10177 if (!equality_code
10178 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10179 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10180 fold_overflow_warning (("assuming pointer wraparound does not "
10181 "occur when comparing P +- C1 with "
10182 "P +- C2"),
10183 WARN_STRICT_OVERFLOW_COMPARISON);
10185 return fold_build2_loc (loc, code, type, offset0, offset1);
10188 /* For equal offsets we can simplify to a comparison of the
10189 base addresses. */
10190 else if (known_eq (bitpos0, bitpos1)
10191 && (indirect_base0
10192 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10193 && (indirect_base1
10194 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10195 && ((offset0 == offset1)
10196 || (offset0 && offset1
10197 && operand_equal_p (offset0, offset1, 0))))
10199 if (indirect_base0)
10200 base0 = build_fold_addr_expr_loc (loc, base0);
10201 if (indirect_base1)
10202 base1 = build_fold_addr_expr_loc (loc, base1);
10203 return fold_build2_loc (loc, code, type, base0, base1);
10205 /* Comparison between an ordinary (non-weak) symbol and a null
10206 pointer can be eliminated since such symbols must have a
10207 non-null address. In C, relational expressions between pointers
10208 to objects and null pointers are undefined. The results
10209 below follow the C++ rules with the additional property that
10210 every object pointer compares greater than a null pointer. */
10212 else if (((DECL_P (base0)
10213 && maybe_nonzero_address (base0) > 0
10214 /* Avoid folding references to struct members at offset 0 to
10215 prevent tests like '&ptr->firstmember == 0' from getting
10216 eliminated. When ptr is null, although the -> expression
10217 is strictly speaking invalid, GCC retains it as a matter
10218 of QoI. See PR c/44555. */
10219 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10220 || CONSTANT_CLASS_P (base0))
10221 && indirect_base0
10222 /* The caller guarantees that when one of the arguments is
10223 constant (i.e., null in this case) it is second. */
10224 && integer_zerop (arg1))
10226 switch (code)
10228 case EQ_EXPR:
10229 case LE_EXPR:
10230 case LT_EXPR:
10231 return constant_boolean_node (false, type);
10232 case GE_EXPR:
10233 case GT_EXPR:
10234 case NE_EXPR:
10235 return constant_boolean_node (true, type);
10236 default:
10237 gcc_unreachable ();
10242 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10243 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10244 the resulting offset is smaller in absolute value than the
10245 original one and has the same sign. */
10246 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10247 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10248 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10249 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10250 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10251 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10252 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10253 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10255 tree const1 = TREE_OPERAND (arg0, 1);
10256 tree const2 = TREE_OPERAND (arg1, 1);
10257 tree variable1 = TREE_OPERAND (arg0, 0);
10258 tree variable2 = TREE_OPERAND (arg1, 0);
10259 tree cst;
10260 const char * const warnmsg = G_("assuming signed overflow does not "
10261 "occur when combining constants around "
10262 "a comparison");
10264 /* Put the constant on the side where it doesn't overflow and is
10265 of lower absolute value and of same sign than before. */
10266 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10267 ? MINUS_EXPR : PLUS_EXPR,
10268 const2, const1);
10269 if (!TREE_OVERFLOW (cst)
10270 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10271 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10273 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10274 return fold_build2_loc (loc, code, type,
10275 variable1,
10276 fold_build2_loc (loc, TREE_CODE (arg1),
10277 TREE_TYPE (arg1),
10278 variable2, cst));
10281 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10282 ? MINUS_EXPR : PLUS_EXPR,
10283 const1, const2);
10284 if (!TREE_OVERFLOW (cst)
10285 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10286 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10288 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10289 return fold_build2_loc (loc, code, type,
10290 fold_build2_loc (loc, TREE_CODE (arg0),
10291 TREE_TYPE (arg0),
10292 variable1, cst),
10293 variable2);
10297 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10298 if (tem)
10299 return tem;
10301 /* If we are comparing an expression that just has comparisons
10302 of two integer values, arithmetic expressions of those comparisons,
10303 and constants, we can simplify it. There are only three cases
10304 to check: the two values can either be equal, the first can be
10305 greater, or the second can be greater. Fold the expression for
10306 those three values. Since each value must be 0 or 1, we have
10307 eight possibilities, each of which corresponds to the constant 0
10308 or 1 or one of the six possible comparisons.
10310 This handles common cases like (a > b) == 0 but also handles
10311 expressions like ((x > y) - (y > x)) > 0, which supposedly
10312 occur in macroized code. */
10314 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10316 tree cval1 = 0, cval2 = 0;
10318 if (twoval_comparison_p (arg0, &cval1, &cval2)
10319 /* Don't handle degenerate cases here; they should already
10320 have been handled anyway. */
10321 && cval1 != 0 && cval2 != 0
10322 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10323 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10324 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10325 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10326 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10327 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10328 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10330 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10331 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10333 /* We can't just pass T to eval_subst in case cval1 or cval2
10334 was the same as ARG1. */
10336 tree high_result
10337 = fold_build2_loc (loc, code, type,
10338 eval_subst (loc, arg0, cval1, maxval,
10339 cval2, minval),
10340 arg1);
10341 tree equal_result
10342 = fold_build2_loc (loc, code, type,
10343 eval_subst (loc, arg0, cval1, maxval,
10344 cval2, maxval),
10345 arg1);
10346 tree low_result
10347 = fold_build2_loc (loc, code, type,
10348 eval_subst (loc, arg0, cval1, minval,
10349 cval2, maxval),
10350 arg1);
10352 /* All three of these results should be 0 or 1. Confirm they are.
10353 Then use those values to select the proper code to use. */
10355 if (TREE_CODE (high_result) == INTEGER_CST
10356 && TREE_CODE (equal_result) == INTEGER_CST
10357 && TREE_CODE (low_result) == INTEGER_CST)
10359 /* Make a 3-bit mask with the high-order bit being the
10360 value for `>', the next for '=', and the low for '<'. */
10361 switch ((integer_onep (high_result) * 4)
10362 + (integer_onep (equal_result) * 2)
10363 + integer_onep (low_result))
10365 case 0:
10366 /* Always false. */
10367 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10368 case 1:
10369 code = LT_EXPR;
10370 break;
10371 case 2:
10372 code = EQ_EXPR;
10373 break;
10374 case 3:
10375 code = LE_EXPR;
10376 break;
10377 case 4:
10378 code = GT_EXPR;
10379 break;
10380 case 5:
10381 code = NE_EXPR;
10382 break;
10383 case 6:
10384 code = GE_EXPR;
10385 break;
10386 case 7:
10387 /* Always true. */
10388 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10391 return fold_build2_loc (loc, code, type, cval1, cval2);
10396 return NULL_TREE;
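/* For example (an illustrative editorial sketch, not part of the original
   file; assumes 32-bit int): the address decomposition above lets

     int f (void) { int a[4]; return &a[1] > &a[0]; }

   fold to 1, since both addresses share the base A and the constant
   bit positions 32 and 0 can be compared at compile time.  */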
10400 /* Subroutine of fold_binary. Optimize complex multiplications of the
10401 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10402 argument EXPR represents the expression "z" of type TYPE. */
10404 static tree
10405 fold_mult_zconjz (location_t loc, tree type, tree expr)
10407 tree itype = TREE_TYPE (type);
10408 tree rpart, ipart, tem;
10410 if (TREE_CODE (expr) == COMPLEX_EXPR)
10412 rpart = TREE_OPERAND (expr, 0);
10413 ipart = TREE_OPERAND (expr, 1);
10415 else if (TREE_CODE (expr) == COMPLEX_CST)
10417 rpart = TREE_REALPART (expr);
10418 ipart = TREE_IMAGPART (expr);
10420 else
10422 expr = save_expr (expr);
10423 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10424 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10427 rpart = save_expr (rpart);
10428 ipart = save_expr (ipart);
10429 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10430 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10431 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10432 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10433 build_zero_cst (itype));
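/* For example (an illustrative editorial sketch, not part of the original
   file; ~z is the GNU C spelling of the complex conjugate):

     _Complex int f (_Complex int z) { return z * ~z; }

   folds to the COMPLEX_EXPR (re*re + im*im, 0).  For floating-point
   complex types the same transform is applied only under
   -funsafe-math-optimizations, since non-finite imaginary parts would
   otherwise be mishandled.  */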
10437 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10438 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10439 true if successful. */
10441 static bool
10442 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10444 unsigned HOST_WIDE_INT i, nunits;
10446 if (TREE_CODE (arg) == VECTOR_CST
10447 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10449 for (i = 0; i < nunits; ++i)
10450 elts[i] = VECTOR_CST_ELT (arg, i);
10452 else if (TREE_CODE (arg) == CONSTRUCTOR)
10454 constructor_elt *elt;
10456 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10457 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10458 return false;
10459 else
10460 elts[i] = elt->value;
10462 else
10463 return false;
10464 for (; i < nelts; i++)
10465 elts[i]
10466 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10467 return true;
10470 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10471 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10472 NULL_TREE otherwise. */
10474 tree
10475 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10477 unsigned int i;
10478 unsigned HOST_WIDE_INT nelts;
10479 bool need_ctor = false;
10481 if (!sel.length ().is_constant (&nelts))
10482 return NULL_TREE;
10483 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10484 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10485 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10486 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10487 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10488 return NULL_TREE;
10490 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10491 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10492 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10493 return NULL_TREE;
10495 tree_vector_builder out_elts (type, nelts, 1);
10496 for (i = 0; i < nelts; i++)
10498 HOST_WIDE_INT index;
10499 if (!sel[i].is_constant (&index))
10500 return NULL_TREE;
10501 if (!CONSTANT_CLASS_P (in_elts[index]))
10502 need_ctor = true;
10503 out_elts.quick_push (unshare_expr (in_elts[index]));
10506 if (need_ctor)
10508 vec<constructor_elt, va_gc> *v;
10509 vec_alloc (v, nelts);
10510 for (i = 0; i < nelts; i++)
10511 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10512 return build_constructor (type, v);
10514 else
10515 return out_elts.build ();
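/* For example (an illustrative editorial sketch, not part of the original
   file):

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si f (void)
     {
       v4si a = { 0, 1, 2, 3 }, b = { 4, 5, 6, 7 };
       return __builtin_shuffle (a, b, (v4si) { 0, 4, 1, 5 });
     }

   With constant inputs and a constant selector the permutation can be
   evaluated here, yielding the VECTOR_CST { 0, 4, 1, 5 }.  */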
10518 /* Try to fold a pointer difference of type TYPE between two address
10519 expressions of array references AREF0 and AREF1 using location LOC.
10520 Return a simplified expression for the difference or NULL_TREE. */
10522 static tree
10523 fold_addr_of_array_ref_difference (location_t loc, tree type,
10524 tree aref0, tree aref1,
10525 bool use_pointer_diff)
10527 tree base0 = TREE_OPERAND (aref0, 0);
10528 tree base1 = TREE_OPERAND (aref1, 0);
10529 tree base_offset = build_int_cst (type, 0);
10531 /* If the bases are array references as well, recurse. If the bases
10532 are pointer indirections compute the difference of the pointers.
10533 If the bases are equal, we are set. */
10534 if ((TREE_CODE (base0) == ARRAY_REF
10535 && TREE_CODE (base1) == ARRAY_REF
10536 && (base_offset
10537 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10538 use_pointer_diff)))
10539 || (INDIRECT_REF_P (base0)
10540 && INDIRECT_REF_P (base1)
10541 && (base_offset
10542 = use_pointer_diff
10543 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10544 TREE_OPERAND (base0, 0),
10545 TREE_OPERAND (base1, 0))
10546 : fold_binary_loc (loc, MINUS_EXPR, type,
10547 fold_convert (type,
10548 TREE_OPERAND (base0, 0)),
10549 fold_convert (type,
10550 TREE_OPERAND (base1, 0)))))
10551 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10553 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10554 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10555 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10556 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10557 return fold_build2_loc (loc, PLUS_EXPR, type,
10558 base_offset,
10559 fold_build2_loc (loc, MULT_EXPR, type,
10560 diff, esz));
10562 return NULL_TREE;
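/* For example (an illustrative editorial sketch, not part of the original
   file; assumes 4-byte int):

     ptrdiff_t f (int i, int j) { static int a[16]; return &a[i] - &a[j]; }

   The bases match, so this helper should produce the byte difference
   (i - j) * 4; the enclosing division by the element size that the
   front end emits for pointer subtraction then reduces the result to
   i - j.  */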
10565 /* If the real or vector real constant CST of type TYPE has an exact
10566 inverse, return it, else return NULL. */
10568 tree
10569 exact_inverse (tree type, tree cst)
10571 REAL_VALUE_TYPE r;
10572 tree unit_type;
10573 machine_mode mode;
10575 switch (TREE_CODE (cst))
10577 case REAL_CST:
10578 r = TREE_REAL_CST (cst);
10580 if (exact_real_inverse (TYPE_MODE (type), &r))
10581 return build_real (type, r);
10583 return NULL_TREE;
10585 case VECTOR_CST:
10587 unit_type = TREE_TYPE (type);
10588 mode = TYPE_MODE (unit_type);
10590 tree_vector_builder elts;
10591 if (!elts.new_unary_operation (type, cst, false))
10592 return NULL_TREE;
10593 unsigned int count = elts.encoded_nelts ();
10594 for (unsigned int i = 0; i < count; ++i)
10596 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10597 if (!exact_real_inverse (mode, &r))
10598 return NULL_TREE;
10599 elts.quick_push (build_real (unit_type, r));
10602 return elts.build ();
10605 default:
10606 return NULL_TREE;
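/* For example (an illustrative editorial sketch, not part of the original
   file): 4.0 has the exact binary reciprocal 0.25, so a division such
   as x / 4.0 can become x * 0.25 without needing -freciprocal-math,
   whereas 3.0 has no exact binary reciprocal and exact_inverse returns
   NULL_TREE for it.  */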
10610 /* Mask out the tz least significant bits of X of type TYPE where
10611 tz is the number of trailing zeroes in Y. */
10612 static wide_int
10613 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10615 int tz = wi::ctz (y);
10616 if (tz > 0)
10617 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10618 return x;
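/* A worked example (editorial, not from the original file): for
   Y = 24 (binary 11000) there are tz = 3 trailing zeros, so the mask
   clears the low three bits of X; X = 21 (binary 10101) becomes
   16 (binary 10000).  */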
10621 /* Return true when T is an address and is known to be nonzero.
10622 For floating point we further ensure that T is not denormal.
10623 Similar logic is present in nonzero_address in rtlanal.h.
10625 If the return value is based on the assumption that signed overflow
10626 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10627 change *STRICT_OVERFLOW_P. */
10629 static bool
10630 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10632 tree type = TREE_TYPE (t);
10633 enum tree_code code;
10635 /* Doing something useful for floating point would need more work. */
10636 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10637 return false;
10639 code = TREE_CODE (t);
10640 switch (TREE_CODE_CLASS (code))
10642 case tcc_unary:
10643 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10644 strict_overflow_p);
10645 case tcc_binary:
10646 case tcc_comparison:
10647 return tree_binary_nonzero_warnv_p (code, type,
10648 TREE_OPERAND (t, 0),
10649 TREE_OPERAND (t, 1),
10650 strict_overflow_p);
10651 case tcc_constant:
10652 case tcc_declaration:
10653 case tcc_reference:
10654 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10656 default:
10657 break;
10660 switch (code)
10662 case TRUTH_NOT_EXPR:
10663 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10664 strict_overflow_p);
10666 case TRUTH_AND_EXPR:
10667 case TRUTH_OR_EXPR:
10668 case TRUTH_XOR_EXPR:
10669 return tree_binary_nonzero_warnv_p (code, type,
10670 TREE_OPERAND (t, 0),
10671 TREE_OPERAND (t, 1),
10672 strict_overflow_p);
10674 case COND_EXPR:
10675 case CONSTRUCTOR:
10676 case OBJ_TYPE_REF:
10677 case ASSERT_EXPR:
10678 case ADDR_EXPR:
10679 case WITH_SIZE_EXPR:
10680 case SSA_NAME:
10681 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10683 case COMPOUND_EXPR:
10684 case MODIFY_EXPR:
10685 case BIND_EXPR:
10686 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10687 strict_overflow_p);
10689 case SAVE_EXPR:
10690 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10691 strict_overflow_p);
10693 case CALL_EXPR:
10695 tree fndecl = get_callee_fndecl (t);
10696 if (!fndecl) return false;
10697 if (flag_delete_null_pointer_checks && !flag_check_new
10698 && DECL_IS_OPERATOR_NEW_P (fndecl)
10699 && !TREE_NOTHROW (fndecl))
10700 return true;
10701 if (flag_delete_null_pointer_checks
10702 && lookup_attribute ("returns_nonnull",
10703 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10704 return true;
10705 return alloca_call_p (t);
10708 default:
10709 break;
10711 return false;
10714 /* Return true when T is an address and is known to be nonzero.
10715 Handle warnings about undefined signed overflow. */
10717 bool
10718 tree_expr_nonzero_p (tree t)
10720 bool ret, strict_overflow_p;
10722 strict_overflow_p = false;
10723 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10724 if (strict_overflow_p)
10725 fold_overflow_warning (("assuming signed overflow does not occur when "
10726 "determining that expression is always "
10727 "non-zero"),
10728 WARN_STRICT_OVERFLOW_MISC);
10729 return ret;
10732 /* Return true if T is known not to be equal to an integer W. */
10734 bool
10735 expr_not_equal_to (tree t, const wide_int &w)
10737 value_range vr;
10738 switch (TREE_CODE (t))
10740 case INTEGER_CST:
10741 return wi::to_wide (t) != w;
10743 case SSA_NAME:
10744 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10745 return false;
10747 if (cfun)
10748 get_range_query (cfun)->range_of_expr (vr, t);
10749 else
10750 get_global_range_query ()->range_of_expr (vr, t);
10752 if (!vr.undefined_p ()
10753 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10754 return true;
10755 /* If T has some known zero bits and W has any of those bits set,
10756 then T is known not to be equal to W. */
10757 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10758 TYPE_PRECISION (TREE_TYPE (t))), 0))
10759 return true;
10760 return false;
10762 default:
10763 return false;
10767 /* Fold a binary expression of code CODE and type TYPE with operands
10768 OP0 and OP1. LOC is the location of the resulting expression.
10769 Return the folded expression if folding is successful. Otherwise,
10770 return NULL_TREE. */
10772 tree
10773 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10774 tree op0, tree op1)
10776 enum tree_code_class kind = TREE_CODE_CLASS (code);
10777 tree arg0, arg1, tem;
10778 tree t1 = NULL_TREE;
10779 bool strict_overflow_p;
10780 unsigned int prec;
10782 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10783 && TREE_CODE_LENGTH (code) == 2
10784 && op0 != NULL_TREE
10785 && op1 != NULL_TREE);
10787 arg0 = op0;
10788 arg1 = op1;
10790 /* Strip any conversions that don't change the mode. This is
10791 safe for every expression, except for a comparison expression
10792 because its signedness is derived from its operands. So, in
10793 the latter case, only strip conversions that don't change the
10794 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10795 preserved.
10797 Note that this is done as an internal manipulation within the
10798 constant folder, in order to find the simplest representation
10799 of the arguments so that their form can be studied. In any
10800 case, the appropriate type conversions should be put back in
10801 the tree that will get out of the constant folder. */
10803 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10805 STRIP_SIGN_NOPS (arg0);
10806 STRIP_SIGN_NOPS (arg1);
10808 else
10810 STRIP_NOPS (arg0);
10811 STRIP_NOPS (arg1);
10814 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10815 constant but we can't do arithmetic on them. */
10816 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10818 tem = const_binop (code, type, arg0, arg1);
10819 if (tem != NULL_TREE)
10821 if (TREE_TYPE (tem) != type)
10822 tem = fold_convert_loc (loc, type, tem);
10823 return tem;
10827 /* If this is a commutative operation, and ARG0 is a constant, move it
10828 to ARG1 to reduce the number of tests below. */
10829 if (commutative_tree_code (code)
10830 && tree_swap_operands_p (arg0, arg1))
10831 return fold_build2_loc (loc, code, type, op1, op0);
10833 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10834 to ARG1 to reduce the number of tests below. */
10835 if (kind == tcc_comparison
10836 && tree_swap_operands_p (arg0, arg1))
10837 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10839 tem = generic_simplify (loc, code, type, op0, op1);
10840 if (tem)
10841 return tem;
10843 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10845 First check for cases where an arithmetic operation is applied to a
10846 compound, conditional, or comparison operation. Push the arithmetic
10847 operation inside the compound or conditional to see if any folding
10848 can then be done. Convert comparison to conditional for this purpose.
10849 This also optimizes non-constant cases that used to be done in
10850 expand_expr.
10852 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10853 one of the operands is a comparison and the other is a comparison, a
10854 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10855 code below would make the expression more complex. Change it to a
10856 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10857 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10859 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10860 || code == EQ_EXPR || code == NE_EXPR)
10861 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10862 && ((truth_value_p (TREE_CODE (arg0))
10863 && (truth_value_p (TREE_CODE (arg1))
10864 || (TREE_CODE (arg1) == BIT_AND_EXPR
10865 && integer_onep (TREE_OPERAND (arg1, 1)))))
10866 || (truth_value_p (TREE_CODE (arg1))
10867 && (truth_value_p (TREE_CODE (arg0))
10868 || (TREE_CODE (arg0) == BIT_AND_EXPR
10869 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10871 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10872 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10873 : TRUTH_XOR_EXPR,
10874 boolean_type_node,
10875 fold_convert_loc (loc, boolean_type_node, arg0),
10876 fold_convert_loc (loc, boolean_type_node, arg1));
10878 if (code == EQ_EXPR)
10879 tem = invert_truthvalue_loc (loc, tem);
10881 return fold_convert_loc (loc, type, tem);
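/* For example, with int-valued comparisons, (a < b) & (c < d) is
   rebuilt as TRUTH_AND_EXPR (a < b, c < d), and (a < b) == (c < d)
   becomes the inversion of TRUTH_XOR_EXPR (a < b, c < d); a sketch of
   the shapes this rewrite produces.  */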
10884 if (TREE_CODE_CLASS (code) == tcc_binary
10885 || TREE_CODE_CLASS (code) == tcc_comparison)
10887 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10889 tem = fold_build2_loc (loc, code, type,
10890 fold_convert_loc (loc, TREE_TYPE (op0),
10891 TREE_OPERAND (arg0, 1)), op1);
10892 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10893 tem);
10895 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10897 tem = fold_build2_loc (loc, code, type, op0,
10898 fold_convert_loc (loc, TREE_TYPE (op1),
10899 TREE_OPERAND (arg1, 1)));
10900 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10901 tem);
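/* Illustration: the arithmetic is pushed past the side effect, so
   something like ((x = f (), a) + b) is rebuilt as (x = f (), a + b),
   giving the folder a chance to simplify a + b on its own.  */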
10904 if (TREE_CODE (arg0) == COND_EXPR
10905 || TREE_CODE (arg0) == VEC_COND_EXPR
10906 || COMPARISON_CLASS_P (arg0))
10908 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10909 arg0, arg1,
10910 /*cond_first_p=*/1);
10911 if (tem != NULL_TREE)
10912 return tem;
10915 if (TREE_CODE (arg1) == COND_EXPR
10916 || TREE_CODE (arg1) == VEC_COND_EXPR
10917 || COMPARISON_CLASS_P (arg1))
10919 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10920 arg1, arg0,
10921 /*cond_first_p=*/0);
10922 if (tem != NULL_TREE)
10923 return tem;
10927 switch (code)
10929 case MEM_REF:
10930 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10931 if (TREE_CODE (arg0) == ADDR_EXPR
10932 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10934 tree iref = TREE_OPERAND (arg0, 0);
10935 return fold_build2 (MEM_REF, type,
10936 TREE_OPERAND (iref, 0),
10937 int_const_binop (PLUS_EXPR, arg1,
10938 TREE_OPERAND (iref, 1)));
10941 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10942 if (TREE_CODE (arg0) == ADDR_EXPR
10943 && handled_component_p (TREE_OPERAND (arg0, 0)))
10945 tree base;
10946 poly_int64 coffset;
10947 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10948 &coffset);
10949 if (!base)
10950 return NULL_TREE;
10951 return fold_build2 (MEM_REF, type,
10952 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10953 int_const_binop (PLUS_EXPR, arg1,
10954 size_int (coffset)));
10957 return NULL_TREE;
10959 case POINTER_PLUS_EXPR:
10960 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10961 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10962 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10963 return fold_convert_loc (loc, type,
10964 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10965 fold_convert_loc (loc, sizetype,
10966 arg1),
10967 fold_convert_loc (loc, sizetype,
10968 arg0)));
10970 return NULL_TREE;
10972 case PLUS_EXPR:
10973 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10975 /* X + (X / CST) * -CST is X % CST. */
10976 if (TREE_CODE (arg1) == MULT_EXPR
10977 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10978 && operand_equal_p (arg0,
10979 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10981 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10982 tree cst1 = TREE_OPERAND (arg1, 1);
10983 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10984 cst1, cst0);
10985 if (sum && integer_zerop (sum))
10986 return fold_convert_loc (loc, type,
10987 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10988 TREE_TYPE (arg0), arg0,
10989 cst0));
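/* E.g. for int x, x + (x / 16) * -16 has cst0 == 16 and cst1 == -16,
   which sum to zero, so the whole expression folds to x % 16 (the
   truncating division identity x == (x / d) * d + x % d).  */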
10993 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same, or
10994 one of them being 1.  Make sure the type is not saturating and has the signedness of
10995 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10996 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10997 if ((TREE_CODE (arg0) == MULT_EXPR
10998 || TREE_CODE (arg1) == MULT_EXPR)
10999 && !TYPE_SATURATING (type)
11000 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11001 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11002 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11004 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11005 if (tem)
11006 return tem;
11009 if (! FLOAT_TYPE_P (type))
11011 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11012 (plus (plus (mult) (mult)) (foo)) so that we can
11013 take advantage of the factoring cases below. */
11014 if (ANY_INTEGRAL_TYPE_P (type)
11015 && TYPE_OVERFLOW_WRAPS (type)
11016 && (((TREE_CODE (arg0) == PLUS_EXPR
11017 || TREE_CODE (arg0) == MINUS_EXPR)
11018 && TREE_CODE (arg1) == MULT_EXPR)
11019 || ((TREE_CODE (arg1) == PLUS_EXPR
11020 || TREE_CODE (arg1) == MINUS_EXPR)
11021 && TREE_CODE (arg0) == MULT_EXPR)))
11023 tree parg0, parg1, parg, marg;
11024 enum tree_code pcode;
11026 if (TREE_CODE (arg1) == MULT_EXPR)
11027 parg = arg0, marg = arg1;
11028 else
11029 parg = arg1, marg = arg0;
11030 pcode = TREE_CODE (parg);
11031 parg0 = TREE_OPERAND (parg, 0);
11032 parg1 = TREE_OPERAND (parg, 1);
11033 STRIP_NOPS (parg0);
11034 STRIP_NOPS (parg1);
11036 if (TREE_CODE (parg0) == MULT_EXPR
11037 && TREE_CODE (parg1) != MULT_EXPR)
11038 return fold_build2_loc (loc, pcode, type,
11039 fold_build2_loc (loc, PLUS_EXPR, type,
11040 fold_convert_loc (loc, type,
11041 parg0),
11042 fold_convert_loc (loc, type,
11043 marg)),
11044 fold_convert_loc (loc, type, parg1));
11045 if (TREE_CODE (parg0) != MULT_EXPR
11046 && TREE_CODE (parg1) == MULT_EXPR)
11047 return
11048 fold_build2_loc (loc, PLUS_EXPR, type,
11049 fold_convert_loc (loc, type, parg0),
11050 fold_build2_loc (loc, pcode, type,
11051 fold_convert_loc (loc, type, marg),
11052 fold_convert_loc (loc, type,
11053 parg1)));
11056 else
11058 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11059 to __complex__ ( x, y ). This is not the same for SNaNs or
11060 if signed zeros are involved. */
11061 if (!HONOR_SNANS (arg0)
11062 && !HONOR_SIGNED_ZEROS (arg0)
11063 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11065 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11066 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11067 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11068 bool arg0rz = false, arg0iz = false;
11069 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11070 || (arg0i && (arg0iz = real_zerop (arg0i))))
11072 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11073 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11074 if (arg0rz && arg1i && real_zerop (arg1i))
11076 tree rp = arg1r ? arg1r
11077 : build1 (REALPART_EXPR, rtype, arg1);
11078 tree ip = arg0i ? arg0i
11079 : build1 (IMAGPART_EXPR, rtype, arg0);
11080 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11082 else if (arg0iz && arg1r && real_zerop (arg1r))
11084 tree rp = arg0r ? arg0r
11085 : build1 (REALPART_EXPR, rtype, arg0);
11086 tree ip = arg1i ? arg1i
11087 : build1 (IMAGPART_EXPR, rtype, arg1);
11088 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
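/* Sketch of the transform: (0.0 + x*I) + (y + 0.0*I) folds to
   y + x*I.  Signed zeros break it: if y is -0.0 while the real part of
   the first operand is +0.0, the true real sum is +0.0, not the -0.0
   this would produce; hence the HONOR_SIGNED_ZEROS guard above.  */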
11093 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11094 We associate floats only if the user has specified
11095 -fassociative-math. */
11096 if (flag_associative_math
11097 && TREE_CODE (arg1) == PLUS_EXPR
11098 && TREE_CODE (arg0) != MULT_EXPR)
11100 tree tree10 = TREE_OPERAND (arg1, 0);
11101 tree tree11 = TREE_OPERAND (arg1, 1);
11102 if (TREE_CODE (tree11) == MULT_EXPR
11103 && TREE_CODE (tree10) == MULT_EXPR)
11105 tree tree0;
11106 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11107 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11110 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11111 We associate floats only if the user has specified
11112 -fassociative-math. */
11113 if (flag_associative_math
11114 && TREE_CODE (arg0) == PLUS_EXPR
11115 && TREE_CODE (arg1) != MULT_EXPR)
11117 tree tree00 = TREE_OPERAND (arg0, 0);
11118 tree tree01 = TREE_OPERAND (arg0, 1);
11119 if (TREE_CODE (tree01) == MULT_EXPR
11120 && TREE_CODE (tree00) == MULT_EXPR)
11122 tree tree0;
11123 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11124 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11129 bit_rotate:
11130 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11131 is a rotate of A by C1 bits. */
11132 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11133 is a rotate of A by B bits.
11134 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11135 though in this case CODE must be | and not + or ^, otherwise
11136 it doesn't return A when B is 0. */
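/* Concrete instances, assuming a 32-bit unsigned int x:

     (x << 3) + (x >> 29)          -> x lrotate 3
     (x << n) | (x >> (32 - n))    -> x lrotate n
     (x << n) | (x >> (-n & 31))   -> x lrotate n

   The last form is recognized only for |: with + or ^ it would yield
   x + x (respectively 0) rather than x when n == 0.  */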
11138 enum tree_code code0, code1;
11139 tree rtype;
11140 code0 = TREE_CODE (arg0);
11141 code1 = TREE_CODE (arg1);
11142 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11143 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11144 && operand_equal_p (TREE_OPERAND (arg0, 0),
11145 TREE_OPERAND (arg1, 0), 0)
11146 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11147 TYPE_UNSIGNED (rtype))
11148 /* Only create rotates in complete modes. Other cases are not
11149 expanded properly. */
11150 && (element_precision (rtype)
11151 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11153 tree tree01, tree11;
11154 tree orig_tree01, orig_tree11;
11155 enum tree_code code01, code11;
11157 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11158 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11159 STRIP_NOPS (tree01);
11160 STRIP_NOPS (tree11);
11161 code01 = TREE_CODE (tree01);
11162 code11 = TREE_CODE (tree11);
11163 if (code11 != MINUS_EXPR
11164 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11166 std::swap (code0, code1);
11167 std::swap (code01, code11);
11168 std::swap (tree01, tree11);
11169 std::swap (orig_tree01, orig_tree11);
11171 if (code01 == INTEGER_CST
11172 && code11 == INTEGER_CST
11173 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11174 == element_precision (rtype)))
11176 tem = build2_loc (loc, LROTATE_EXPR,
11177 rtype, TREE_OPERAND (arg0, 0),
11178 code0 == LSHIFT_EXPR
11179 ? orig_tree01 : orig_tree11);
11180 return fold_convert_loc (loc, type, tem);
11182 else if (code11 == MINUS_EXPR)
11184 tree tree110, tree111;
11185 tree110 = TREE_OPERAND (tree11, 0);
11186 tree111 = TREE_OPERAND (tree11, 1);
11187 STRIP_NOPS (tree110);
11188 STRIP_NOPS (tree111);
11189 if (TREE_CODE (tree110) == INTEGER_CST
11190 && compare_tree_int (tree110,
11191 element_precision (rtype)) == 0
11192 && operand_equal_p (tree01, tree111, 0))
11194 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11195 ? LROTATE_EXPR : RROTATE_EXPR),
11196 rtype, TREE_OPERAND (arg0, 0),
11197 orig_tree01);
11198 return fold_convert_loc (loc, type, tem);
11201 else if (code == BIT_IOR_EXPR
11202 && code11 == BIT_AND_EXPR
11203 && pow2p_hwi (element_precision (rtype)))
11205 tree tree110, tree111;
11206 tree110 = TREE_OPERAND (tree11, 0);
11207 tree111 = TREE_OPERAND (tree11, 1);
11208 STRIP_NOPS (tree110);
11209 STRIP_NOPS (tree111);
11210 if (TREE_CODE (tree110) == NEGATE_EXPR
11211 && TREE_CODE (tree111) == INTEGER_CST
11212 && compare_tree_int (tree111,
11213 element_precision (rtype) - 1) == 0
11214 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11216 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11217 ? LROTATE_EXPR : RROTATE_EXPR),
11218 rtype, TREE_OPERAND (arg0, 0),
11219 orig_tree01);
11220 return fold_convert_loc (loc, type, tem);
11226 associate:
11227 /* In most languages, we can't associate operations on floats through
11228 parentheses. Rather than remember where the parentheses were, we
11229 don't associate floats at all, unless the user has specified
11230 -fassociative-math.
11231 And we need to make sure the type is not saturating. */
11233 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11234 && !TYPE_SATURATING (type))
11236 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11237 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11238 tree atype = type;
11239 bool ok = true;
11241 /* Split both trees into variables, constants, and literals. Then
11242 associate each group together, the constants with literals,
11243 then the result with variables. This increases the chances of
11244 literals being recombined later and of generating relocatable
11245 expressions for the sum of a constant and literal. */
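/* For instance, folding (x + 2) + (y + 3) splits into variables x, y
   and literals 2, 3; associating the groups yields (x + y) + 5, so the
   two literals are combined even though they were not adjacent in the
   original tree.  (A sketch of the intent; the real split also tracks
   negated parts and non-literal constants.)  */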
11246 var0 = split_tree (arg0, type, code,
11247 &minus_var0, &con0, &minus_con0,
11248 &lit0, &minus_lit0, 0);
11249 var1 = split_tree (arg1, type, code,
11250 &minus_var1, &con1, &minus_con1,
11251 &lit1, &minus_lit1, code == MINUS_EXPR);
11253 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11254 if (code == MINUS_EXPR)
11255 code = PLUS_EXPR;
11257 /* With undefined overflow prefer doing association in a type
11258 which wraps on overflow, if that is one of the operand types. */
11259 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11260 && !TYPE_OVERFLOW_WRAPS (type))
11262 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11263 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11264 atype = TREE_TYPE (arg0);
11265 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11266 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11267 atype = TREE_TYPE (arg1);
11268 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11271 /* With undefined overflow we can only associate constants with one
11272 variable, and constants whose association doesn't overflow. */
11273 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11274 && !TYPE_OVERFLOW_WRAPS (atype))
11276 if ((var0 && var1) || (minus_var0 && minus_var1))
11278 /* ??? If split_tree would handle NEGATE_EXPR we could
11279 simply reject these cases and the allowed cases would
11280 be the var0/minus_var1 ones. */
11281 tree tmp0 = var0 ? var0 : minus_var0;
11282 tree tmp1 = var1 ? var1 : minus_var1;
11283 bool one_neg = false;
11285 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11287 tmp0 = TREE_OPERAND (tmp0, 0);
11288 one_neg = !one_neg;
11290 if (CONVERT_EXPR_P (tmp0)
11291 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11292 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11293 <= TYPE_PRECISION (atype)))
11294 tmp0 = TREE_OPERAND (tmp0, 0);
11295 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11297 tmp1 = TREE_OPERAND (tmp1, 0);
11298 one_neg = !one_neg;
11300 if (CONVERT_EXPR_P (tmp1)
11301 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11302 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11303 <= TYPE_PRECISION (atype)))
11304 tmp1 = TREE_OPERAND (tmp1, 0);
11305 /* The only case we can still associate with two variables
11306 is if they cancel out. */
11307 if (!one_neg
11308 || !operand_equal_p (tmp0, tmp1, 0))
11309 ok = false;
11311 else if ((var0 && minus_var1
11312 && ! operand_equal_p (var0, minus_var1, 0))
11313 || (minus_var0 && var1
11314 && ! operand_equal_p (minus_var0, var1, 0)))
11315 ok = false;
11318 /* Only do something if we found more than two objects. Otherwise,
11319 nothing has changed and we risk infinite recursion. */
11320 if (ok
11321 && ((var0 != 0) + (var1 != 0)
11322 + (minus_var0 != 0) + (minus_var1 != 0)
11323 + (con0 != 0) + (con1 != 0)
11324 + (minus_con0 != 0) + (minus_con1 != 0)
11325 + (lit0 != 0) + (lit1 != 0)
11326 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11328 var0 = associate_trees (loc, var0, var1, code, atype);
11329 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11330 code, atype);
11331 con0 = associate_trees (loc, con0, con1, code, atype);
11332 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11333 code, atype);
11334 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11335 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11336 code, atype);
11338 if (minus_var0 && var0)
11340 var0 = associate_trees (loc, var0, minus_var0,
11341 MINUS_EXPR, atype);
11342 minus_var0 = 0;
11344 if (minus_con0 && con0)
11346 con0 = associate_trees (loc, con0, minus_con0,
11347 MINUS_EXPR, atype);
11348 minus_con0 = 0;
11351 /* Preserve the MINUS_EXPR if the negative part of the literal is
11352 greater than the positive part. Otherwise, the multiplicative
11353 folding code (i.e. extract_muldiv) may be fooled when
11354 unsigned constants are subtracted, like in the following
11355 example: ((X*2 + 4) - 8U)/2. */
11356 if (minus_lit0 && lit0)
11358 if (TREE_CODE (lit0) == INTEGER_CST
11359 && TREE_CODE (minus_lit0) == INTEGER_CST
11360 && tree_int_cst_lt (lit0, minus_lit0)
11361 /* But avoid ending up with only negated parts. */
11362 && (var0 || con0))
11364 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11365 MINUS_EXPR, atype);
11366 lit0 = 0;
11368 else
11370 lit0 = associate_trees (loc, lit0, minus_lit0,
11371 MINUS_EXPR, atype);
11372 minus_lit0 = 0;
11376 /* Don't introduce overflows through reassociation. */
11377 if ((lit0 && TREE_OVERFLOW_P (lit0))
11378 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11379 return NULL_TREE;
11381 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11382 con0 = associate_trees (loc, con0, lit0, code, atype);
11383 lit0 = 0;
11384 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11385 code, atype);
11386 minus_lit0 = 0;
11388 /* Eliminate minus_con0. */
11389 if (minus_con0)
11391 if (con0)
11392 con0 = associate_trees (loc, con0, minus_con0,
11393 MINUS_EXPR, atype);
11394 else if (var0)
11395 var0 = associate_trees (loc, var0, minus_con0,
11396 MINUS_EXPR, atype);
11397 else
11398 gcc_unreachable ();
11399 minus_con0 = 0;
11402 /* Eliminate minus_var0. */
11403 if (minus_var0)
11405 if (con0)
11406 con0 = associate_trees (loc, con0, minus_var0,
11407 MINUS_EXPR, atype);
11408 else
11409 gcc_unreachable ();
11410 minus_var0 = 0;
11413 return
11414 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11415 code, atype));
11419 return NULL_TREE;
11421 case POINTER_DIFF_EXPR:
11422 case MINUS_EXPR:
11423 /* Fold &a[i] - &a[j] to i-j. */
11424 if (TREE_CODE (arg0) == ADDR_EXPR
11425 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11426 && TREE_CODE (arg1) == ADDR_EXPR
11427 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11429 tree tem = fold_addr_of_array_ref_difference (loc, type,
11430 TREE_OPERAND (arg0, 0),
11431 TREE_OPERAND (arg1, 0),
11432 code
11433 == POINTER_DIFF_EXPR);
11434 if (tem)
11435 return tem;
11438 /* Further transformations are not for pointers. */
11439 if (code == POINTER_DIFF_EXPR)
11440 return NULL_TREE;
11442 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11443 if (TREE_CODE (arg0) == NEGATE_EXPR
11444 && negate_expr_p (op1)
11445 /* If arg0 is e.g. unsigned int and type is int, then this could
11446 introduce UB, because if A is INT_MIN at runtime, the original
11447 expression can be well defined while the latter is not.
11448 See PR83269. */
11449 && !(ANY_INTEGRAL_TYPE_P (type)
11450 && TYPE_OVERFLOW_UNDEFINED (type)
11451 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11452 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11453 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11454 fold_convert_loc (loc, type,
11455 TREE_OPERAND (arg0, 0)));
11457 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11458 __complex__ ( x, -y ). This is not the same for SNaNs or if
11459 signed zeros are involved. */
11460 if (!HONOR_SNANS (arg0)
11461 && !HONOR_SIGNED_ZEROS (arg0)
11462 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11464 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11465 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11466 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11467 bool arg0rz = false, arg0iz = false;
11468 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11469 || (arg0i && (arg0iz = real_zerop (arg0i))))
11471 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11472 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11473 if (arg0rz && arg1i && real_zerop (arg1i))
11475 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11476 arg1r ? arg1r
11477 : build1 (REALPART_EXPR, rtype, arg1));
11478 tree ip = arg0i ? arg0i
11479 : build1 (IMAGPART_EXPR, rtype, arg0);
11480 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11482 else if (arg0iz && arg1r && real_zerop (arg1r))
11484 tree rp = arg0r ? arg0r
11485 : build1 (REALPART_EXPR, rtype, arg0);
11486 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11487 arg1i ? arg1i
11488 : build1 (IMAGPART_EXPR, rtype, arg1));
11489 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11494 /* A - B -> A + (-B) if B is easily negatable. */
11495 if (negate_expr_p (op1)
11496 && ! TYPE_OVERFLOW_SANITIZED (type)
11497 && ((FLOAT_TYPE_P (type)
11498 /* Avoid this transformation if B is a positive REAL_CST. */
11499 && (TREE_CODE (op1) != REAL_CST
11500 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11501 || INTEGRAL_TYPE_P (type)))
11502 return fold_build2_loc (loc, PLUS_EXPR, type,
11503 fold_convert_loc (loc, type, arg0),
11504 negate_expr (op1));
11506 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11507 one. Make sure the type is not saturating and has the signedness of
11508 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11509 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11510 if ((TREE_CODE (arg0) == MULT_EXPR
11511 || TREE_CODE (arg1) == MULT_EXPR)
11512 && !TYPE_SATURATING (type)
11513 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11514 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11515 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11517 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11518 if (tem)
11519 return tem;
11522 goto associate;
11524 case MULT_EXPR:
11525 if (! FLOAT_TYPE_P (type))
11527 /* Transform x * -C into -x * C if x is easily negatable. */
11528 if (TREE_CODE (op1) == INTEGER_CST
11529 && tree_int_cst_sgn (op1) == -1
11530 && negate_expr_p (op0)
11531 && negate_expr_p (op1)
11532 && (tem = negate_expr (op1)) != op1
11533 && ! TREE_OVERFLOW (tem))
11534 return fold_build2_loc (loc, MULT_EXPR, type,
11535 fold_convert_loc (loc, type,
11536 negate_expr (op0)), tem);
11538 strict_overflow_p = false;
11539 if (TREE_CODE (arg1) == INTEGER_CST
11540 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11541 &strict_overflow_p)) != 0)
11543 if (strict_overflow_p)
11544 fold_overflow_warning (("assuming signed overflow does not "
11545 "occur when simplifying "
11546 "multiplication"),
11547 WARN_STRICT_OVERFLOW_MISC);
11548 return fold_convert_loc (loc, type, tem);
11551 /* Optimize z * conj(z) for integer complex numbers. */
11552 if (TREE_CODE (arg0) == CONJ_EXPR
11553 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11554 return fold_mult_zconjz (loc, type, arg1);
11555 if (TREE_CODE (arg1) == CONJ_EXPR
11556 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11557 return fold_mult_zconjz (loc, type, arg0);
11559 else
11561 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11562 This is not the same for NaNs or if signed zeros are
11563 involved. */
11564 if (!HONOR_NANS (arg0)
11565 && !HONOR_SIGNED_ZEROS (arg0)
11566 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11567 && TREE_CODE (arg1) == COMPLEX_CST
11568 && real_zerop (TREE_REALPART (arg1)))
11570 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11571 if (real_onep (TREE_IMAGPART (arg1)))
11572 return
11573 fold_build2_loc (loc, COMPLEX_EXPR, type,
11574 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11575 rtype, arg0)),
11576 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11577 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11578 return
11579 fold_build2_loc (loc, COMPLEX_EXPR, type,
11580 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11581 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11582 rtype, arg0)));
11585 /* Optimize z * conj(z) for floating point complex numbers.
11586 Guarded by flag_unsafe_math_optimizations as non-finite
11587 imaginary components don't produce scalar results. */
11588 if (flag_unsafe_math_optimizations
11589 && TREE_CODE (arg0) == CONJ_EXPR
11590 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11591 return fold_mult_zconjz (loc, type, arg1);
11592 if (flag_unsafe_math_optimizations
11593 && TREE_CODE (arg1) == CONJ_EXPR
11594 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11595 return fold_mult_zconjz (loc, type, arg0);
11597 goto associate;
11599 case BIT_IOR_EXPR:
11600 /* Canonicalize (X & C1) | C2. */
11601 if (TREE_CODE (arg0) == BIT_AND_EXPR
11602 && TREE_CODE (arg1) == INTEGER_CST
11603 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11605 int width = TYPE_PRECISION (type), w;
11606 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11607 wide_int c2 = wi::to_wide (arg1);
11609 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11610 if ((c1 & c2) == c1)
11611 return omit_one_operand_loc (loc, type, arg1,
11612 TREE_OPERAND (arg0, 0));
11614 wide_int msk = wi::mask (width, false,
11615 TYPE_PRECISION (TREE_TYPE (arg1)));
11617 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11618 if (wi::bit_and_not (msk, c1 | c2) == 0)
11620 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11621 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11624 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11625 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11626 mode which allows further optimizations. */
11627 c1 &= msk;
11628 c2 &= msk;
11629 wide_int c3 = wi::bit_and_not (c1, c2);
11630 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11632 wide_int mask = wi::mask (w, false,
11633 TYPE_PRECISION (type));
11634 if (((c1 | c2) & mask) == mask
11635 && wi::bit_and_not (c1, mask) == 0)
11637 c3 = mask;
11638 break;
11642 if (c3 != c1)
11644 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11645 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11646 wide_int_to_tree (type, c3));
11647 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
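/* Worked examples for the three cases above, with a 32-bit unsigned x:

     (x & 0x0f) | 0xff         -> 0xff               (C1 & C2) == C1
     (x & 0xfffffff0) | 0x0f   -> x | 0x0f           (C1 | C2) == ~0
     (x & 0x3f) | 0x0f         -> (x & 0x30) | 0x0f  C1 := C1 & ~C2

   The mask loop keeps C1 intact instead when C1 | C2 fills a mask of a
   natural width that already contains C1 (e.g. C1 == 0xff with
   C2 == 0x0f), since such masks enable other optimizations.  */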
11651 /* See if this can be simplified into a rotate first. If that
11652 is unsuccessful continue in the association code. */
11653 goto bit_rotate;
11655 case BIT_XOR_EXPR:
11656 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11657 if (TREE_CODE (arg0) == BIT_AND_EXPR
11658 && INTEGRAL_TYPE_P (type)
11659 && integer_onep (TREE_OPERAND (arg0, 1))
11660 && integer_onep (arg1))
11661 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11662 build_zero_cst (TREE_TYPE (arg0)));
11664 /* See if this can be simplified into a rotate first. If that
11665 is unsuccessful continue in the association code. */
11666 goto bit_rotate;
11668 case BIT_AND_EXPR:
11669 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11670 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11671 && INTEGRAL_TYPE_P (type)
11672 && integer_onep (TREE_OPERAND (arg0, 1))
11673 && integer_onep (arg1))
11675 tree tem2;
11676 tem = TREE_OPERAND (arg0, 0);
11677 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11678 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11679 tem, tem2);
11680 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11681 build_zero_cst (TREE_TYPE (tem)));
11683 /* Fold ~X & 1 as (X & 1) == 0. */
11684 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11685 && INTEGRAL_TYPE_P (type)
11686 && integer_onep (arg1))
11688 tree tem2;
11689 tem = TREE_OPERAND (arg0, 0);
11690 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11691 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11692 tem, tem2);
11693 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11694 build_zero_cst (TREE_TYPE (tem)));
11696 /* Fold !X & 1 as X == 0. */
11697 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11698 && integer_onep (arg1))
11700 tem = TREE_OPERAND (arg0, 0);
11701 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11702 build_zero_cst (TREE_TYPE (tem)));
11705 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11706 multiple of 1 << CST. */
11707 if (TREE_CODE (arg1) == INTEGER_CST)
11709 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11710 wide_int ncst1 = -cst1;
11711 if ((cst1 & ncst1) == ncst1
11712 && multiple_of_p (type, arg0,
11713 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11714 return fold_convert_loc (loc, type, arg0);
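/* E.g. (x * 8) & -8 folds to x * 8: here CST is 3, -(1 << 3) == -8
   keeps every multiple of 8 unchanged, and x * 8 is trivially such a
   multiple.  */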
11717 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11718 bits from CST2. */
11719 if (TREE_CODE (arg1) == INTEGER_CST
11720 && TREE_CODE (arg0) == MULT_EXPR
11721 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11723 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11724 wide_int masked
11725 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11727 if (masked == 0)
11728 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11729 arg0, arg1);
11730 else if (masked != warg1)
11732 /* Avoid the transform if arg1 is a mask of some
11733 mode which allows further optimizations. */
11734 int pop = wi::popcount (warg1);
11735 if (!(pop >= BITS_PER_UNIT
11736 && pow2p_hwi (pop)
11737 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11738 return fold_build2_loc (loc, code, type, op0,
11739 wide_int_to_tree (type, masked));
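/* For example, (x * 4) & 7 becomes (x * 4) & 4, because the two low
   bits of x * 4 are known zero, and (x * 4) & 3 folds to 0 outright.
   The popcount check declines byte-sized masks such as 0xff, which
   other patterns prefer to keep intact.  */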
11743 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11744 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11745 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11747 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11749 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11750 if (mask == -1)
11751 return
11752 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11755 goto associate;
11757 case RDIV_EXPR:
11758 /* Don't touch a floating-point divide by zero unless the mode
11759 of the constant can represent infinity. */
11760 if (TREE_CODE (arg1) == REAL_CST
11761 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11762 && real_zerop (arg1))
11763 return NULL_TREE;
11765 /* (-A) / (-B) -> A / B */
11766 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11767 return fold_build2_loc (loc, RDIV_EXPR, type,
11768 TREE_OPERAND (arg0, 0),
11769 negate_expr (arg1));
11770 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11771 return fold_build2_loc (loc, RDIV_EXPR, type,
11772 negate_expr (arg0),
11773 TREE_OPERAND (arg1, 0));
11774 return NULL_TREE;
11776 case TRUNC_DIV_EXPR:
11777 /* Fall through */
11779 case FLOOR_DIV_EXPR:
11780 /* Simplify A / (B << N) where A and B are positive and B is
11781 a power of 2, to A >> (N + log2(B)). */
11782 strict_overflow_p = false;
11783 if (TREE_CODE (arg1) == LSHIFT_EXPR
11784 && (TYPE_UNSIGNED (type)
11785 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11787 tree sval = TREE_OPERAND (arg1, 0);
11788 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11790 tree sh_cnt = TREE_OPERAND (arg1, 1);
11791 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11792 wi::exact_log2 (wi::to_wide (sval)));
11794 if (strict_overflow_p)
11795 fold_overflow_warning (("assuming signed overflow does not "
11796 "occur when simplifying A / (B << N)"),
11797 WARN_STRICT_OVERFLOW_MISC);
11799 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11800 sh_cnt, pow2);
11801 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11802 fold_convert_loc (loc, type, arg0), sh_cnt);
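/* E.g. for unsigned x, x / (4u << n) becomes x >> (n + 2), since
   B == 4 contributes log2(4) == 2 to the shift count; a single shift
   replaces the division.  */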
11806 /* Fall through */
11808 case ROUND_DIV_EXPR:
11809 case CEIL_DIV_EXPR:
11810 case EXACT_DIV_EXPR:
11811 if (integer_zerop (arg1))
11812 return NULL_TREE;
11814 /* Convert -A / -B to A / B when the type is signed and overflow is
11815 undefined. */
11816 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11817 && TREE_CODE (op0) == NEGATE_EXPR
11818 && negate_expr_p (op1))
11820 if (ANY_INTEGRAL_TYPE_P (type))
11821 fold_overflow_warning (("assuming signed overflow does not occur "
11822 "when distributing negation across "
11823 "division"),
11824 WARN_STRICT_OVERFLOW_MISC);
11825 return fold_build2_loc (loc, code, type,
11826 fold_convert_loc (loc, type,
11827 TREE_OPERAND (arg0, 0)),
11828 negate_expr (op1));
11830 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11831 && TREE_CODE (arg1) == NEGATE_EXPR
11832 && negate_expr_p (op0))
11834 if (ANY_INTEGRAL_TYPE_P (type))
11835 fold_overflow_warning (("assuming signed overflow does not occur "
11836 "when distributing negation across "
11837 "division"),
11838 WARN_STRICT_OVERFLOW_MISC);
11839 return fold_build2_loc (loc, code, type,
11840 negate_expr (op0),
11841 fold_convert_loc (loc, type,
11842 TREE_OPERAND (arg1, 0)));
11845 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11846 operation, EXACT_DIV_EXPR.
11848 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11849 At one time others generated faster code; it's not clear whether they still do
11850 after the last round of changes to the DIV code in expmed.c. */
11851 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11852 && multiple_of_p (type, arg0, arg1))
11853 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11854 fold_convert (type, arg0),
11855 fold_convert (type, arg1));
11857 strict_overflow_p = false;
11858 if (TREE_CODE (arg1) == INTEGER_CST
11859 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11860 &strict_overflow_p)) != 0)
11862 if (strict_overflow_p)
11863 fold_overflow_warning (("assuming signed overflow does not occur "
11864 "when simplifying division"),
11865 WARN_STRICT_OVERFLOW_MISC);
11866 return fold_convert_loc (loc, type, tem);
11869 return NULL_TREE;
11871 case CEIL_MOD_EXPR:
11872 case FLOOR_MOD_EXPR:
11873 case ROUND_MOD_EXPR:
11874 case TRUNC_MOD_EXPR:
11875 strict_overflow_p = false;
11876 if (TREE_CODE (arg1) == INTEGER_CST
11877 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11878 &strict_overflow_p)) != 0)
11880 if (strict_overflow_p)
11881 fold_overflow_warning (("assuming signed overflow does not occur "
11882 "when simplifying modulus"),
11883 WARN_STRICT_OVERFLOW_MISC);
11884 return fold_convert_loc (loc, type, tem);
11887 return NULL_TREE;
11889 case LROTATE_EXPR:
11890 case RROTATE_EXPR:
11891 case RSHIFT_EXPR:
11892 case LSHIFT_EXPR:
11893 /* Since a negative shift count is not well-defined,
11894 don't try to compute it in the compiler. */
11895 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11896 return NULL_TREE;
11898 prec = element_precision (type);
11900 /* If we have a rotate of a bit operation with the rotate count and
11901 the second operand of the bit operation both constant,
11902 permute the two operations. */
11903 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11904 && (TREE_CODE (arg0) == BIT_AND_EXPR
11905 || TREE_CODE (arg0) == BIT_IOR_EXPR
11906 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11907 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11909 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11910 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11911 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11912 fold_build2_loc (loc, code, type,
11913 arg00, arg1),
11914 fold_build2_loc (loc, code, type,
11915 arg01, arg1));
11918 /* Two consecutive rotates adding up to some integer
11919 multiple of the precision of the type can be ignored. */
11920 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11921 && TREE_CODE (arg0) == RROTATE_EXPR
11922 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11923 && wi::umod_trunc (wi::to_wide (arg1)
11924 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11925 prec) == 0)
11926 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
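/* Two quick examples with a 32-bit x: under the permutation above,
   (x & 0xf0) ror 4 becomes (x ror 4) & (0xf0 ror 4), i.e.
   (x ror 4) & 0x0f; and (x ror 10) ror 22 disappears entirely because
   10 + 22 == 32, the full precision.  */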
11928 return NULL_TREE;
11930 case MIN_EXPR:
11931 case MAX_EXPR:
11932 goto associate;
11934 case TRUTH_ANDIF_EXPR:
11935 /* Note that the operands of this must be ints
11936 and their values must be 0 or 1.
11937 ("true" is a fixed value perhaps depending on the language.) */
11938 /* If first arg is constant zero, return it. */
11939 if (integer_zerop (arg0))
11940 return fold_convert_loc (loc, type, arg0);
11941 /* FALLTHRU */
11942 case TRUTH_AND_EXPR:
11943 /* If either arg is constant true, drop it. */
11944 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11945 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11946 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11947 /* Preserve sequence points. */
11948 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11949 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11950 /* If second arg is constant zero, result is zero, but first arg
11951 must be evaluated. */
11952 if (integer_zerop (arg1))
11953 return omit_one_operand_loc (loc, type, arg1, arg0);
11954 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11955 case will be handled here. */
11956 if (integer_zerop (arg0))
11957 return omit_one_operand_loc (loc, type, arg0, arg1);
11959 /* !X && X is always false. */
11960 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11962 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11963 /* X && !X is always false. */
11964 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11965 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11966 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11968 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11969 means A >= Y && A != MAX, but in this case we know that
11970 A < X <= MAX. */
11972 if (!TREE_SIDE_EFFECTS (arg0)
11973 && !TREE_SIDE_EFFECTS (arg1))
11975 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11976 if (tem && !operand_equal_p (tem, arg0, 0))
11977 return fold_build2_loc (loc, code, type, tem, arg1);
11979 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11980 if (tem && !operand_equal_p (tem, arg1, 0))
11981 return fold_build2_loc (loc, code, type, arg0, tem);
11984 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11985 != NULL_TREE)
11986 return tem;
11988 return NULL_TREE;
11990 case TRUTH_ORIF_EXPR:
11991 /* Note that the operands of this must be ints
11992 and their values must be 0 or true.
11993 ("true" is a fixed value perhaps depending on the language.) */
11994 /* If first arg is constant true, return it. */
11995 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11996 return fold_convert_loc (loc, type, arg0);
11997 /* FALLTHRU */
11998 case TRUTH_OR_EXPR:
11999 /* If either arg is constant zero, drop it. */
12000 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12001 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12002 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12003 /* Preserve sequence points. */
12004 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12005 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12006 /* If second arg is constant true, result is true, but we must
12007 evaluate first arg. */
12008 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12009 return omit_one_operand_loc (loc, type, arg1, arg0);
12010 /* Likewise for first arg, but note this only occurs here for
12011 TRUTH_OR_EXPR. */
12012 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12013 return omit_one_operand_loc (loc, type, arg0, arg1);
12015 /* !X || X is always true. */
12016 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12017 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12018 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12019 /* X || !X is always true. */
12020 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12021 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12022 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12024 /* (X && !Y) || (!X && Y) is X ^ Y */
12025 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12026 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12028 tree a0, a1, l0, l1, n0, n1;
12030 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12031 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12033 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12034 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12036 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12037 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12039 if ((operand_equal_p (n0, a0, 0)
12040 && operand_equal_p (n1, a1, 0))
12041 || (operand_equal_p (n0, a1, 0)
12042 && operand_equal_p (n1, a0, 0)))
12043 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12046 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12047 != NULL_TREE)
12048 return tem;
12050 return NULL_TREE;
12052 case TRUTH_XOR_EXPR:
12053 /* If the second arg is constant zero, drop it. */
12054 if (integer_zerop (arg1))
12055 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12056 /* If the second arg is constant true, this is a logical inversion. */
12057 if (integer_onep (arg1))
12059 tem = invert_truthvalue_loc (loc, arg0);
12060 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12062 /* Identical arguments cancel to zero. */
12063 if (operand_equal_p (arg0, arg1, 0))
12064 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12066 /* !X ^ X is always true. */
12067 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12068 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12069 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12071 /* X ^ !X is always true. */
12072 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12073 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12074 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12076 return NULL_TREE;
12078 case EQ_EXPR:
12079 case NE_EXPR:
12080 STRIP_NOPS (arg0);
12081 STRIP_NOPS (arg1);
12083 tem = fold_comparison (loc, code, type, op0, op1);
12084 if (tem != NULL_TREE)
12085 return tem;
12087 /* bool_var != 1 becomes !bool_var. */
12088 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12089 && code == NE_EXPR)
12090 return fold_convert_loc (loc, type,
12091 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12092 TREE_TYPE (arg0), arg0));
12094 /* bool_var == 0 becomes !bool_var. */
12095 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12096 && code == EQ_EXPR)
12097 return fold_convert_loc (loc, type,
12098 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12099 TREE_TYPE (arg0), arg0));
12101 /* !exp != 0 becomes !exp */
12102 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12103 && code == NE_EXPR)
12104 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12106 /* If this is an EQ or NE comparison with zero and ARG0 is
12107 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12108 two operations, but the latter can be done in one less insn
12109 on machines that have only two-operand insns or on which a
12110 constant cannot be the first operand. */
12111 if (TREE_CODE (arg0) == BIT_AND_EXPR
12112 && integer_zerop (arg1))
12114 tree arg00 = TREE_OPERAND (arg0, 0);
12115 tree arg01 = TREE_OPERAND (arg0, 1);
12116 if (TREE_CODE (arg00) == LSHIFT_EXPR
12117 && integer_onep (TREE_OPERAND (arg00, 0)))
12119 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12120 arg01, TREE_OPERAND (arg00, 1));
12121 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12122 build_one_cst (TREE_TYPE (arg0)));
12123 return fold_build2_loc (loc, code, type,
12124 fold_convert_loc (loc, TREE_TYPE (arg1),
12125 tem), arg1);
12127 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12128 && integer_onep (TREE_OPERAND (arg01, 0)))
12130 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12131 arg00, TREE_OPERAND (arg01, 1));
12132 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12133 build_one_cst (TREE_TYPE (arg0)));
12134 return fold_build2_loc (loc, code, type,
12135 fold_convert_loc (loc, TREE_TYPE (arg1),
12136 tem), arg1);
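/* So e.g. ((1 << n) & flags) != 0 is rewritten as
   ((flags >> n) & 1) != 0, trading the variable-position mask for a
   shift of the tested word, which needs one less instruction on
   two-operand machines.  */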
12140 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12141 C1 is a valid shift constant, and C2 is a power of two, i.e.
12142 a single bit. */
12143 if (TREE_CODE (arg0) == BIT_AND_EXPR
12144 && integer_pow2p (TREE_OPERAND (arg0, 1))
12145 && integer_zerop (arg1))
12147 tree arg00 = TREE_OPERAND (arg0, 0);
12148 STRIP_NOPS (arg00);
12149 if (TREE_CODE (arg00) == RSHIFT_EXPR
12150 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12152 tree itype = TREE_TYPE (arg00);
12153 tree arg001 = TREE_OPERAND (arg00, 1);
12154 prec = TYPE_PRECISION (itype);
12156 /* Check for a valid shift count. */
12157 if (wi::ltu_p (wi::to_wide (arg001), prec))
12159 tree arg01 = TREE_OPERAND (arg0, 1);
12160 tree arg000 = TREE_OPERAND (arg00, 0);
12161 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12162 /* If (C2 << C1) doesn't overflow, then
12163 ((X >> C1) & C2) != 0 can be rewritten as
12164 (X & (C2 << C1)) != 0. */
12165 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12167 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12168 arg01, arg001);
12169 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12170 arg000, tem);
12171 return fold_build2_loc (loc, code, type, tem,
12172 fold_convert_loc (loc, itype, arg1));
12174 /* Otherwise, for signed (arithmetic) shifts,
12175 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12176 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12177 else if (!TYPE_UNSIGNED (itype))
12178 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12179 : LT_EXPR,
12180 type, arg000,
12181 build_int_cst (itype, 0));
12182 /* Otherwise, for unsigned (logical) shifts,
12183 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12184 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12185 else
12186 return omit_one_operand_loc (loc, type,
12187 code == EQ_EXPR ? integer_one_node
12188 : integer_zero_node,
12189 arg000);
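/* Examples with 32-bit values: ((x >> 3) & 4) != 0 becomes
   (x & 32) != 0, since 4 << 3 == 32 does not overflow; for signed x,
   ((x >> 31) & 2) != 0 becomes x < 0, because an arithmetic shift by
   31 yields only 0 or -1.  */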
12194 /* If this is a comparison of a field, we may be able to simplify it. */
12195 if ((TREE_CODE (arg0) == COMPONENT_REF
12196 || TREE_CODE (arg0) == BIT_FIELD_REF)
12197 /* Handle the constant case even without -O
12198 to make sure the warnings are given. */
12199 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12201 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12202 if (t1)
12203 return t1;
12206 /* Optimize comparisons of strlen vs zero to a compare of the
12207 first character of the string vs zero. To wit,
12208 strlen(ptr) == 0 => *ptr == 0
12209 strlen(ptr) != 0 => *ptr != 0
12210 Other cases should reduce to one of these two (or a constant)
12211 due to the return value of strlen being unsigned. */
12212 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12214 tree fndecl = get_callee_fndecl (arg0);
12216 if (fndecl
12217 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12218 && call_expr_nargs (arg0) == 1
12219 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12220 == POINTER_TYPE))
12222 tree ptrtype
12223 = build_pointer_type (build_qualified_type (char_type_node,
12224 TYPE_QUAL_CONST));
12225 tree ptr = fold_convert_loc (loc, ptrtype,
12226 CALL_EXPR_ARG (arg0, 0));
12227 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12228 return fold_build2_loc (loc, code, type, iref,
12229 build_int_cst (TREE_TYPE (iref), 0));
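/* I.e. strlen (s) == 0 is folded to *(const char *) s == 0, reading
   just the first byte instead of scanning the whole string.  */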
12233 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12234 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12235 if (TREE_CODE (arg0) == RSHIFT_EXPR
12236 && integer_zerop (arg1)
12237 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12239 tree arg00 = TREE_OPERAND (arg0, 0);
12240 tree arg01 = TREE_OPERAND (arg0, 1);
12241 tree itype = TREE_TYPE (arg00);
12242 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12244 if (TYPE_UNSIGNED (itype))
12246 itype = signed_type_for (itype);
12247 arg00 = fold_convert_loc (loc, itype, arg00);
12249 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12250 type, arg00, build_zero_cst (itype));
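/* E.g. for 32-bit unsigned x, (x >> 31) != 0 becomes (int) x < 0: the
   shift isolates the sign bit, so the test is just a signed compare
   against zero.  */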
12254 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12255 (X & C) == 0 when C is a single bit. */
12256 if (TREE_CODE (arg0) == BIT_AND_EXPR
12257 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12258 && integer_zerop (arg1)
12259 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12261 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12262 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12263 TREE_OPERAND (arg0, 1));
12264 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12265 type, tem,
12266 fold_convert_loc (loc, TREE_TYPE (arg0),
12267 arg1));
12270 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12271 constant C is a power of two, i.e. a single bit. */
12272 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12273 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12274 && integer_zerop (arg1)
12275 && integer_pow2p (TREE_OPERAND (arg0, 1))
12276 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12277 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12279 tree arg00 = TREE_OPERAND (arg0, 0);
12280 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12281 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12284 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12285 when C is a power of two, i.e. a single bit. */
12286 if (TREE_CODE (arg0) == BIT_AND_EXPR
12287 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12288 && integer_zerop (arg1)
12289 && integer_pow2p (TREE_OPERAND (arg0, 1))
12290 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12291 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12293 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12294 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12295 arg000, TREE_OPERAND (arg0, 1));
12296 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12297 tem, build_int_cst (TREE_TYPE (tem), 0));
12300 if (integer_zerop (arg1)
12301 && tree_expr_nonzero_p (arg0))
12303 tree res = constant_boolean_node (code==NE_EXPR, type);
12304 return omit_one_operand_loc (loc, type, res, arg0);
12307 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12308 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12310 tree arg00 = TREE_OPERAND (arg0, 0);
12311 tree arg01 = TREE_OPERAND (arg0, 1);
12312 tree arg10 = TREE_OPERAND (arg1, 0);
12313 tree arg11 = TREE_OPERAND (arg1, 1);
12314 tree itype = TREE_TYPE (arg0);
12316 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12317 operand_equal_p guarantees no side-effects so we don't need
12318 to use omit_one_operand on Z. */
12319 if (operand_equal_p (arg01, arg11, 0))
12320 return fold_build2_loc (loc, code, type, arg00,
12321 fold_convert_loc (loc, TREE_TYPE (arg00),
12322 arg10));
12323 if (operand_equal_p (arg01, arg10, 0))
12324 return fold_build2_loc (loc, code, type, arg00,
12325 fold_convert_loc (loc, TREE_TYPE (arg00),
12326 arg11));
12327 if (operand_equal_p (arg00, arg11, 0))
12328 return fold_build2_loc (loc, code, type, arg01,
12329 fold_convert_loc (loc, TREE_TYPE (arg01),
12330 arg10));
12331 if (operand_equal_p (arg00, arg10, 0))
12332 return fold_build2_loc (loc, code, type, arg01,
12333 fold_convert_loc (loc, TREE_TYPE (arg01),
12334 arg11));
12336 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12337 if (TREE_CODE (arg01) == INTEGER_CST
12338 && TREE_CODE (arg11) == INTEGER_CST)
12340 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12341 fold_convert_loc (loc, itype, arg11));
12342 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12343 return fold_build2_loc (loc, code, type, tem,
12344 fold_convert_loc (loc, itype, arg10));
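/* Sketches: (x ^ z) == (y ^ z) simplifies to x == y by cancelling the
   common term, and (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y, since
   5 ^ 3 == 6.  */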
12348 /* Attempt to simplify equality/inequality comparisons of complex
12349 values. Only lower the comparison if the result is known or
12350 can be simplified to a single scalar comparison. */
12351 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12352 || TREE_CODE (arg0) == COMPLEX_CST)
12353 && (TREE_CODE (arg1) == COMPLEX_EXPR
12354 || TREE_CODE (arg1) == COMPLEX_CST))
12356 tree real0, imag0, real1, imag1;
12357 tree rcond, icond;
12359 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12361 real0 = TREE_OPERAND (arg0, 0);
12362 imag0 = TREE_OPERAND (arg0, 1);
12364 else
12366 real0 = TREE_REALPART (arg0);
12367 imag0 = TREE_IMAGPART (arg0);
12370 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12372 real1 = TREE_OPERAND (arg1, 0);
12373 imag1 = TREE_OPERAND (arg1, 1);
12375 else
12377 real1 = TREE_REALPART (arg1);
12378 imag1 = TREE_IMAGPART (arg1);
12381 rcond = fold_binary_loc (loc, code, type, real0, real1);
12382 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12384 if (integer_zerop (rcond))
12386 if (code == EQ_EXPR)
12387 return omit_two_operands_loc (loc, type, boolean_false_node,
12388 imag0, imag1);
12389 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12391 else
12393 if (code == NE_EXPR)
12394 return omit_two_operands_loc (loc, type, boolean_true_node,
12395 imag0, imag1);
12396 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12400 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12401 if (icond && TREE_CODE (icond) == INTEGER_CST)
12403 if (integer_zerop (icond))
12405 if (code == EQ_EXPR)
12406 return omit_two_operands_loc (loc, type, boolean_false_node,
12407 real0, real1);
12408 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12410 else
12412 if (code == NE_EXPR)
12413 return omit_two_operands_loc (loc, type, boolean_true_node,
12414 real0, real1);
12415 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12420 return NULL_TREE;
12422 case LT_EXPR:
12423 case GT_EXPR:
12424 case LE_EXPR:
12425 case GE_EXPR:
12426 tem = fold_comparison (loc, code, type, op0, op1);
12427 if (tem != NULL_TREE)
12428 return tem;
12430 /* Transform comparisons of the form X +- C CMP X. */
12431 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12432 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12434 && !HONOR_SNANS (arg0))
12436 tree arg01 = TREE_OPERAND (arg0, 1);
12437 enum tree_code code0 = TREE_CODE (arg0);
12438 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12440 /* (X - c) > X becomes false. */
12441 if (code == GT_EXPR
12442 && ((code0 == MINUS_EXPR && is_positive >= 0)
12443 || (code0 == PLUS_EXPR && is_positive <= 0)))
12444 return constant_boolean_node (0, type);
12446 /* Likewise (X + c) < X becomes false. */
12447 if (code == LT_EXPR
12448 && ((code0 == PLUS_EXPR && is_positive >= 0)
12449 || (code0 == MINUS_EXPR && is_positive <= 0)))
12450 return constant_boolean_node (0, type);
12452 /* Convert (X - c) <= X to true. */
12453 if (!HONOR_NANS (arg1)
12454 && code == LE_EXPR
12455 && ((code0 == MINUS_EXPR && is_positive >= 0)
12456 || (code0 == PLUS_EXPR && is_positive <= 0)))
12457 return constant_boolean_node (1, type);
12459 /* Convert (X + c) >= X to true. */
12460 if (!HONOR_NANS (arg1)
12461 && code == GE_EXPR
12462 && ((code0 == PLUS_EXPR && is_positive >= 0)
12463 || (code0 == MINUS_EXPR && is_positive <= 0)))
12464 return constant_boolean_node (1, type);
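/* The asymmetry in the guards is deliberate: (x - 1.0) > x folds to
   false even when x is a NaN, since the original comparison is false
   then too, but (x + 1.0) >= x folds to true only under !HONOR_NANS,
   because a NaN x makes the original comparison false.  */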
12467 /* If we are comparing an ABS_EXPR with a constant, we can
12468 convert all the cases into explicit comparisons, but they may
12469 well not be faster than doing the ABS and one comparison.
12470 But ABS (X) <= C is a range comparison, which becomes a subtraction
12471 and a comparison, and is probably faster. */
12472 if (code == LE_EXPR
12473 && TREE_CODE (arg1) == INTEGER_CST
12474 && TREE_CODE (arg0) == ABS_EXPR
12475 && ! TREE_SIDE_EFFECTS (arg0)
12476 && (tem = negate_expr (arg1)) != 0
12477 && TREE_CODE (tem) == INTEGER_CST
12478 && !TREE_OVERFLOW (tem))
12479 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12480 build2 (GE_EXPR, type,
12481 TREE_OPERAND (arg0, 0), tem),
12482 build2 (LE_EXPR, type,
12483 TREE_OPERAND (arg0, 0), arg1));
12485 /* Convert ABS_EXPR<x> >= 0 to true. */
12486 strict_overflow_p = false;
12487 if (code == GE_EXPR
12488 && (integer_zerop (arg1)
12489 || (! HONOR_NANS (arg0)
12490 && real_zerop (arg1)))
12491 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12493 if (strict_overflow_p)
12494 fold_overflow_warning (("assuming signed overflow does not occur "
12495 "when simplifying comparison of "
12496 "absolute value and zero"),
12497 WARN_STRICT_OVERFLOW_CONDITIONAL);
12498 return omit_one_operand_loc (loc, type,
12499 constant_boolean_node (true, type),
12500 arg0);
12503 /* Convert ABS_EXPR<x> < 0 to false. */
12504 strict_overflow_p = false;
12505 if (code == LT_EXPR
12506 && (integer_zerop (arg1) || real_zerop (arg1))
12507 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12509 if (strict_overflow_p)
12510 fold_overflow_warning (("assuming signed overflow does not occur "
12511 "when simplifying comparison of "
12512 "absolute value and zero"),
12513 WARN_STRICT_OVERFLOW_CONDITIONAL);
12514 return omit_one_operand_loc (loc, type,
12515 constant_boolean_node (false, type),
12516 arg0);
12519 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12520 and similarly for >= into !=. */
12521 if ((code == LT_EXPR || code == GE_EXPR)
12522 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12523 && TREE_CODE (arg1) == LSHIFT_EXPR
12524 && integer_onep (TREE_OPERAND (arg1, 0)))
12525 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12526 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12527 TREE_OPERAND (arg1, 1)),
12528 build_zero_cst (TREE_TYPE (arg0)));
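/* E.g. for unsigned x, x < (1 << y) holds exactly when x has no bit
   set at position y or above, i.e. when x >> y == 0, and dually
   x >= (1 << y) becomes x >> y != 0.  */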
12530 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12531 otherwise Y might be >= # of bits in X's type and thus e.g.
12532 (unsigned char) (1 << Y) for Y 15 might be 0.
12533 If the cast is widening, then 1 << Y should have unsigned type,
12534 otherwise if Y is number of bits in the signed shift type minus 1,
12535 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12536 31 might be 0xffffffff80000000. */
12537 if ((code == LT_EXPR || code == GE_EXPR)
12538 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12539 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12540 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12541 && CONVERT_EXPR_P (arg1)
12542 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12543 && (element_precision (TREE_TYPE (arg1))
12544 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12545 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12546 || (element_precision (TREE_TYPE (arg1))
12547 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12548 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12550 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12551 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12552 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12553 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12554 build_zero_cst (TREE_TYPE (arg0)));
12557 return NULL_TREE;
12559 case UNORDERED_EXPR:
12560 case ORDERED_EXPR:
12561 case UNLT_EXPR:
12562 case UNLE_EXPR:
12563 case UNGT_EXPR:
12564 case UNGE_EXPR:
12565 case UNEQ_EXPR:
12566 case LTGT_EXPR:
12567 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
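/* E.g. if f1 and f2 are floats, (double) f1 UNLT (double) f2 can be
   evaluated as f1 UNLT f2: the widening conversions are exact, so
   both forms compare the same values.  The guard below only narrows
   when no precision would be lost.  */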
12569 tree targ0 = strip_float_extensions (arg0);
12570 tree targ1 = strip_float_extensions (arg1);
12571 tree newtype = TREE_TYPE (targ0);
12573 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12574 newtype = TREE_TYPE (targ1);
12576 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12577 return fold_build2_loc (loc, code, type,
12578 fold_convert_loc (loc, newtype, targ0),
12579 fold_convert_loc (loc, newtype, targ1));
12582 return NULL_TREE;
12584 case COMPOUND_EXPR:
12585 /* When pedantic, a compound expression can be neither an lvalue
12586 nor an integer constant expression. */
12587 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12588 return NULL_TREE;
12589 /* Don't let (0, 0) be a null pointer constant. */
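/* E.g. the C expression (0, 0) must fold to a 0 wrapped in a
   NOP_EXPR rather than to a bare literal 0: a bare 0 would qualify
   as a null pointer constant, silently changing the meaning of code
   like p = (0, 0).  */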
12590 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12591 : fold_convert_loc (loc, type, arg1);
12592 return tem;
12594 case ASSERT_EXPR:
12595 /* An ASSERT_EXPR should never be passed to fold_binary. */
12596 gcc_unreachable ();
12598 default:
12599 return NULL_TREE;
12600 } /* switch (code) */
12603 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12604 ((A & N) + B) & M -> (A + B) & M
12605 Similarly if (N & M) == 0,
12606 ((A | N) + B) & M -> (A + B) & M
12607 and for - instead of + (or unary - instead of +)
12608 and/or ^ instead of |.
12609 If B is constant and (B & M) == 0, fold into A & M.
12611 This function is a helper for match.pd patterns. Return the type
12612 in which the simplified operation should be performed, or NULL_TREE
12613 if no optimization is possible.
12615 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12616 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12617 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12618 +/-. */
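/* For instance, with M == 7 (so cst == 3) and N == 0xff, the test
   (N & M) == M holds and

     ((a & 0xff) + b) & 7  ->  (a + b) & 7

   is valid: bit k of a sum depends only on bits 0..k of the addends,
   and the inner mask leaves bits 0..2 of A intact.  */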
12619 tree
12620 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12621 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12622 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12623 tree *pmop)
12625 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12626 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12627 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12628 if (~cst1 == 0
12629 || (cst1 & (cst1 + 1)) != 0
12630 || !INTEGRAL_TYPE_P (type)
12631 || (!TYPE_OVERFLOW_WRAPS (type)
12632 && TREE_CODE (type) != INTEGER_TYPE)
12633 || (wi::max_value (type) & cst1) != cst1)
12634 return NULL_TREE;
12636 enum tree_code codes[2] = { code00, code01 };
12637 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12638 int which = 0;
12639 wide_int cst0;
12641 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12642 arg1 (M) is (1LL << cst) - 1.
12643 Store C into PMOP[0] and D into PMOP[1]. */
12644 pmop[0] = arg00;
12645 pmop[1] = arg01;
12646 which = code != NEGATE_EXPR;
12648 for (; which >= 0; which--)
12649 switch (codes[which])
12651 case BIT_AND_EXPR:
12652 case BIT_IOR_EXPR:
12653 case BIT_XOR_EXPR:
12654 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12655 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12656 if (codes[which] == BIT_AND_EXPR)
12658 if (cst0 != cst1)
12659 break;
12661 else if (cst0 != 0)
12662 break;
12663 /* If C or D is of the form (A & N) where
12664 (N & M) == M, or of the form (A | N) or
12665 (A ^ N) where (N & M) == 0, replace it with A. */
12666 pmop[which] = arg0xx[2 * which];
12667 break;
12668 case ERROR_MARK:
12669 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12670 break;
12671 /* If C or D is a constant N where (N & M) == 0, it can be
12672 omitted (replaced with 0). */
12673 if ((code == PLUS_EXPR
12674 || (code == MINUS_EXPR && which == 0))
12675 && (cst1 & wi::to_wide (pmop[which])) == 0)
12676 pmop[which] = build_int_cst (type, 0);
12677 /* Similarly, with C - N where (-N & M) == 0. */
12678 if (code == MINUS_EXPR
12679 && which == 1
12680 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12681 pmop[which] = build_int_cst (type, 0);
12682 break;
12683 default:
12684 gcc_unreachable ();
12687 /* Only build anything new if we optimized one or both arguments above. */
12688 if (pmop[0] == arg00 && pmop[1] == arg01)
12689 return NULL_TREE;
12691 if (TYPE_OVERFLOW_WRAPS (type))
12692 return type;
12693 else
12694 return unsigned_type_for (type);
12697 /* Used by contains_label_p and contains_label_1. */
12699 struct contains_label_data
12701 hash_set<tree> *pset;
12702 bool inside_switch_p;
12705 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12706 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12707 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12709 static tree
12710 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12712 contains_label_data *d = (contains_label_data *) data;
12713 switch (TREE_CODE (*tp))
12715 case LABEL_EXPR:
12716 return *tp;
12718 case CASE_LABEL_EXPR:
12719 if (!d->inside_switch_p)
12720 return *tp;
12721 return NULL_TREE;
12723 case SWITCH_EXPR:
12724 if (!d->inside_switch_p)
12726 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12727 return *tp;
12728 d->inside_switch_p = true;
12729 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12730 return *tp;
12731 d->inside_switch_p = false;
12732 *walk_subtrees = 0;
12734 return NULL_TREE;
12736 case GOTO_EXPR:
12737 *walk_subtrees = 0;
12738 return NULL_TREE;
12740 default:
12741 return NULL_TREE;
12745 /* Return whether the sub-tree ST contains a label which is accessible from
12746 outside the sub-tree. */
12748 static bool
12749 contains_label_p (tree st)
12751 hash_set<tree> pset;
12752 contains_label_data data = { &pset, false };
12753 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
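/* For example, when fold_ternary_loc below folds a COND_EXPR with a
   constant condition, the dead arm may only be dropped if
   contains_label_p is false for it: a LABEL_EXPR in the dead arm
   could still be the target of a jump from outside that arm.  */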
12756 /* Fold a ternary expression of code CODE and type TYPE with operands
12757 OP0, OP1, and OP2. Return the folded expression if folding is
12758 successful. Otherwise, return NULL_TREE. */
12760 tree
12761 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12762 tree op0, tree op1, tree op2)
12764 tree tem;
12765 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12766 enum tree_code_class kind = TREE_CODE_CLASS (code);
12768 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12769 && TREE_CODE_LENGTH (code) == 3);
12771 /* If this is a commutative operation, and OP0 is a constant, move it
12772 to OP1 to reduce the number of tests below. */
12773 if (commutative_ternary_tree_code (code)
12774 && tree_swap_operands_p (op0, op1))
12775 return fold_build3_loc (loc, code, type, op1, op0, op2);
12777 tem = generic_simplify (loc, code, type, op0, op1, op2);
12778 if (tem)
12779 return tem;
12781 /* Strip any conversions that don't change the mode. This is safe
12782 for every expression, except for a comparison expression because
12783 its signedness is derived from its operands. So, in the latter
12784 case, only strip conversions that don't change the signedness.
12786 Note that this is done as an internal manipulation within the
12787 constant folder, in order to find the simplest representation of
12788 the arguments so that their form can be studied. In any case,
12789 the appropriate type conversions should be put back in the tree
12790 that will get out of the constant folder. */
12791 if (op0)
12793 arg0 = op0;
12794 STRIP_NOPS (arg0);
12797 if (op1)
12799 arg1 = op1;
12800 STRIP_NOPS (arg1);
12803 if (op2)
12805 arg2 = op2;
12806 STRIP_NOPS (arg2);
12809 switch (code)
12811 case COMPONENT_REF:
12812 if (TREE_CODE (arg0) == CONSTRUCTOR
12813 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12815 unsigned HOST_WIDE_INT idx;
12816 tree field, value;
12817 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12818 if (field == arg1)
12819 return value;
12821 return NULL_TREE;
12823 case COND_EXPR:
12824 case VEC_COND_EXPR:
12825 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12826 so all simple results must be passed through pedantic_non_lvalue. */
12827 if (TREE_CODE (arg0) == INTEGER_CST)
12829 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12830 tem = integer_zerop (arg0) ? op2 : op1;
12831 /* Only optimize constant conditions when the selected branch
12832 has the same type as the COND_EXPR. This avoids optimizing
12833 away "c ? x : throw", where the throw has a void type.
12834 Avoid throwing away an operand that contains a label. */
12835 if ((!TREE_SIDE_EFFECTS (unused_op)
12836 || !contains_label_p (unused_op))
12837 && (! VOID_TYPE_P (TREE_TYPE (tem))
12838 || VOID_TYPE_P (type)))
12839 return protected_set_expr_location_unshare (tem, loc);
12840 return NULL_TREE;
12842 else if (TREE_CODE (arg0) == VECTOR_CST)
12844 unsigned HOST_WIDE_INT nelts;
12845 if ((TREE_CODE (arg1) == VECTOR_CST
12846 || TREE_CODE (arg1) == CONSTRUCTOR)
12847 && (TREE_CODE (arg2) == VECTOR_CST
12848 || TREE_CODE (arg2) == CONSTRUCTOR)
12849 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12851 vec_perm_builder sel (nelts, nelts, 1);
12852 for (unsigned int i = 0; i < nelts; i++)
12854 tree val = VECTOR_CST_ELT (arg0, i);
12855 if (integer_all_onesp (val))
12856 sel.quick_push (i);
12857 else if (integer_zerop (val))
12858 sel.quick_push (nelts + i);
12859 else /* Currently unreachable. */
12860 return NULL_TREE;
12862 vec_perm_indices indices (sel, 2, nelts);
12863 tree t = fold_vec_perm (type, arg1, arg2, indices);
12864 if (t != NULL_TREE)
12865 return t;
12869 /* If we have A op B ? A : C, we may be able to convert this to a
12870 simpler expression, depending on the operation and the values
12871 of B and C. Signed zeros prevent all of these transformations,
12872 for reasons given above each one.
12874 Also try swapping the arguments and inverting the conditional. */
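/* A typical instance: a < b ? a : b can fold to MIN_EXPR <a, b>.
   Signed zeros spoil this: with a == +0.0 and b == -0.0 the
   comparison is false and the conditional yields -0.0, while
   MIN_EXPR may return either zero.  */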
12875 if (COMPARISON_CLASS_P (arg0)
12876 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12877 && !HONOR_SIGNED_ZEROS (op1))
12879 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12880 TREE_OPERAND (arg0, 0),
12881 TREE_OPERAND (arg0, 1),
12882 op1, op2);
12883 if (tem)
12884 return tem;
12887 if (COMPARISON_CLASS_P (arg0)
12888 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12889 && !HONOR_SIGNED_ZEROS (op2))
12891 enum tree_code comp_code = TREE_CODE (arg0);
12892 tree arg00 = TREE_OPERAND (arg0, 0);
12893 tree arg01 = TREE_OPERAND (arg0, 1);
12894 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12895 if (comp_code != ERROR_MARK)
12896 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12897 arg00,
12898 arg01,
12899 op2, op1);
12900 if (tem)
12901 return tem;
12904 /* If the second operand is simpler than the third, swap them
12905 since that produces better jump optimization results. */
12906 if (truth_value_p (TREE_CODE (arg0))
12907 && tree_swap_operands_p (op1, op2))
12909 location_t loc0 = expr_location_or (arg0, loc);
12910 /* See if this can be inverted. If it can't, possibly because
12911 it was a floating-point inequality comparison, don't do
12912 anything. */
12913 tem = fold_invert_truthvalue (loc0, arg0);
12914 if (tem)
12915 return fold_build3_loc (loc, code, type, tem, op2, op1);
12918 /* Convert A ? 1 : 0 to simply A. */
12919 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12920 : (integer_onep (op1)
12921 && !VECTOR_TYPE_P (type)))
12922 && integer_zerop (op2)
12923 /* If we try to convert OP0 to our type, the
12924 call to fold will try to move the conversion inside
12925 a COND, which will recurse. In that case, the COND_EXPR
12926 is probably the best choice, so leave it alone. */
12927 && type == TREE_TYPE (arg0))
12928 return protected_set_expr_location_unshare (arg0, loc);
12930 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12931 over COND_EXPR in cases such as floating point comparisons. */
12932 if (integer_zerop (op1)
12933 && code == COND_EXPR
12934 && integer_onep (op2)
12935 && !VECTOR_TYPE_P (type)
12936 && truth_value_p (TREE_CODE (arg0)))
12937 return fold_convert_loc (loc, type,
12938 invert_truthvalue_loc (loc, arg0));
12940 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12941 if (TREE_CODE (arg0) == LT_EXPR
12942 && integer_zerop (TREE_OPERAND (arg0, 1))
12943 && integer_zerop (op2)
12944 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12946 /* sign_bit_p looks through both zero and sign extensions,
12947 but for this optimization only sign extensions are
12948 usable. */
12949 tree tem2 = TREE_OPERAND (arg0, 0);
12950 while (tem != tem2)
12952 if (TREE_CODE (tem2) != NOP_EXPR
12953 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12955 tem = NULL_TREE;
12956 break;
12958 tem2 = TREE_OPERAND (tem2, 0);
12960 /* sign_bit_p only checks ARG1 bits within A's precision.
12961 If <sign bit of A> has wider type than A, bits outside
12962 of A's precision in <sign bit of A> need to be checked.
12963 If they are all 0, this optimization needs to be done
12964 in unsigned A's type; if they are all 1, in signed A's type;
12965 otherwise this can't be done. */
12966 if (tem
12967 && TYPE_PRECISION (TREE_TYPE (tem))
12968 < TYPE_PRECISION (TREE_TYPE (arg1))
12969 && TYPE_PRECISION (TREE_TYPE (tem))
12970 < TYPE_PRECISION (type))
12972 int inner_width, outer_width;
12973 tree tem_type;
12975 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12976 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12977 if (outer_width > TYPE_PRECISION (type))
12978 outer_width = TYPE_PRECISION (type);
12980 wide_int mask = wi::shifted_mask
12981 (inner_width, outer_width - inner_width, false,
12982 TYPE_PRECISION (TREE_TYPE (arg1)));
12984 wide_int common = mask & wi::to_wide (arg1);
12985 if (common == mask)
12987 tem_type = signed_type_for (TREE_TYPE (tem));
12988 tem = fold_convert_loc (loc, tem_type, tem);
12990 else if (common == 0)
12992 tem_type = unsigned_type_for (TREE_TYPE (tem));
12993 tem = fold_convert_loc (loc, tem_type, tem);
12995 else
12996 tem = NULL;
12999 if (tem)
13000 return
13001 fold_convert_loc (loc, type,
13002 fold_build2_loc (loc, BIT_AND_EXPR,
13003 TREE_TYPE (tem), tem,
13004 fold_convert_loc (loc,
13005 TREE_TYPE (tem),
13006 arg1)));
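/* E.g. for 32-bit int a, a < 0 ? INT_MIN : 0 tests exactly the sign
   bit and folds to a & INT_MIN.  */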
13009 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13010 already handled above. */
13011 if (TREE_CODE (arg0) == BIT_AND_EXPR
13012 && integer_onep (TREE_OPERAND (arg0, 1))
13013 && integer_zerop (op2)
13014 && integer_pow2p (arg1))
13016 tree tem = TREE_OPERAND (arg0, 0);
13017 STRIP_NOPS (tem);
13018 if (TREE_CODE (tem) == RSHIFT_EXPR
13019 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13020 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13021 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13022 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13023 fold_convert_loc (loc, type,
13024 TREE_OPERAND (tem, 0)),
13025 op1);
13028 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13029 is probably obsolete because the first operand should be a
13030 truth value (that's why we have the two cases above), but let's
13031 leave it in until we can confirm this for all front-ends. */
13032 if (integer_zerop (op2)
13033 && TREE_CODE (arg0) == NE_EXPR
13034 && integer_zerop (TREE_OPERAND (arg0, 1))
13035 && integer_pow2p (arg1)
13036 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13037 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13038 arg1, OEP_ONLY_CONST)
13039 /* operand_equal_p compares just value, not precision, so e.g.
13040 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13041 second operand 32-bit -128, which is not a power of two (or vice
13042 versa). */
13043 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13044 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13046 /* Disable the transformations below for vectors, since
13047 fold_binary_op_with_conditional_arg may undo them immediately,
13048 yielding an infinite loop. */
13049 if (code == VEC_COND_EXPR)
13050 return NULL_TREE;
13052 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13053 if (integer_zerop (op2)
13054 && truth_value_p (TREE_CODE (arg0))
13055 && truth_value_p (TREE_CODE (arg1))
13056 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13057 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13058 : TRUTH_ANDIF_EXPR,
13059 type, fold_convert_loc (loc, type, arg0), op1);
13061 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13062 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13063 && truth_value_p (TREE_CODE (arg0))
13064 && truth_value_p (TREE_CODE (arg1))
13065 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13067 location_t loc0 = expr_location_or (arg0, loc);
13068 /* Only perform transformation if ARG0 is easily inverted. */
13069 tem = fold_invert_truthvalue (loc0, arg0);
13070 if (tem)
13071 return fold_build2_loc (loc, code == VEC_COND_EXPR
13072 ? BIT_IOR_EXPR
13073 : TRUTH_ORIF_EXPR,
13074 type, fold_convert_loc (loc, type, tem),
13075 op1);
13078 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13079 if (integer_zerop (arg1)
13080 && truth_value_p (TREE_CODE (arg0))
13081 && truth_value_p (TREE_CODE (op2))
13082 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13084 location_t loc0 = expr_location_or (arg0, loc);
13085 /* Only perform transformation if ARG0 is easily inverted. */
13086 tem = fold_invert_truthvalue (loc0, arg0);
13087 if (tem)
13088 return fold_build2_loc (loc, code == VEC_COND_EXPR
13089 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13090 type, fold_convert_loc (loc, type, tem),
13091 op2);
13094 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13095 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13096 && truth_value_p (TREE_CODE (arg0))
13097 && truth_value_p (TREE_CODE (op2))
13098 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13099 return fold_build2_loc (loc, code == VEC_COND_EXPR
13100 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13101 type, fold_convert_loc (loc, type, arg0), op2);
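/* Summarizing the four truth-value transforms above for scalar a, b:

     a ? b : 0  ->  a && b
     a ? b : 1  ->  !a || b
     a ? 0 : b  ->  !a && b
     a ? 1 : b  ->  a || b

   (with bitwise AND/IOR instead of the short-circuit forms in the
   VEC_COND_EXPR case).  */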
13103 return NULL_TREE;
13105 case CALL_EXPR:
13106 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13107 of fold_ternary on them. */
13108 gcc_unreachable ();
13110 case BIT_FIELD_REF:
13111 if (TREE_CODE (arg0) == VECTOR_CST
13112 && (type == TREE_TYPE (TREE_TYPE (arg0))
13113 || (VECTOR_TYPE_P (type)
13114 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13115 && tree_fits_uhwi_p (op1)
13116 && tree_fits_uhwi_p (op2))
13118 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13119 unsigned HOST_WIDE_INT width
13120 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13121 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13122 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13123 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13125 if (n != 0
13126 && (idx % width) == 0
13127 && (n % width) == 0
13128 && known_le ((idx + n) / width,
13129 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13131 idx = idx / width;
13132 n = n / width;
13134 if (TREE_CODE (arg0) == VECTOR_CST)
13136 if (n == 1)
13138 tem = VECTOR_CST_ELT (arg0, idx);
13139 if (VECTOR_TYPE_P (type))
13140 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13141 return tem;
13144 tree_vector_builder vals (type, n, 1);
13145 for (unsigned i = 0; i < n; ++i)
13146 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13147 return vals.build ();
13152 /* On constants we can use native encode/interpret to constant
13153 fold (nearly) all BIT_FIELD_REFs. */
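/* E.g. BIT_FIELD_REF <c, 8, 16> on a 32-bit constant c extracts the
   byte at byte offset 2: c is serialized into a buffer by
   native_encode_expr and that byte is re-read as TYPE by
   native_interpret_expr.  */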
13154 if (CONSTANT_CLASS_P (arg0)
13155 && can_native_interpret_type_p (type)
13156 && BITS_PER_UNIT == 8
13157 && tree_fits_uhwi_p (op1)
13158 && tree_fits_uhwi_p (op2))
13160 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13161 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13162 /* Limit us to a reasonable amount of work. To relax the
13163 other limitations we need bit-shifting of the buffer
13164 and rounding up the size. */
13165 if (bitpos % BITS_PER_UNIT == 0
13166 && bitsize % BITS_PER_UNIT == 0
13167 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13169 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13170 unsigned HOST_WIDE_INT len
13171 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13172 bitpos / BITS_PER_UNIT);
13173 if (len > 0
13174 && len * BITS_PER_UNIT >= bitsize)
13176 tree v = native_interpret_expr (type, b,
13177 bitsize / BITS_PER_UNIT);
13178 if (v)
13179 return v;
13184 return NULL_TREE;
13186 case VEC_PERM_EXPR:
13187 /* Perform constant folding of VEC_PERM_EXPR. */
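/* E.g. with four-element vectors, a constant selector of
   { 0, 4, 1, 5 } interleaves the low halves of op0 and op1
   (indices 0..3 select from op0, 4..7 from op1), so when both
   inputs are VECTOR_CSTs the whole permute folds to a new
   VECTOR_CST.  */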
13188 if (TREE_CODE (arg2) == VECTOR_CST
13189 && TREE_CODE (op0) == VECTOR_CST
13190 && TREE_CODE (op1) == VECTOR_CST)
13192 /* Build a vector of integers from the tree mask. */
13193 vec_perm_builder builder;
13194 if (!tree_to_vec_perm_builder (&builder, arg2))
13195 return NULL_TREE;
13197 /* Create a vec_perm_indices for the integer vector. */
13198 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13199 bool single_arg = (op0 == op1);
13200 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13201 return fold_vec_perm (type, op0, op1, sel);
13203 return NULL_TREE;
13205 case BIT_INSERT_EXPR:
13206 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
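/* E.g. inserting the 8-bit value 0xab at bit position 8 of the
   32-bit constant 0x12345678 masks the destination to 0x12340078
   and ors in 0xab << 8, yielding 0x1234ab78 (bit positions count
   from the least significant bit).  */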
13207 if (TREE_CODE (arg0) == INTEGER_CST
13208 && TREE_CODE (arg1) == INTEGER_CST)
13210 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13211 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13212 wide_int tem = (wi::to_wide (arg0)
13213 & wi::shifted_mask (bitpos, bitsize, true,
13214 TYPE_PRECISION (type)));
13215 wide_int tem2
13216 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13217 bitsize), bitpos);
13218 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13220 else if (TREE_CODE (arg0) == VECTOR_CST
13221 && CONSTANT_CLASS_P (arg1)
13222 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13223 TREE_TYPE (arg1)))
13225 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13226 unsigned HOST_WIDE_INT elsize
13227 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13228 if (bitpos % elsize == 0)
13230 unsigned k = bitpos / elsize;
13231 unsigned HOST_WIDE_INT nelts;
13232 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13233 return arg0;
13234 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13236 tree_vector_builder elts (type, nelts, 1);
13237 elts.quick_grow (nelts);
13238 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13239 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13240 return elts.build ();
13244 return NULL_TREE;
13246 default:
13247 return NULL_TREE;
13248 } /* switch (code) */
13251 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13252 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13253 constructor element index of the value returned. If the element is
13254 not found, NULL_TREE is returned and *CTOR_IDX is updated to
13255 the index of the element after the ACCESS_INDEX position (which
13256 may be outside of the CTOR array). */
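/* For example, for a CONSTRUCTOR corresponding to the C initializer
   { [0 ... 3] = 1, [7] = 2 }, access index 2 matches the RANGE_EXPR
   element and returns 1, index 7 returns 2, and index 5 falls in the
   gap and returns NULL_TREE.  */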
13258 tree
13259 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13260 unsigned *ctor_idx)
13262 tree index_type = NULL_TREE;
13263 signop index_sgn = UNSIGNED;
13264 offset_int low_bound = 0;
13266 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13268 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13269 if (domain_type && TYPE_MIN_VALUE (domain_type))
13271 /* Static constructors for variably sized objects make no sense. */
13272 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13273 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13274 /* ??? When it is obvious that the range is signed, treat it so. */
13275 if (TYPE_UNSIGNED (index_type)
13276 && TYPE_MAX_VALUE (domain_type)
13277 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13278 TYPE_MIN_VALUE (domain_type)))
13280 index_sgn = SIGNED;
13281 low_bound
13282 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13283 SIGNED);
13285 else
13287 index_sgn = TYPE_SIGN (index_type);
13288 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13293 if (index_type)
13294 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13295 index_sgn);
13297 offset_int index = low_bound;
13298 if (index_type)
13299 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13301 offset_int max_index = index;
13302 unsigned cnt;
13303 tree cfield, cval;
13304 bool first_p = true;
13306 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13308 /* An array constructor might explicitly set the index, specify a range,
13309 or leave the index NULL, meaning that it is the next index after the
13310 previous one. */
13311 if (cfield)
13313 if (TREE_CODE (cfield) == INTEGER_CST)
13314 max_index = index
13315 = offset_int::from (wi::to_wide (cfield), index_sgn);
13316 else
13318 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13319 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13320 index_sgn);
13321 max_index
13322 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13323 index_sgn);
13324 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13327 else if (!first_p)
13329 index = max_index + 1;
13330 if (index_type)
13331 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13332 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13333 max_index = index;
13335 else
13336 first_p = false;
13338 /* Do we have a match? */
13339 if (wi::cmp (access_index, index, index_sgn) >= 0)
13341 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13343 if (ctor_idx)
13344 *ctor_idx = cnt;
13345 return cval;
13348 else if (in_gimple_form)
13349 /* We're past the element we search for. Note that during parsing
13350 the elements might not be sorted.
13351 ??? We should use a binary search and a flag on the
13352 CONSTRUCTOR as to whether elements are sorted in declaration
13353 order. */
13354 break;
13356 if (ctor_idx)
13357 *ctor_idx = cnt;
13358 return NULL_TREE;
13361 /* Perform constant folding and related simplification of EXPR.
13362 The related simplifications include x*1 => x, x*0 => 0, etc.,
13363 and application of the associative law.
13364 NOP_EXPR conversions may be removed freely (as long as we
13365 are careful not to change the type of the overall expression).
13366 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13367 but we can constant-fold them if they have constant operands. */
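/* A minimal usage sketch (operand types assumed to be
   integer_type_node):

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, 2),
                              build_int_cst (integer_type_node, 3)));

   SUM is then an INTEGER_CST of value 5, whereas the bare build2
   call would leave the addition unevaluated.  */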
13369 #ifdef ENABLE_FOLD_CHECKING
13370 # define fold(x) fold_1 (x)
13371 static tree fold_1 (tree);
13372 static
13373 #endif
13374 tree
13375 fold (tree expr)
13377 const tree t = expr;
13378 enum tree_code code = TREE_CODE (t);
13379 enum tree_code_class kind = TREE_CODE_CLASS (code);
13380 tree tem;
13381 location_t loc = EXPR_LOCATION (expr);
13383 /* Return right away if a constant. */
13384 if (kind == tcc_constant)
13385 return t;
13387 /* CALL_EXPR-like objects with variable numbers of operands are
13388 treated specially. */
13389 if (kind == tcc_vl_exp)
13391 if (code == CALL_EXPR)
13393 tem = fold_call_expr (loc, expr, false);
13394 return tem ? tem : expr;
13396 return expr;
13399 if (IS_EXPR_CODE_CLASS (kind))
13401 tree type = TREE_TYPE (t);
13402 tree op0, op1, op2;
13404 switch (TREE_CODE_LENGTH (code))
13406 case 1:
13407 op0 = TREE_OPERAND (t, 0);
13408 tem = fold_unary_loc (loc, code, type, op0);
13409 return tem ? tem : expr;
13410 case 2:
13411 op0 = TREE_OPERAND (t, 0);
13412 op1 = TREE_OPERAND (t, 1);
13413 tem = fold_binary_loc (loc, code, type, op0, op1);
13414 return tem ? tem : expr;
13415 case 3:
13416 op0 = TREE_OPERAND (t, 0);
13417 op1 = TREE_OPERAND (t, 1);
13418 op2 = TREE_OPERAND (t, 2);
13419 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13420 return tem ? tem : expr;
13421 default:
13422 break;
13426 switch (code)
13428 case ARRAY_REF:
13430 tree op0 = TREE_OPERAND (t, 0);
13431 tree op1 = TREE_OPERAND (t, 1);
13433 if (TREE_CODE (op1) == INTEGER_CST
13434 && TREE_CODE (op0) == CONSTRUCTOR
13435 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13437 tree val = get_array_ctor_element_at_index (op0,
13438 wi::to_offset (op1));
13439 if (val)
13440 return val;
13443 return t;
13446 /* Return a VECTOR_CST if possible. */
13447 case CONSTRUCTOR:
13449 tree type = TREE_TYPE (t);
13450 if (TREE_CODE (type) != VECTOR_TYPE)
13451 return t;
13453 unsigned i;
13454 tree val;
13455 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13456 if (! CONSTANT_CLASS_P (val))
13457 return t;
13459 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13462 case CONST_DECL:
13463 return fold (DECL_INITIAL (t));
13465 default:
13466 return t;
13467 } /* switch (code) */
13470 #ifdef ENABLE_FOLD_CHECKING
13471 #undef fold
13473 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13474 hash_table<nofree_ptr_hash<const tree_node> > *);
13475 static void fold_check_failed (const_tree, const_tree);
13476 void print_fold_checksum (const_tree);
13478 /* When --enable-checking=fold, compute a digest of EXPR before
13479 and after the actual fold call, to verify that fold did not
13480 accidentally change the original expr. */
13482 tree
13483 fold (tree expr)
13485 tree ret;
13486 struct md5_ctx ctx;
13487 unsigned char checksum_before[16], checksum_after[16];
13488 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13490 md5_init_ctx (&ctx);
13491 fold_checksum_tree (expr, &ctx, &ht);
13492 md5_finish_ctx (&ctx, checksum_before);
13493 ht.empty ();
13495 ret = fold_1 (expr);
13497 md5_init_ctx (&ctx);
13498 fold_checksum_tree (expr, &ctx, &ht);
13499 md5_finish_ctx (&ctx, checksum_after);
13501 if (memcmp (checksum_before, checksum_after, 16))
13502 fold_check_failed (expr, ret);
13504 return ret;
13507 void
13508 print_fold_checksum (const_tree expr)
13510 struct md5_ctx ctx;
13511 unsigned char checksum[16], cnt;
13512 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13514 md5_init_ctx (&ctx);
13515 fold_checksum_tree (expr, &ctx, &ht);
13516 md5_finish_ctx (&ctx, checksum);
13517 for (cnt = 0; cnt < 16; ++cnt)
13518 fprintf (stderr, "%02x", checksum[cnt]);
13519 putc ('\n', stderr);
13522 static void
13523 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13525 internal_error ("fold check: original tree changed by fold");
13528 static void
13529 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13530 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13532 const tree_node **slot;
13533 enum tree_code code;
13534 union tree_node *buf;
13535 int i, len;
13537 recursive_label:
13538 if (expr == NULL)
13539 return;
13540 slot = ht->find_slot (expr, INSERT);
13541 if (*slot != NULL)
13542 return;
13543 *slot = expr;
13544 code = TREE_CODE (expr);
13545 if (TREE_CODE_CLASS (code) == tcc_declaration
13546 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13548 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13549 size_t sz = tree_size (expr);
13550 buf = XALLOCAVAR (union tree_node, sz);
13551 memcpy ((char *) buf, expr, sz);
13552 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13553 buf->decl_with_vis.symtab_node = NULL;
13554 buf->base.nowarning_flag = 0;
13555 expr = (tree) buf;
13557 else if (TREE_CODE_CLASS (code) == tcc_type
13558 && (TYPE_POINTER_TO (expr)
13559 || TYPE_REFERENCE_TO (expr)
13560 || TYPE_CACHED_VALUES_P (expr)
13561 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13562 || TYPE_NEXT_VARIANT (expr)
13563 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13565 /* Allow these fields to be modified. */
13566 tree tmp;
13567 size_t sz = tree_size (expr);
13568 buf = XALLOCAVAR (union tree_node, sz);
13569 memcpy ((char *) buf, expr, sz);
13570 expr = tmp = (tree) buf;
13571 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13572 TYPE_POINTER_TO (tmp) = NULL;
13573 TYPE_REFERENCE_TO (tmp) = NULL;
13574 TYPE_NEXT_VARIANT (tmp) = NULL;
13575 TYPE_ALIAS_SET (tmp) = -1;
13576 if (TYPE_CACHED_VALUES_P (tmp))
13578 TYPE_CACHED_VALUES_P (tmp) = 0;
13579 TYPE_CACHED_VALUES (tmp) = NULL;
13582 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13584 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13585 that and change builtins.c etc. instead - see PR89543. */
13586 size_t sz = tree_size (expr);
13587 buf = XALLOCAVAR (union tree_node, sz);
13588 memcpy ((char *) buf, expr, sz);
13589 buf->base.nowarning_flag = 0;
13590 expr = (tree) buf;
13592 md5_process_bytes (expr, tree_size (expr), ctx);
13593 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13594 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13595 if (TREE_CODE_CLASS (code) != tcc_type
13596 && TREE_CODE_CLASS (code) != tcc_declaration
13597 && code != TREE_LIST
13598 && code != SSA_NAME
13599 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13600 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13601 switch (TREE_CODE_CLASS (code))
13603 case tcc_constant:
13604 switch (code)
13606 case STRING_CST:
13607 md5_process_bytes (TREE_STRING_POINTER (expr),
13608 TREE_STRING_LENGTH (expr), ctx);
13609 break;
13610 case COMPLEX_CST:
13611 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13612 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13613 break;
13614 case VECTOR_CST:
13615 len = vector_cst_encoded_nelts (expr);
13616 for (i = 0; i < len; ++i)
13617 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13618 break;
13619 default:
13620 break;
13622 break;
13623 case tcc_exceptional:
13624 switch (code)
13626 case TREE_LIST:
13627 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13628 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13629 expr = TREE_CHAIN (expr);
13630 goto recursive_label;
13631 break;
13632 case TREE_VEC:
13633 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13634 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13635 break;
13636 default:
13637 break;
13639 break;
13640 case tcc_expression:
13641 case tcc_reference:
13642 case tcc_comparison:
13643 case tcc_unary:
13644 case tcc_binary:
13645 case tcc_statement:
13646 case tcc_vl_exp:
13647 len = TREE_OPERAND_LENGTH (expr);
13648 for (i = 0; i < len; ++i)
13649 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13650 break;
13651 case tcc_declaration:
13652 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13653 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13654 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13656 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13657 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13658 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13659 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13660 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13663 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13665 if (TREE_CODE (expr) == FUNCTION_DECL)
13667 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13668 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13670 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13672 break;
13673 case tcc_type:
13674 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13675 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13676 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13677 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13678 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13679 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13680 if (INTEGRAL_TYPE_P (expr)
13681 || SCALAR_FLOAT_TYPE_P (expr))
13683 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13684 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13686 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13687 if (TREE_CODE (expr) == RECORD_TYPE
13688 || TREE_CODE (expr) == UNION_TYPE
13689 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13690 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13691 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13692 break;
13693 default:
13694 break;
13698 /* Helper function for outputting the checksum of a tree T. When
13699 debugging with gdb, you can "define mynext" to be "next" followed
13700 by "call debug_fold_checksum (op0)", then just trace down till the
13701 outputs differ. */
13703 DEBUG_FUNCTION void
13704 debug_fold_checksum (const_tree t)
13706 int i;
13707 unsigned char checksum[16];
13708 struct md5_ctx ctx;
13709 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13711 md5_init_ctx (&ctx);
13712 fold_checksum_tree (t, &ctx, &ht);
13713 md5_finish_ctx (&ctx, checksum);
13714 ht.empty ();
13716 for (i = 0; i < 16; i++)
13717 fprintf (stderr, "%d ", checksum[i]);
13719 fprintf (stderr, "\n");
13722 #endif
13724 /* Fold a unary tree expression with code CODE of type TYPE with an
13725 operand OP0. LOC is the location of the resulting expression.
13726 Return a folded expression if successful. Otherwise, return a tree
13727 expression with code CODE of type TYPE with an operand OP0. */
13729 tree
13730 fold_build1_loc (location_t loc,
13731 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13733 tree tem;
13734 #ifdef ENABLE_FOLD_CHECKING
13735 unsigned char checksum_before[16], checksum_after[16];
13736 struct md5_ctx ctx;
13737 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13739 md5_init_ctx (&ctx);
13740 fold_checksum_tree (op0, &ctx, &ht);
13741 md5_finish_ctx (&ctx, checksum_before);
13742 ht.empty ();
13743 #endif
13745 tem = fold_unary_loc (loc, code, type, op0);
13746 if (!tem)
13747 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13749 #ifdef ENABLE_FOLD_CHECKING
13750 md5_init_ctx (&ctx);
13751 fold_checksum_tree (op0, &ctx, &ht);
13752 md5_finish_ctx (&ctx, checksum_after);
13754 if (memcmp (checksum_before, checksum_after, 16))
13755 fold_check_failed (op0, tem);
13756 #endif
13757 return tem;
13760 /* Fold a binary tree expression with code CODE of type TYPE with
13761 operands OP0 and OP1. LOC is the location of the resulting
13762 expression. Return a folded expression if successful. Otherwise,
13763 return a tree expression with code CODE of type TYPE with operands
13764 OP0 and OP1. */
13766 tree
13767 fold_build2_loc (location_t loc,
13768 enum tree_code code, tree type, tree op0, tree op1
13769 MEM_STAT_DECL)
13771 tree tem;
13772 #ifdef ENABLE_FOLD_CHECKING
13773 unsigned char checksum_before_op0[16],
13774 checksum_before_op1[16],
13775 checksum_after_op0[16],
13776 checksum_after_op1[16];
13777 struct md5_ctx ctx;
13778 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13780 md5_init_ctx (&ctx);
13781 fold_checksum_tree (op0, &ctx, &ht);
13782 md5_finish_ctx (&ctx, checksum_before_op0);
13783 ht.empty ();
13785 md5_init_ctx (&ctx);
13786 fold_checksum_tree (op1, &ctx, &ht);
13787 md5_finish_ctx (&ctx, checksum_before_op1);
13788 ht.empty ();
13789 #endif
13791 tem = fold_binary_loc (loc, code, type, op0, op1);
13792 if (!tem)
13793 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13795 #ifdef ENABLE_FOLD_CHECKING
13796 md5_init_ctx (&ctx);
13797 fold_checksum_tree (op0, &ctx, &ht);
13798 md5_finish_ctx (&ctx, checksum_after_op0);
13799 ht.empty ();
13801 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13802 fold_check_failed (op0, tem);
13804 md5_init_ctx (&ctx);
13805 fold_checksum_tree (op1, &ctx, &ht);
13806 md5_finish_ctx (&ctx, checksum_after_op1);
13808 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13809 fold_check_failed (op1, tem);
13810 #endif
13811 return tem;
13814 /* Fold a ternary tree expression with code CODE of type TYPE with
13815 operands OP0, OP1, and OP2. Return a folded expression if
13816 successful. Otherwise, return a tree expression with code CODE of
13817 type TYPE with operands OP0, OP1, and OP2. */
13819 tree
13820 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13821 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13823 tree tem;
13824 #ifdef ENABLE_FOLD_CHECKING
13825 unsigned char checksum_before_op0[16],
13826 checksum_before_op1[16],
13827 checksum_before_op2[16],
13828 checksum_after_op0[16],
13829 checksum_after_op1[16],
13830 checksum_after_op2[16];
13831 struct md5_ctx ctx;
13832 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13834 md5_init_ctx (&ctx);
13835 fold_checksum_tree (op0, &ctx, &ht);
13836 md5_finish_ctx (&ctx, checksum_before_op0);
13837 ht.empty ();
13839 md5_init_ctx (&ctx);
13840 fold_checksum_tree (op1, &ctx, &ht);
13841 md5_finish_ctx (&ctx, checksum_before_op1);
13842 ht.empty ();
13844 md5_init_ctx (&ctx);
13845 fold_checksum_tree (op2, &ctx, &ht);
13846 md5_finish_ctx (&ctx, checksum_before_op2);
13847 ht.empty ();
13848 #endif
13850 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13851 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13852 if (!tem)
13853 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13855 #ifdef ENABLE_FOLD_CHECKING
13856 md5_init_ctx (&ctx);
13857 fold_checksum_tree (op0, &ctx, &ht);
13858 md5_finish_ctx (&ctx, checksum_after_op0);
13859 ht.empty ();
13861 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13862 fold_check_failed (op0, tem);
13864 md5_init_ctx (&ctx);
13865 fold_checksum_tree (op1, &ctx, &ht);
13866 md5_finish_ctx (&ctx, checksum_after_op1);
13867 ht.empty ();
13869 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13870 fold_check_failed (op1, tem);
13872 md5_init_ctx (&ctx);
13873 fold_checksum_tree (op2, &ctx, &ht);
13874 md5_finish_ctx (&ctx, checksum_after_op2);
13876 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13877 fold_check_failed (op2, tem);
13878 #endif
13879 return tem;
13882 /* Fold a CALL_EXPR of type TYPE with function FN, NARGS
13883 arguments in ARGARRAY, and a null static chain.
13884 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13885 of type TYPE from the given operands as constructed by build_call_array. */
13887 tree
13888 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13889 int nargs, tree *argarray)
13891 tree tem;
13892 #ifdef ENABLE_FOLD_CHECKING
13893 unsigned char checksum_before_fn[16],
13894 checksum_before_arglist[16],
13895 checksum_after_fn[16],
13896 checksum_after_arglist[16];
13897 struct md5_ctx ctx;
13898 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13899 int i;
13901 md5_init_ctx (&ctx);
13902 fold_checksum_tree (fn, &ctx, &ht);
13903 md5_finish_ctx (&ctx, checksum_before_fn);
13904 ht.empty ();
13906 md5_init_ctx (&ctx);
13907 for (i = 0; i < nargs; i++)
13908 fold_checksum_tree (argarray[i], &ctx, &ht);
13909 md5_finish_ctx (&ctx, checksum_before_arglist);
13910 ht.empty ();
13911 #endif
13913 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13914 if (!tem)
13915 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13917 #ifdef ENABLE_FOLD_CHECKING
13918 md5_init_ctx (&ctx);
13919 fold_checksum_tree (fn, &ctx, &ht);
13920 md5_finish_ctx (&ctx, checksum_after_fn);
13921 ht.empty ();
13923 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13924 fold_check_failed (fn, tem);
13926 md5_init_ctx (&ctx);
13927 for (i = 0; i < nargs; i++)
13928 fold_checksum_tree (argarray[i], &ctx, &ht);
13929 md5_finish_ctx (&ctx, checksum_after_arglist);
13931 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13932 fold_check_failed (NULL_TREE, tem);
13933 #endif
13934 return tem;
13937 /* Perform constant folding and related simplification of initializer
13938 expression EXPR. These behave identically to "fold_buildN" but ignore
13939 potential run-time traps and exceptions that fold must preserve. */
13941 #define START_FOLD_INIT \
13942 int saved_signaling_nans = flag_signaling_nans;\
13943 int saved_trapping_math = flag_trapping_math;\
13944 int saved_rounding_math = flag_rounding_math;\
13945 int saved_trapv = flag_trapv;\
13946 int saved_folding_initializer = folding_initializer;\
13947 flag_signaling_nans = 0;\
13948 flag_trapping_math = 0;\
13949 flag_rounding_math = 0;\
13950 flag_trapv = 0;\
13951 folding_initializer = 1;
13953 #define END_FOLD_INIT \
13954 flag_signaling_nans = saved_signaling_nans;\
13955 flag_trapping_math = saved_trapping_math;\
13956 flag_rounding_math = saved_rounding_math;\
13957 flag_trapv = saved_trapv;\
13958 folding_initializer = saved_folding_initializer;
13960 tree
13961 fold_init (tree expr)
13963 tree result;
13964 START_FOLD_INIT;
13966 result = fold (expr);
13968 END_FOLD_INIT;
13969 return result;
13972 tree
13973 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13974 tree type, tree op)
13976 tree result;
13977 START_FOLD_INIT;
13979 result = fold_build1_loc (loc, code, type, op);
13981 END_FOLD_INIT;
13982 return result;
13985 tree
13986 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13987 tree type, tree op0, tree op1)
13989 tree result;
13990 START_FOLD_INIT;
13992 result = fold_build2_loc (loc, code, type, op0, op1);
13994 END_FOLD_INIT;
13995 return result;
13998 tree
13999 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14000 int nargs, tree *argarray)
14002 tree result;
14003 START_FOLD_INIT;
14005 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14007 END_FOLD_INIT;
14008 return result;
14011 tree
14012 fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14013 tree lhs, tree rhs)
14015 tree result;
14016 START_FOLD_INIT;
14018 result = fold_binary_loc (loc, code, type, lhs, rhs);
14020 END_FOLD_INIT;
14021 return result;
14024 #undef START_FOLD_INIT
14025 #undef END_FOLD_INIT
14027 /* Determine if the first argument is a multiple of the second argument.
14028 Return 0 if it is not, or if we cannot easily determine it to be.
14030 An example of the sort of thing we care about (at this point; this routine
14031 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14032 fold cases do now) is discovering that
14034 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14036 is a multiple of
14038 SAVE_EXPR (J * 8)
14040 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14042 This code also handles discovering that
14044 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14046 is a multiple of 8 so we don't have to worry about dealing with a
14047 possible remainder.
14049 Note that we *look* inside a SAVE_EXPR only to determine how it was
14050 calculated; it is not safe for fold to do much of anything else with the
14051 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14052 at run time. For example, the latter example above *cannot* be implemented
14053 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14054 evaluation time of the original SAVE_EXPR is not necessarily the same at
14055 the time the new expression is evaluated. The only optimization of this
14056 sort that would be valid is changing
14058 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14060 divided by 8 to
14062 SAVE_EXPR (I) * SAVE_EXPR (J)
14064 (where the same SAVE_EXPR (J) is used in the original and the
14065 transformed version). */
14067 int
14068 multiple_of_p (tree type, const_tree top, const_tree bottom)
14070 gimple *stmt;
14071 tree t1, op1, op2;
14073 if (operand_equal_p (top, bottom, 0))
14074 return 1;
14076 if (TREE_CODE (type) != INTEGER_TYPE)
14077 return 0;
14079 switch (TREE_CODE (top))
14081 case BIT_AND_EXPR:
14082 /* Bitwise and provides a power of two multiple. If the mask is
14083 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14084 if (!integer_pow2p (bottom))
14085 return 0;
14086 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14087 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14089 case MULT_EXPR:
14090 if (TREE_CODE (bottom) == INTEGER_CST)
14092 op1 = TREE_OPERAND (top, 0);
14093 op2 = TREE_OPERAND (top, 1);
14094 if (TREE_CODE (op1) == INTEGER_CST)
14095 std::swap (op1, op2);
14096 if (TREE_CODE (op2) == INTEGER_CST)
14098 if (multiple_of_p (type, op2, bottom))
14099 return 1;
14100 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14101 if (multiple_of_p (type, bottom, op2))
14103 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14104 wi::to_widest (op2));
14105 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14107 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14108 return multiple_of_p (type, op1, op2);
14111 return multiple_of_p (type, op1, bottom);
14114 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14115 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14117 case MINUS_EXPR:
14118 /* It is impossible to prove precisely whether op0 - op1 is a multiple
14119 of bottom, so be conservative here and check that both op0 and op1
14120 are multiples of bottom. Note we check the second operand first
14121 since it's usually simpler. */
14122 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14123 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14125 case PLUS_EXPR:
14126 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
14127 as op0 - 3 if the expression has unsigned type. For example,
14128 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
14129 op1 = TREE_OPERAND (top, 1);
14130 if (TYPE_UNSIGNED (type)
14131 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14132 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14133 return (multiple_of_p (type, op1, bottom)
14134 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14136 case LSHIFT_EXPR:
14137 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14139 op1 = TREE_OPERAND (top, 1);
14140 /* const_binop may not detect overflow correctly,
14141 so check for it explicitly here. */
14142 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
14143 wi::to_wide (op1))
14144 && (t1 = fold_convert (type,
14145 const_binop (LSHIFT_EXPR, size_one_node,
14146 op1))) != 0
14147 && !TREE_OVERFLOW (t1))
14148 return multiple_of_p (type, t1, bottom);
14150 return 0;
14152 case NOP_EXPR:
14153 /* Can't handle conversions from non-integral or wider integral type. */
14154 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14155 || (TYPE_PRECISION (type)
14156 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14157 return 0;
14159 /* fall through */
14161 case SAVE_EXPR:
14162 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14164 case COND_EXPR:
14165 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14166 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14168 case INTEGER_CST:
14169 if (TREE_CODE (bottom) != INTEGER_CST
14170 || integer_zerop (bottom)
14171 || (TYPE_UNSIGNED (type)
14172 && (tree_int_cst_sgn (top) < 0
14173 || tree_int_cst_sgn (bottom) < 0)))
14174 return 0;
14175 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14176 SIGNED);
14178 case SSA_NAME:
14179 if (TREE_CODE (bottom) == INTEGER_CST
14180 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14181 && gimple_code (stmt) == GIMPLE_ASSIGN)
14183 enum tree_code code = gimple_assign_rhs_code (stmt);
14185 /* Check for special cases to see if top is defined as a multiple
14186 of bottom:
14188 top = (X & ~(bottom - 1)) ; bottom is a power of 2
14190 or
14192 Y = X % bottom
14193 top = X - Y. */
14194 if (code == BIT_AND_EXPR
14195 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14196 && TREE_CODE (op2) == INTEGER_CST
14197 && integer_pow2p (bottom)
14198 && wi::multiple_of_p (wi::to_widest (op2),
14199 wi::to_widest (bottom), UNSIGNED))
14200 return 1;
14202 op1 = gimple_assign_rhs1 (stmt);
14203 if (code == MINUS_EXPR
14204 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14205 && TREE_CODE (op2) == SSA_NAME
14206 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14207 && gimple_code (stmt) == GIMPLE_ASSIGN
14208 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14209 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14210 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14211 return 1;
14214 /* fall through */
14216 default:
14217 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14218 return multiple_p (wi::to_poly_widest (top),
14219 wi::to_poly_widest (bottom));
14221 return 0;
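/* For instance, multiple_of_p proves (x & ~7) to be a multiple of 8
   via the BIT_AND_EXPR case (the mask ~7 is itself a multiple of 8),
   and (y * 12) to be a multiple of 4 via the MULT_EXPR case (the
   constant factor 12 is).  */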
14225 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14226 This function returns true for integer expressions, and returns
14227 false if uncertain. */
14229 bool
14230 tree_expr_finite_p (const_tree x)
14232 machine_mode mode = element_mode (x);
14233 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14234 return true;
14235 switch (TREE_CODE (x))
14237 case REAL_CST:
14238 return real_isfinite (TREE_REAL_CST_PTR (x));
14239 case COMPLEX_CST:
14240 return tree_expr_finite_p (TREE_REALPART (x))
14241 && tree_expr_finite_p (TREE_IMAGPART (x));
14242 case FLOAT_EXPR:
14243 return true;
14244 case ABS_EXPR:
14245 case CONVERT_EXPR:
14246 case NON_LVALUE_EXPR:
14247 case NEGATE_EXPR:
14248 case SAVE_EXPR:
14249 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14250 case MIN_EXPR:
14251 case MAX_EXPR:
14252 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14253 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14254 case COND_EXPR:
14255 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14256 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14257 case CALL_EXPR:
14258 switch (get_call_combined_fn (x))
14260 CASE_CFN_FABS:
14261 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14262 CASE_CFN_FMAX:
14263 CASE_CFN_FMIN:
14264 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14265 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14266 default:
14267 return false;
14270 default:
14271 return false;
14275 /* Return true if expression X evaluates to an infinity.
14276 This function returns false for integer expressions. */
14278 bool
14279 tree_expr_infinite_p (const_tree x)
14281 if (!HONOR_INFINITIES (x))
14282 return false;
14283 switch (TREE_CODE (x))
14285 case REAL_CST:
14286 return real_isinf (TREE_REAL_CST_PTR (x));
14287 case ABS_EXPR:
14288 case NEGATE_EXPR:
14289 case NON_LVALUE_EXPR:
14290 case SAVE_EXPR:
14291 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14292 case COND_EXPR:
14293 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14294 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14295 default:
14296 return false;
14300 /* Return true if expression X could evaluate to an infinity.
14301 This function returns false for integer expressions, and returns
14302 true if uncertain. */
14304 bool
14305 tree_expr_maybe_infinite_p (const_tree x)
14307 if (!HONOR_INFINITIES (x))
14308 return false;
14309 switch (TREE_CODE (x))
14311 case REAL_CST:
14312 return real_isinf (TREE_REAL_CST_PTR (x));
14313 case FLOAT_EXPR:
14314 return false;
14315 case ABS_EXPR:
14316 case NEGATE_EXPR:
14317 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14318 case COND_EXPR:
14319 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14320 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14321 default:
14322 return true;
14326 /* Return true if expression X evaluates to a signaling NaN.
14327 This function returns false for integer expressions. */
14329 bool
14330 tree_expr_signaling_nan_p (const_tree x)
14332 if (!HONOR_SNANS (x))
14333 return false;
14334 switch (TREE_CODE (x))
14336 case REAL_CST:
14337 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14338 case NON_LVALUE_EXPR:
14339 case SAVE_EXPR:
14340 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14341 case COND_EXPR:
14342 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14343 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14344 default:
14345 return false;
14349 /* Return true if expression X could evaluate to a signaling NaN.
14350 This function returns false for integer expressions, and returns
14351 true if uncertain. */
14353 bool
14354 tree_expr_maybe_signaling_nan_p (const_tree x)
14356 if (!HONOR_SNANS (x))
14357 return false;
14358 switch (TREE_CODE (x))
14360 case REAL_CST:
14361 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14362 case FLOAT_EXPR:
14363 return false;
14364 case ABS_EXPR:
14365 case CONVERT_EXPR:
14366 case NEGATE_EXPR:
14367 case NON_LVALUE_EXPR:
14368 case SAVE_EXPR:
14369 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14370 case MIN_EXPR:
14371 case MAX_EXPR:
14372 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14373 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14374 case COND_EXPR:
14375 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14376 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14377 case CALL_EXPR:
14378 switch (get_call_combined_fn (x))
14380 CASE_CFN_FABS:
14381 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14382 CASE_CFN_FMAX:
14383 CASE_CFN_FMIN:
14384 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14385 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14386 default:
14387 return true;
14389 default:
14390 return true;
14394 /* Return true if expression X evaluates to a NaN.
14395 This function returns false for integer expressions. */
14397 bool
14398 tree_expr_nan_p (const_tree x)
14400 if (!HONOR_NANS (x))
14401 return false;
14402 switch (TREE_CODE (x))
14404 case REAL_CST:
14405 return real_isnan (TREE_REAL_CST_PTR (x));
14406 case NON_LVALUE_EXPR:
14407 case SAVE_EXPR:
14408 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14409 case COND_EXPR:
14410 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14411 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14412 default:
14413 return false;
14417 /* Return true if expression X could evaluate to a NaN.
14418 This function returns false for integer expressions, and returns
14419 true if uncertain. */
14421 bool
14422 tree_expr_maybe_nan_p (const_tree x)
14424 if (!HONOR_NANS (x))
14425 return false;
14426 switch (TREE_CODE (x))
14428 case REAL_CST:
14429 return real_isnan (TREE_REAL_CST_PTR (x));
14430 case FLOAT_EXPR:
14431 return false;
14432 case PLUS_EXPR:
14433 case MINUS_EXPR:
14434 case MULT_EXPR:
14435 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14436 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14437 case ABS_EXPR:
14438 case CONVERT_EXPR:
14439 case NEGATE_EXPR:
14440 case NON_LVALUE_EXPR:
14441 case SAVE_EXPR:
14442 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14443 case MIN_EXPR:
14444 case MAX_EXPR:
14445 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14446 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14447 case COND_EXPR:
14448 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14449 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14450 case CALL_EXPR:
14451 switch (get_call_combined_fn (x))
14453 CASE_CFN_FABS:
14454 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14455 CASE_CFN_FMAX:
14456 CASE_CFN_FMIN:
14457 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14458 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14459 default:
14460 return true;
14462 default:
14463 return true;
14467 /* Return true if expression X could evaluate to -0.0.
14468 This function returns true if uncertain. */
14470 bool
14471 tree_expr_maybe_real_minus_zero_p (const_tree x)
14473 if (!HONOR_SIGNED_ZEROS (x))
14474 return false;
14475 switch (TREE_CODE (x))
14477 case REAL_CST:
14478 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14479 case INTEGER_CST:
14480 case FLOAT_EXPR:
14481 case ABS_EXPR:
14482 return false;
14483 case NON_LVALUE_EXPR:
14484 case SAVE_EXPR:
14485 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14486 case COND_EXPR:
14487 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14488 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14489 case CALL_EXPR:
14490 switch (get_call_combined_fn (x))
14492 CASE_CFN_FABS:
14493 return false;
14494 default:
14495 break;
14497 default:
14498 break;
14500 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X)),
14501 but currently those predicates require tree and not const_tree. */
14502 return true;
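
/* Illustrative sketch (the example_* helper below is hypothetical, not
   part of the upstream sources): the classification predicates above
   compose into safety checks for rewrites.  Folding x + 0.0 to x is
   only safe when x cannot be -0.0 (since -0.0 + 0.0 is +0.0) and
   cannot be a signaling NaN (since the addition would quiet it).  */

static bool ATTRIBUTE_UNUSED
example_plus_zero_rewrite_safe_p (tree x)
{
  return (!tree_expr_maybe_real_minus_zero_p (x)
	  && !tree_expr_maybe_signaling_nan_p (x));
}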
14505 #define tree_expr_nonnegative_warnv_p(X, Y) \
14506 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14508 #define RECURSE(X) \
14509 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14511 /* Return true if CODE or TYPE is known to be non-negative. */
14513 static bool
14514 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14516 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14517 && truth_value_p (code))
14518 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14519 have a signed:1 type (where the values are -1 and 0). */
14520 return true;
14521 return false;
14524 /* Return true if (CODE OP0) is known to be non-negative. If the return
14525 value is based on the assumption that signed overflow is undefined,
14526 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14527 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14529 bool
14530 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14531 bool *strict_overflow_p, int depth)
14533 if (TYPE_UNSIGNED (type))
14534 return true;
14536 switch (code)
14538 case ABS_EXPR:
14539 /* We can't return 1 if flag_wrapv is set because
14540 ABS_EXPR<INT_MIN> = INT_MIN. */
14541 if (!ANY_INTEGRAL_TYPE_P (type))
14542 return true;
14543 if (TYPE_OVERFLOW_UNDEFINED (type))
14545 *strict_overflow_p = true;
14546 return true;
14548 break;
14550 case NON_LVALUE_EXPR:
14551 case FLOAT_EXPR:
14552 case FIX_TRUNC_EXPR:
14553 return RECURSE (op0);
14555 CASE_CONVERT:
14557 tree inner_type = TREE_TYPE (op0);
14558 tree outer_type = type;
14560 if (TREE_CODE (outer_type) == REAL_TYPE)
14562 if (TREE_CODE (inner_type) == REAL_TYPE)
14563 return RECURSE (op0);
14564 if (INTEGRAL_TYPE_P (inner_type))
14566 if (TYPE_UNSIGNED (inner_type))
14567 return true;
14568 return RECURSE (op0);
14571 else if (INTEGRAL_TYPE_P (outer_type))
14573 if (TREE_CODE (inner_type) == REAL_TYPE)
14574 return RECURSE (op0);
14575 if (INTEGRAL_TYPE_P (inner_type))
14576 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14577 && TYPE_UNSIGNED (inner_type);
14580 break;
14582 default:
14583 return tree_simple_nonnegative_warnv_p (code, type);
14586 /* We don't know the sign, so be conservative and return false. */
14587 return false;
14590 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14591 value is based on the assumption that signed overflow is undefined,
14592 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14593 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14595 bool
14596 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14597 tree op1, bool *strict_overflow_p,
14598 int depth)
14600 if (TYPE_UNSIGNED (type))
14601 return true;
14603 switch (code)
14605 case POINTER_PLUS_EXPR:
14606 case PLUS_EXPR:
14607 if (FLOAT_TYPE_P (type))
14608 return RECURSE (op0) && RECURSE (op1);
14610 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14611 both unsigned and at least 2 bits shorter than the result. */
14612 if (TREE_CODE (type) == INTEGER_TYPE
14613 && TREE_CODE (op0) == NOP_EXPR
14614 && TREE_CODE (op1) == NOP_EXPR)
14616 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14617 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14618 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14619 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14621 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14622 TYPE_PRECISION (inner2)) + 1;
14623 return prec < TYPE_PRECISION (type);
14626 break;
14628 case MULT_EXPR:
14629 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14631 /* x * x is always non-negative for floating point x
14632 or when signed overflow is undefined. */
14633 if (operand_equal_p (op0, op1, 0)
14634 || (RECURSE (op0) && RECURSE (op1)))
14636 if (ANY_INTEGRAL_TYPE_P (type)
14637 && TYPE_OVERFLOW_UNDEFINED (type))
14638 *strict_overflow_p = true;
14639 return true;
14643 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14644 both unsigned and their combined precision is less than the result's. */
14645 if (TREE_CODE (type) == INTEGER_TYPE
14646 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14647 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14649 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14650 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14651 : TREE_TYPE (op0);
14652 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14653 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14654 : TREE_TYPE (op1);
14656 bool unsigned0 = TYPE_UNSIGNED (inner0);
14657 bool unsigned1 = TYPE_UNSIGNED (inner1);
14659 if (TREE_CODE (op0) == INTEGER_CST)
14660 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14662 if (TREE_CODE (op1) == INTEGER_CST)
14663 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14665 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14666 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14668 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14669 ? tree_int_cst_min_precision (op0, UNSIGNED)
14670 : TYPE_PRECISION (inner0);
14672 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14673 ? tree_int_cst_min_precision (op1, UNSIGNED)
14674 : TYPE_PRECISION (inner1);
14676 return precision0 + precision1 < TYPE_PRECISION (type);
14679 return false;
14681 case BIT_AND_EXPR:
14682 return RECURSE (op0) || RECURSE (op1);
14684 case MAX_EXPR:
14685 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14686 things. */
14687 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14688 return RECURSE (op0) && RECURSE (op1);
14689 return RECURSE (op0) || RECURSE (op1);
14691 case BIT_IOR_EXPR:
14692 case BIT_XOR_EXPR:
14693 case MIN_EXPR:
14694 case RDIV_EXPR:
14695 case TRUNC_DIV_EXPR:
14696 case CEIL_DIV_EXPR:
14697 case FLOOR_DIV_EXPR:
14698 case ROUND_DIV_EXPR:
14699 return RECURSE (op0) && RECURSE (op1);
14701 case TRUNC_MOD_EXPR:
14702 return RECURSE (op0);
14704 case FLOOR_MOD_EXPR:
14705 return RECURSE (op1);
14707 case CEIL_MOD_EXPR:
14708 case ROUND_MOD_EXPR:
14709 default:
14710 return tree_simple_nonnegative_warnv_p (code, type);
14713 /* We don't know the sign, so be conservative and return false. */
14714 return false;
14717 /* Return true if T is known to be non-negative. If the return
14718 value is based on the assumption that signed overflow is undefined,
14719 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14720 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14722 bool
14723 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14725 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14726 return true;
14728 switch (TREE_CODE (t))
14730 case INTEGER_CST:
14731 return tree_int_cst_sgn (t) >= 0;
14733 case REAL_CST:
14734 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14736 case FIXED_CST:
14737 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14739 case COND_EXPR:
14740 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14742 case SSA_NAME:
14743 /* Limit the depth of recursion to avoid quadratic behavior.
14744 This is expected to catch almost all occurrences in practice.
14745 If this code misses important cases that unbounded recursion
14746 would not, passes that need this information could be revised
14747 to provide it through dataflow propagation. */
14748 return (!name_registered_for_update_p (t)
14749 && depth < param_max_ssa_name_query_depth
14750 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14751 strict_overflow_p, depth));
14753 default:
14754 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14758 /* Return true if T is known to be non-negative. If the return
14759 value is based on the assumption that signed overflow is undefined,
14760 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14761 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14763 bool
14764 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14765 bool *strict_overflow_p, int depth)
14767 switch (fn)
14769 CASE_CFN_ACOS:
14770 CASE_CFN_ACOSH:
14771 CASE_CFN_CABS:
14772 CASE_CFN_COSH:
14773 CASE_CFN_ERFC:
14774 CASE_CFN_EXP:
14775 CASE_CFN_EXP10:
14776 CASE_CFN_EXP2:
14777 CASE_CFN_FABS:
14778 CASE_CFN_FDIM:
14779 CASE_CFN_HYPOT:
14780 CASE_CFN_POW10:
14781 CASE_CFN_FFS:
14782 CASE_CFN_PARITY:
14783 CASE_CFN_POPCOUNT:
14784 CASE_CFN_CLZ:
14785 CASE_CFN_CLRSB:
14786 case CFN_BUILT_IN_BSWAP16:
14787 case CFN_BUILT_IN_BSWAP32:
14788 case CFN_BUILT_IN_BSWAP64:
14789 case CFN_BUILT_IN_BSWAP128:
14790 /* Always true. */
14791 return true;
14793 CASE_CFN_SQRT:
14794 CASE_CFN_SQRT_FN:
14795 /* sqrt(-0.0) is -0.0. */
14796 if (!HONOR_SIGNED_ZEROS (type))
14797 return true;
14798 return RECURSE (arg0);
14800 CASE_CFN_ASINH:
14801 CASE_CFN_ATAN:
14802 CASE_CFN_ATANH:
14803 CASE_CFN_CBRT:
14804 CASE_CFN_CEIL:
14805 CASE_CFN_CEIL_FN:
14806 CASE_CFN_ERF:
14807 CASE_CFN_EXPM1:
14808 CASE_CFN_FLOOR:
14809 CASE_CFN_FLOOR_FN:
14810 CASE_CFN_FMOD:
14811 CASE_CFN_FREXP:
14812 CASE_CFN_ICEIL:
14813 CASE_CFN_IFLOOR:
14814 CASE_CFN_IRINT:
14815 CASE_CFN_IROUND:
14816 CASE_CFN_LCEIL:
14817 CASE_CFN_LDEXP:
14818 CASE_CFN_LFLOOR:
14819 CASE_CFN_LLCEIL:
14820 CASE_CFN_LLFLOOR:
14821 CASE_CFN_LLRINT:
14822 CASE_CFN_LLROUND:
14823 CASE_CFN_LRINT:
14824 CASE_CFN_LROUND:
14825 CASE_CFN_MODF:
14826 CASE_CFN_NEARBYINT:
14827 CASE_CFN_NEARBYINT_FN:
14828 CASE_CFN_RINT:
14829 CASE_CFN_RINT_FN:
14830 CASE_CFN_ROUND:
14831 CASE_CFN_ROUND_FN:
14832 CASE_CFN_ROUNDEVEN:
14833 CASE_CFN_ROUNDEVEN_FN:
14834 CASE_CFN_SCALB:
14835 CASE_CFN_SCALBLN:
14836 CASE_CFN_SCALBN:
14837 CASE_CFN_SIGNBIT:
14838 CASE_CFN_SIGNIFICAND:
14839 CASE_CFN_SINH:
14840 CASE_CFN_TANH:
14841 CASE_CFN_TRUNC:
14842 CASE_CFN_TRUNC_FN:
14843 /* True if the 1st argument is nonnegative. */
14844 return RECURSE (arg0);
14846 CASE_CFN_FMAX:
14847 CASE_CFN_FMAX_FN:
14848 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14849 things. In the presence of sNaNs, we're only guaranteed to be
14850 non-negative if both operands are non-negative. In the presence
14851 of qNaNs, we're non-negative if either operand is non-negative
14852 and can't be a qNaN, or if both operands are non-negative. */
14853 if (tree_expr_maybe_signaling_nan_p (arg0)
14854 || tree_expr_maybe_signaling_nan_p (arg1))
14855 return RECURSE (arg0) && RECURSE (arg1);
14856 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14857 || RECURSE (arg1))
14858 : (RECURSE (arg1)
14859 && !tree_expr_maybe_nan_p (arg1));
14861 CASE_CFN_FMIN:
14862 CASE_CFN_FMIN_FN:
14863 /* True if the 1st AND 2nd arguments are nonnegative. */
14864 return RECURSE (arg0) && RECURSE (arg1);
14866 CASE_CFN_COPYSIGN:
14867 CASE_CFN_COPYSIGN_FN:
14868 /* True if the 2nd argument is nonnegative. */
14869 return RECURSE (arg1);
14871 CASE_CFN_POWI:
14872 /* True if the 1st argument is nonnegative or the second
14873 argument is an even integer. */
14874 if (TREE_CODE (arg1) == INTEGER_CST
14875 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14876 return true;
14877 return RECURSE (arg0);
14879 CASE_CFN_POW:
14880 /* True if the 1st argument is nonnegative or the second
14881 argument is an even integer valued real. */
14882 if (TREE_CODE (arg1) == REAL_CST)
14884 REAL_VALUE_TYPE c;
14885 HOST_WIDE_INT n;
14887 c = TREE_REAL_CST (arg1);
14888 n = real_to_integer (&c);
14889 if ((n & 1) == 0)
14891 REAL_VALUE_TYPE cint;
14892 real_from_integer (&cint, VOIDmode, n, SIGNED);
14893 if (real_identical (&c, &cint))
14894 return true;
14897 return RECURSE (arg0);
14899 default:
14900 break;
14902 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
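
/* Worked example for the POW case above (illustrative): for
   pow (x, 4.0) the REAL_CST 4.0 converts to the integer 4, which is
   even and round-trips through real_from_integer/real_identical, so
   the result is known non-negative for any x.  For pow (x, 3.0) the
   exponent is odd and the answer falls back to whether x itself is
   non-negative.  */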
14905 /* Return true if T is known to be non-negative. If the return
14906 value is based on the assumption that signed overflow is undefined,
14907 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14908 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14910 static bool
14911 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14913 enum tree_code code = TREE_CODE (t);
14914 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14915 return true;
14917 switch (code)
14919 case TARGET_EXPR:
14921 tree temp = TARGET_EXPR_SLOT (t);
14922 t = TARGET_EXPR_INITIAL (t);
14924 /* If the initializer is non-void, then it's a normal expression
14925 that will be assigned to the slot. */
14926 if (!VOID_TYPE_P (t))
14927 return RECURSE (t);
14929 /* Otherwise, the initializer sets the slot in some way. One common
14930 way is an assignment statement at the end of the initializer. */
14931 while (1)
14933 if (TREE_CODE (t) == BIND_EXPR)
14934 t = expr_last (BIND_EXPR_BODY (t));
14935 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14936 || TREE_CODE (t) == TRY_CATCH_EXPR)
14937 t = expr_last (TREE_OPERAND (t, 0));
14938 else if (TREE_CODE (t) == STATEMENT_LIST)
14939 t = expr_last (t);
14940 else
14941 break;
14943 if (TREE_CODE (t) == MODIFY_EXPR
14944 && TREE_OPERAND (t, 0) == temp)
14945 return RECURSE (TREE_OPERAND (t, 1));
14947 return false;
14950 case CALL_EXPR:
14952 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14953 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14955 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14956 get_call_combined_fn (t),
14957 arg0,
14958 arg1,
14959 strict_overflow_p, depth);
14961 case COMPOUND_EXPR:
14962 case MODIFY_EXPR:
14963 return RECURSE (TREE_OPERAND (t, 1));
14965 case BIND_EXPR:
14966 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14968 case SAVE_EXPR:
14969 return RECURSE (TREE_OPERAND (t, 0));
14971 default:
14972 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14976 #undef RECURSE
14977 #undef tree_expr_nonnegative_warnv_p
14979 /* Return true if T is known to be non-negative. If the return
14980 value is based on the assumption that signed overflow is undefined,
14981 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14982 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14984 bool
14985 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14987 enum tree_code code;
14988 if (t == error_mark_node)
14989 return false;
14991 code = TREE_CODE (t);
14992 switch (TREE_CODE_CLASS (code))
14994 case tcc_binary:
14995 case tcc_comparison:
14996 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14997 TREE_TYPE (t),
14998 TREE_OPERAND (t, 0),
14999 TREE_OPERAND (t, 1),
15000 strict_overflow_p, depth);
15002 case tcc_unary:
15003 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15004 TREE_TYPE (t),
15005 TREE_OPERAND (t, 0),
15006 strict_overflow_p, depth);
15008 case tcc_constant:
15009 case tcc_declaration:
15010 case tcc_reference:
15011 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15013 default:
15014 break;
15017 switch (code)
15019 case TRUTH_AND_EXPR:
15020 case TRUTH_OR_EXPR:
15021 case TRUTH_XOR_EXPR:
15022 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15023 TREE_TYPE (t),
15024 TREE_OPERAND (t, 0),
15025 TREE_OPERAND (t, 1),
15026 strict_overflow_p, depth);
15027 case TRUTH_NOT_EXPR:
15028 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15029 TREE_TYPE (t),
15030 TREE_OPERAND (t, 0),
15031 strict_overflow_p, depth);
15033 case COND_EXPR:
15034 case CONSTRUCTOR:
15035 case OBJ_TYPE_REF:
15036 case ASSERT_EXPR:
15037 case ADDR_EXPR:
15038 case WITH_SIZE_EXPR:
15039 case SSA_NAME:
15040 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15042 default:
15043 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15047 /* Return true if `t' is known to be non-negative. Handle warnings
15048 about undefined signed overflow. */
15050 bool
15051 tree_expr_nonnegative_p (tree t)
15053 bool ret, strict_overflow_p;
15055 strict_overflow_p = false;
15056 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15057 if (strict_overflow_p)
15058 fold_overflow_warning (("assuming signed overflow does not occur when "
15059 "determining that expression is always "
15060 "non-negative"),
15061 WARN_STRICT_OVERFLOW_MISC);
15062 return ret;
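
/* Illustrative sketch (hypothetical helper, not from the upstream
   sources): a typical client of tree_expr_nonnegative_p discharges an
   ABS_EXPR whose operand is provably non-negative.  */

static tree ATTRIBUTE_UNUSED
example_fold_redundant_abs (location_t loc, tree type, tree arg)
{
  /* |ARG| is ARG when ARG is known non-negative, so the ABS_EXPR
     can be dropped.  */
  if (tree_expr_nonnegative_p (arg))
    return fold_convert_loc (loc, type, arg);
  return NULL_TREE;
}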
15066 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15067 For floating point we further ensure that T is not denormal.
15068 Similar logic is present in nonzero_address in rtlanal.c.
15070 If the return value is based on the assumption that signed overflow
15071 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15072 change *STRICT_OVERFLOW_P. */
15074 bool
15075 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15076 bool *strict_overflow_p)
15078 switch (code)
15080 case ABS_EXPR:
15081 return tree_expr_nonzero_warnv_p (op0,
15082 strict_overflow_p);
15084 case NOP_EXPR:
15086 tree inner_type = TREE_TYPE (op0);
15087 tree outer_type = type;
15089 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15090 && tree_expr_nonzero_warnv_p (op0,
15091 strict_overflow_p));
15093 break;
15095 case NON_LVALUE_EXPR:
15096 return tree_expr_nonzero_warnv_p (op0,
15097 strict_overflow_p);
15099 default:
15100 break;
15103 return false;
15106 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15107 For floating point we further ensure that T is not denormal.
15108 Similar logic is present in nonzero_address in rtlanal.c.
15110 If the return value is based on the assumption that signed overflow
15111 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15112 change *STRICT_OVERFLOW_P. */
15114 bool
15115 tree_binary_nonzero_warnv_p (enum tree_code code,
15116 tree type,
15117 tree op0,
15118 tree op1, bool *strict_overflow_p)
15120 bool sub_strict_overflow_p;
15121 switch (code)
15123 case POINTER_PLUS_EXPR:
15124 case PLUS_EXPR:
15125 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15127 /* In the presence of negative values it is hard
15128 to say anything. */
15129 sub_strict_overflow_p = false;
15130 if (!tree_expr_nonnegative_warnv_p (op0,
15131 &sub_strict_overflow_p)
15132 || !tree_expr_nonnegative_warnv_p (op1,
15133 &sub_strict_overflow_p))
15134 return false;
15135 /* One of the operands must be positive and the other non-negative. */
15136 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15137 overflows, on a twos-complement machine the sum of two
15138 nonnegative numbers can never be zero. */
15139 return (tree_expr_nonzero_warnv_p (op0,
15140 strict_overflow_p)
15141 || tree_expr_nonzero_warnv_p (op1,
15142 strict_overflow_p));
15144 break;
15146 case MULT_EXPR:
15147 if (TYPE_OVERFLOW_UNDEFINED (type))
15149 if (tree_expr_nonzero_warnv_p (op0,
15150 strict_overflow_p)
15151 && tree_expr_nonzero_warnv_p (op1,
15152 strict_overflow_p))
15154 *strict_overflow_p = true;
15155 return true;
15158 break;
15160 case MIN_EXPR:
15161 sub_strict_overflow_p = false;
15162 if (tree_expr_nonzero_warnv_p (op0,
15163 &sub_strict_overflow_p)
15164 && tree_expr_nonzero_warnv_p (op1,
15165 &sub_strict_overflow_p))
15167 if (sub_strict_overflow_p)
15168 *strict_overflow_p = true;
15170 break;
15172 case MAX_EXPR:
15173 sub_strict_overflow_p = false;
15174 if (tree_expr_nonzero_warnv_p (op0,
15175 &sub_strict_overflow_p))
15177 if (sub_strict_overflow_p)
15178 *strict_overflow_p = true;
15180 /* When both operands are nonzero, then MAX must be too. */
15181 if (tree_expr_nonzero_warnv_p (op1,
15182 strict_overflow_p))
15183 return true;
15185 /* MAX where operand 0 is positive is positive. */
15186 return tree_expr_nonnegative_warnv_p (op0,
15187 strict_overflow_p);
15189 /* MAX where operand 1 is positive is positive. */
15190 else if (tree_expr_nonzero_warnv_p (op1,
15191 &sub_strict_overflow_p)
15192 && tree_expr_nonnegative_warnv_p (op1,
15193 &sub_strict_overflow_p))
15195 if (sub_strict_overflow_p)
15196 *strict_overflow_p = true;
15197 return true;
15199 break;
15201 case BIT_IOR_EXPR:
15202 return (tree_expr_nonzero_warnv_p (op1,
15203 strict_overflow_p)
15204 || tree_expr_nonzero_warnv_p (op0,
15205 strict_overflow_p));
15207 default:
15208 break;
15211 return false;
15214 /* Return true when T is an address and is known to be nonzero.
15215 For floating point we further ensure that T is not denormal.
15216 Similar logic is present in nonzero_address in rtlanal.c.
15218 If the return value is based on the assumption that signed overflow
15219 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15220 change *STRICT_OVERFLOW_P. */
15222 bool
15223 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15225 bool sub_strict_overflow_p;
15226 switch (TREE_CODE (t))
15228 case INTEGER_CST:
15229 return !integer_zerop (t);
15231 case ADDR_EXPR:
15233 tree base = TREE_OPERAND (t, 0);
15235 if (!DECL_P (base))
15236 base = get_base_address (base);
15238 if (base && TREE_CODE (base) == TARGET_EXPR)
15239 base = TARGET_EXPR_SLOT (base);
15241 if (!base)
15242 return false;
15244 /* For objects in symbol table check if we know they are non-zero.
15245 Don't do anything for variables and functions before symtab is built;
15246 it is quite possible that they will be declared weak later. */
15247 int nonzero_addr = maybe_nonzero_address (base);
15248 if (nonzero_addr >= 0)
15249 return nonzero_addr;
15251 /* Constants are never weak. */
15252 if (CONSTANT_CLASS_P (base))
15253 return true;
15255 return false;
15258 case COND_EXPR:
15259 sub_strict_overflow_p = false;
15260 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15261 &sub_strict_overflow_p)
15262 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15263 &sub_strict_overflow_p))
15265 if (sub_strict_overflow_p)
15266 *strict_overflow_p = true;
15267 return true;
15269 break;
15271 case SSA_NAME:
15272 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15273 break;
15274 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15276 default:
15277 break;
15279 return false;
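
/* Illustrative sketch (hypothetical, mirroring tree_expr_nonnegative_p
   above): a wrapper over tree_expr_nonzero_warnv_p that emits the
   strict-overflow warning in one place.  */

static bool ATTRIBUTE_UNUSED
example_expr_nonzero_p (tree t)
{
  bool strict_overflow_p = false;
  bool ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}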
15282 #define integer_valued_real_p(X) \
15283 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15285 #define RECURSE(X) \
15286 ((integer_valued_real_p) (X, depth + 1))
15288 /* Return true if the floating point result of (CODE OP0) has an
15289 integer value. We also allow +Inf, -Inf and NaN to be considered
15290 integer values. Return false for signaling NaN.
15292 DEPTH is the current nesting depth of the query. */
15294 bool
15295 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15297 switch (code)
15299 case FLOAT_EXPR:
15300 return true;
15302 case ABS_EXPR:
15303 return RECURSE (op0);
15305 CASE_CONVERT:
15307 tree type = TREE_TYPE (op0);
15308 if (TREE_CODE (type) == INTEGER_TYPE)
15309 return true;
15310 if (TREE_CODE (type) == REAL_TYPE)
15311 return RECURSE (op0);
15312 break;
15315 default:
15316 break;
15318 return false;
15321 /* Return true if the floating point result of (CODE OP0 OP1) has an
15322 integer value. We also allow +Inf, -Inf and NaN to be considered
15323 integer values. Return false for signaling NaN.
15325 DEPTH is the current nesting depth of the query. */
15327 bool
15328 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15330 switch (code)
15332 case PLUS_EXPR:
15333 case MINUS_EXPR:
15334 case MULT_EXPR:
15335 case MIN_EXPR:
15336 case MAX_EXPR:
15337 return RECURSE (op0) && RECURSE (op1);
15339 default:
15340 break;
15342 return false;
15345 /* Return true if the floating point result of calling FN with arguments
15346 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15347 considered integer values. Return false for signaling NaN. If FN
15348 takes fewer than 2 arguments, the remaining ARGn are null.
15350 DEPTH is the current nesting depth of the query. */
15352 bool
15353 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15355 switch (fn)
15357 CASE_CFN_CEIL:
15358 CASE_CFN_CEIL_FN:
15359 CASE_CFN_FLOOR:
15360 CASE_CFN_FLOOR_FN:
15361 CASE_CFN_NEARBYINT:
15362 CASE_CFN_NEARBYINT_FN:
15363 CASE_CFN_RINT:
15364 CASE_CFN_RINT_FN:
15365 CASE_CFN_ROUND:
15366 CASE_CFN_ROUND_FN:
15367 CASE_CFN_ROUNDEVEN:
15368 CASE_CFN_ROUNDEVEN_FN:
15369 CASE_CFN_TRUNC:
15370 CASE_CFN_TRUNC_FN:
15371 return true;
15373 CASE_CFN_FMIN:
15374 CASE_CFN_FMIN_FN:
15375 CASE_CFN_FMAX:
15376 CASE_CFN_FMAX_FN:
15377 return RECURSE (arg0) && RECURSE (arg1);
15379 default:
15380 break;
15382 return false;
15385 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15386 has an integer value. We also allow +Inf, -Inf and NaN to be
15387 considered integer values. Return false for signaling NaN.
15389 DEPTH is the current nesting depth of the query. */
15391 bool
15392 integer_valued_real_single_p (tree t, int depth)
15394 switch (TREE_CODE (t))
15396 case REAL_CST:
15397 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15399 case COND_EXPR:
15400 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15402 case SSA_NAME:
15403 /* Limit the depth of recursion to avoid quadratic behavior.
15404 This is expected to catch almost all occurrences in practice.
15405 If this code misses important cases that unbounded recursion
15406 would not, passes that need this information could be revised
15407 to provide it through dataflow propagation. */
15408 return (!name_registered_for_update_p (t)
15409 && depth < param_max_ssa_name_query_depth
15410 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15411 depth));
15413 default:
15414 break;
15416 return false;
15419 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15420 has an integer value. We also allow +Inf, -Inf and NaN to be
15421 considered integer values. Return false for signaling NaN.
15423 DEPTH is the current nesting depth of the query. */
15425 static bool
15426 integer_valued_real_invalid_p (tree t, int depth)
15428 switch (TREE_CODE (t))
15430 case COMPOUND_EXPR:
15431 case MODIFY_EXPR:
15432 case BIND_EXPR:
15433 return RECURSE (TREE_OPERAND (t, 1));
15435 case SAVE_EXPR:
15436 return RECURSE (TREE_OPERAND (t, 0));
15438 default:
15439 break;
15441 return false;
15444 #undef RECURSE
15445 #undef integer_valued_real_p
15447 /* Return true if the floating point expression T has an integer value.
15448 We also allow +Inf, -Inf and NaN to be considered integer values.
15449 Return false for signaling NaN.
15451 DEPTH is the current nesting depth of the query. */
15453 bool
15454 integer_valued_real_p (tree t, int depth)
15456 if (t == error_mark_node)
15457 return false;
15459 STRIP_ANY_LOCATION_WRAPPER (t);
15461 tree_code code = TREE_CODE (t);
15462 switch (TREE_CODE_CLASS (code))
15464 case tcc_binary:
15465 case tcc_comparison:
15466 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15467 TREE_OPERAND (t, 1), depth);
15469 case tcc_unary:
15470 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15472 case tcc_constant:
15473 case tcc_declaration:
15474 case tcc_reference:
15475 return integer_valued_real_single_p (t, depth);
15477 default:
15478 break;
15481 switch (code)
15483 case COND_EXPR:
15484 case SSA_NAME:
15485 return integer_valued_real_single_p (t, depth);
15487 case CALL_EXPR:
15489 tree arg0 = (call_expr_nargs (t) > 0
15490 ? CALL_EXPR_ARG (t, 0)
15491 : NULL_TREE);
15492 tree arg1 = (call_expr_nargs (t) > 1
15493 ? CALL_EXPR_ARG (t, 1)
15494 : NULL_TREE);
15495 return integer_valued_real_call_p (get_call_combined_fn (t),
15496 arg0, arg1, depth);
15499 default:
15500 return integer_valued_real_invalid_p (t, depth);
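
/* Illustrative sketch (hypothetical helper): a caller can use
   integer_valued_real_p to elide a redundant trunc () whose operand is
   already integer valued; the predicate deliberately counts +-Inf and
   quiet NaNs as integer valued, and trunc is an identity on those too.  */

static tree ATTRIBUTE_UNUSED
example_fold_redundant_trunc (tree arg)
{
  if (integer_valued_real_p (arg, 0))
    return arg;	/* trunc (ARG) == ARG.  */
  return NULL_TREE;
}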
15504 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15505 attempt to fold the expression to a constant without modifying TYPE,
15506 OP0 or OP1.
15508 If the expression could be simplified to a constant, then return
15509 the constant. If the expression would not be simplified to a
15510 constant, then return NULL_TREE. */
15512 tree
15513 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15515 tree tem = fold_binary (code, type, op0, op1);
15516 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15519 /* Given the components of a unary expression CODE, TYPE and OP0,
15520 attempt to fold the expression to a constant without modifying
15521 TYPE or OP0.
15523 If the expression could be simplified to a constant, then return
15524 the constant. If the expression would not be simplified to a
15525 constant, then return NULL_TREE. */
15527 tree
15528 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15530 tree tem = fold_unary (code, type, op0);
15531 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
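
/* Illustrative sketch (hypothetical): fold_binary_to_constant folds
   2 + 3 in int to the INTEGER_CST 5; with a non-constant operand it
   returns NULL_TREE rather than a partially simplified tree.  */

static tree ATTRIBUTE_UNUSED
example_fold_two_plus_three (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}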
15534 /* If EXP represents referencing an element in a constant string
15535 (either via pointer arithmetic or array indexing), return the
15536 tree representing the value accessed, otherwise return NULL. */
15538 tree
15539 fold_read_from_constant_string (tree exp)
15541 if ((TREE_CODE (exp) == INDIRECT_REF
15542 || TREE_CODE (exp) == ARRAY_REF)
15543 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15545 tree exp1 = TREE_OPERAND (exp, 0);
15546 tree index;
15547 tree string;
15548 location_t loc = EXPR_LOCATION (exp);
15550 if (TREE_CODE (exp) == INDIRECT_REF)
15551 string = string_constant (exp1, &index, NULL, NULL);
15552 else
15554 tree low_bound = array_ref_low_bound (exp);
15555 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15557 /* Optimize the special-case of a zero lower bound.
15559 We convert the low_bound to sizetype to avoid some problems
15560 with constant folding. (E.g. suppose the lower bound is 1,
15561 and its mode is QI. Without the conversion, (ARRAY
15562 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15563 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15564 if (! integer_zerop (low_bound))
15565 index = size_diffop_loc (loc, index,
15566 fold_convert_loc (loc, sizetype, low_bound));
15568 string = exp1;
15571 scalar_int_mode char_mode;
15572 if (string
15573 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15574 && TREE_CODE (string) == STRING_CST
15575 && tree_fits_uhwi_p (index)
15576 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15577 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15578 &char_mode)
15579 && GET_MODE_SIZE (char_mode) == 1)
15580 return build_int_cst_type (TREE_TYPE (exp),
15581 (TREE_STRING_POINTER (string)
15582 [TREE_INT_CST_LOW (index)]));
15584 return NULL;
15587 /* Folds a read from vector element at IDX of vector ARG. */
15589 tree
15590 fold_read_from_vector (tree arg, poly_uint64 idx)
15592 unsigned HOST_WIDE_INT i;
15593 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15594 && known_ge (idx, 0u)
15595 && idx.is_constant (&i))
15597 if (TREE_CODE (arg) == VECTOR_CST)
15598 return VECTOR_CST_ELT (arg, i);
15599 else if (TREE_CODE (arg) == CONSTRUCTOR)
15601 if (CONSTRUCTOR_NELTS (arg)
15602 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15603 return NULL_TREE;
15604 if (i >= CONSTRUCTOR_NELTS (arg))
15605 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15606 return CONSTRUCTOR_ELT (arg, i)->value;
15609 return NULL_TREE;
15612 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15613 an integer, real, or fixed-point constant.
15615 TYPE is the type of the result. */
15617 static tree
15618 fold_negate_const (tree arg0, tree type)
15620 tree t = NULL_TREE;
15622 switch (TREE_CODE (arg0))
15624 case REAL_CST:
15625 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15626 break;
15628 case FIXED_CST:
15630 FIXED_VALUE_TYPE f;
15631 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15632 &(TREE_FIXED_CST (arg0)), NULL,
15633 TYPE_SATURATING (type));
15634 t = build_fixed (type, f);
15635 /* Propagate overflow flags. */
15636 if (overflow_p | TREE_OVERFLOW (arg0))
15637 TREE_OVERFLOW (t) = 1;
15638 break;
15641 default:
15642 if (poly_int_tree_p (arg0))
15644 wi::overflow_type overflow;
15645 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15646 t = force_fit_type (type, res, 1,
15647 (overflow && ! TYPE_UNSIGNED (type))
15648 || TREE_OVERFLOW (arg0));
15649 break;
15652 gcc_unreachable ();
15655 return t;
15658 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15659 an integer constant or real constant.
15661 TYPE is the type of the result. */
15663 tree
15664 fold_abs_const (tree arg0, tree type)
15666 tree t = NULL_TREE;
15668 switch (TREE_CODE (arg0))
15670 case INTEGER_CST:
15672 /* If the value is unsigned or non-negative, then the absolute value
15673 is the same as the ordinary value. */
15674 wide_int val = wi::to_wide (arg0);
15675 wi::overflow_type overflow = wi::OVF_NONE;
15676 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15679 /* If the value is negative, then the absolute value is
15680 its negation. */
15681 else
15682 val = wi::neg (val, &overflow);
15684 /* Force to the destination type, set TREE_OVERFLOW for signed
15685 TYPE only. */
15686 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15688 break;
15690 case REAL_CST:
15691 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15692 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15693 else
15694 t = arg0;
15695 break;
15697 default:
15698 gcc_unreachable ();
15701 return t;
15704 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15705 constant. TYPE is the type of the result. */
15707 static tree
15708 fold_not_const (const_tree arg0, tree type)
15710 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15712 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
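
/* Illustrative sketch (hypothetical): the constant unary folders above
   applied to the INTEGER_CST -5.  Note that fold_abs_const on the
   minimum value of a signed type wraps and sets TREE_OVERFLOW.  */

static void ATTRIBUTE_UNUSED
example_constant_unary_folds (void)
{
  tree m5 = build_int_cst (integer_type_node, -5);
  tree negval = fold_negate_const (m5, integer_type_node);	/* 5 */
  tree absval = fold_abs_const (m5, integer_type_node);	/* 5 */
  tree notval = fold_not_const (m5, integer_type_node);	/* ~-5 == 4 */
  gcc_assert (tree_int_cst_equal (negval, absval)
	      && wi::to_wide (notval) == 4);
}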
15715 /* Given CODE, a relational operator, the target type, TYPE and two
15716 constant operands OP0 and OP1, return the result of the
15717 relational operation. If the result is not a compile time
15718 constant, then return NULL_TREE. */
15720 static tree
15721 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15723 int result, invert;
15725 /* From here on, the only cases we handle are when the result is
15726 known to be a constant. */
15728 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15730 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15731 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15733 /* Handle the cases where either operand is a NaN. */
15734 if (real_isnan (c0) || real_isnan (c1))
15736 switch (code)
15738 case EQ_EXPR:
15739 case ORDERED_EXPR:
15740 result = 0;
15741 break;
15743 case NE_EXPR:
15744 case UNORDERED_EXPR:
15745 case UNLT_EXPR:
15746 case UNLE_EXPR:
15747 case UNGT_EXPR:
15748 case UNGE_EXPR:
15749 case UNEQ_EXPR:
15750 result = 1;
15751 break;
15753 case LT_EXPR:
15754 case LE_EXPR:
15755 case GT_EXPR:
15756 case GE_EXPR:
15757 case LTGT_EXPR:
15758 if (flag_trapping_math)
15759 return NULL_TREE;
15760 result = 0;
15761 break;
15763 default:
15764 gcc_unreachable ();
15767 return constant_boolean_node (result, type);
15770 return constant_boolean_node (real_compare (code, c0, c1), type);
15773 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15775 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15776 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15777 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15780 /* Handle equality/inequality of complex constants. */
15781 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15783 tree rcond = fold_relational_const (code, type,
15784 TREE_REALPART (op0),
15785 TREE_REALPART (op1));
15786 tree icond = fold_relational_const (code, type,
15787 TREE_IMAGPART (op0),
15788 TREE_IMAGPART (op1));
15789 if (code == EQ_EXPR)
15790 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15791 else if (code == NE_EXPR)
15792 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15793 else
15794 return NULL_TREE;
15797 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15799 if (!VECTOR_TYPE_P (type))
15801 /* Have vector comparison with scalar boolean result. */
15802 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15803 && known_eq (VECTOR_CST_NELTS (op0),
15804 VECTOR_CST_NELTS (op1)));
15805 unsigned HOST_WIDE_INT nunits;
15806 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15807 return NULL_TREE;
15808 for (unsigned i = 0; i < nunits; i++)
15810 tree elem0 = VECTOR_CST_ELT (op0, i);
15811 tree elem1 = VECTOR_CST_ELT (op1, i);
15812 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15813 if (tmp == NULL_TREE)
15814 return NULL_TREE;
15815 if (integer_zerop (tmp))
15816 return constant_boolean_node (code == NE_EXPR, type);
15818 return constant_boolean_node (code == EQ_EXPR, type);
15820 tree_vector_builder elts;
15821 if (!elts.new_binary_operation (type, op0, op1, false))
15822 return NULL_TREE;
15823 unsigned int count = elts.encoded_nelts ();
15824 for (unsigned i = 0; i < count; i++)
15826 tree elem_type = TREE_TYPE (type);
15827 tree elem0 = VECTOR_CST_ELT (op0, i);
15828 tree elem1 = VECTOR_CST_ELT (op1, i);
15830 tree tem = fold_relational_const (code, elem_type,
15831 elem0, elem1);
15833 if (tem == NULL_TREE)
15834 return NULL_TREE;
15836 elts.quick_push (build_int_cst (elem_type,
15837 integer_zerop (tem) ? 0 : -1));
15840 return elts.build ();
15843 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15845 To compute GT, swap the arguments and do LT.
15846 To compute GE, do LT and invert the result.
15847 To compute LE, swap the arguments, do LT and invert the result.
15848 To compute NE, do EQ and invert the result.
15850 Therefore, the code below must handle only EQ and LT. */
15852 if (code == LE_EXPR || code == GT_EXPR)
15854 std::swap (op0, op1);
15855 code = swap_tree_comparison (code);
15858 /* Note that it is safe to invert for real values here because we
15859 have already handled the one case that it matters. */
15861 invert = 0;
15862 if (code == NE_EXPR || code == GE_EXPR)
15864 invert = 1;
15865 code = invert_tree_comparison (code, false);
15868 /* Compute a result for LT or EQ if args permit;
15869 otherwise return NULL_TREE. */
15870 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15872 if (code == EQ_EXPR)
15873 result = tree_int_cst_equal (op0, op1);
15874 else
15875 result = tree_int_cst_lt (op0, op1);
15877 else
15878 return NULL_TREE;
15880 if (invert)
15881 result ^= 1;
15882 return constant_boolean_node (result, type);
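
/* Illustrative sketch (hypothetical): 2 < 3 folds through the
   INTEGER_CST path at the end of fold_relational_const to
   boolean_true_node; with one operand not a constant the function
   returns NULL_TREE instead.  */

static void ATTRIBUTE_UNUSED
example_fold_relational_const (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  gcc_assert (fold_relational_const (LT_EXPR, boolean_type_node, two, three)
	      == boolean_true_node);
}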
15885 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15886 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15887 itself. */
15889 tree
15890 fold_build_cleanup_point_expr (tree type, tree expr)
15892 /* If the expression does not have side effects then we don't have to wrap
15893 it with a cleanup point expression. */
15894 if (!TREE_SIDE_EFFECTS (expr))
15895 return expr;
15897 /* If the expression is a return, check the expression inside the
15898 return: if either it or the right hand side of the modify expression
15899 it contains has no side effects, we don't need to wrap the expression
15900 in a cleanup point expression. Note we don't check the left hand
15901 side of the modify because it should always be the return decl. */
15902 if (TREE_CODE (expr) == RETURN_EXPR)
15904 tree op = TREE_OPERAND (expr, 0);
15905 if (!op || !TREE_SIDE_EFFECTS (op))
15906 return expr;
15907 op = TREE_OPERAND (op, 1);
15908 if (!TREE_SIDE_EFFECTS (op))
15909 return expr;
15912 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15915 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15916 of an indirection through OP0, or NULL_TREE if no simplification is
15917 possible. */
15919 tree
15920 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15922 tree sub = op0;
15923 tree subtype;
15924 poly_uint64 const_op01;
15926 STRIP_NOPS (sub);
15927 subtype = TREE_TYPE (sub);
15928 if (!POINTER_TYPE_P (subtype)
15929 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15930 return NULL_TREE;
15932 if (TREE_CODE (sub) == ADDR_EXPR)
15934 tree op = TREE_OPERAND (sub, 0);
15935 tree optype = TREE_TYPE (op);
15937 /* *&CONST_DECL -> to the value of the const decl. */
15938 if (TREE_CODE (op) == CONST_DECL)
15939 return DECL_INITIAL (op);
15940 /* *&p => p; make sure to handle *&"str"[cst] here. */
15941 if (type == optype)
15943 tree fop = fold_read_from_constant_string (op);
15944 if (fop)
15945 return fop;
15946 else
15947 return op;
15949 /* *(foo *)&fooarray => fooarray[0] */
15950 else if (TREE_CODE (optype) == ARRAY_TYPE
15951 && type == TREE_TYPE (optype)
15952 && (!in_gimple_form
15953 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15955 tree type_domain = TYPE_DOMAIN (optype);
15956 tree min_val = size_zero_node;
15957 if (type_domain && TYPE_MIN_VALUE (type_domain))
15958 min_val = TYPE_MIN_VALUE (type_domain);
15959 if (in_gimple_form
15960 && TREE_CODE (min_val) != INTEGER_CST)
15961 return NULL_TREE;
15962 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15963 NULL_TREE, NULL_TREE);
15965 /* *(foo *)&complexfoo => __real__ complexfoo */
15966 else if (TREE_CODE (optype) == COMPLEX_TYPE
15967 && type == TREE_TYPE (optype))
15968 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15969 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15970 else if (VECTOR_TYPE_P (optype)
15971 && type == TREE_TYPE (optype))
15973 tree part_width = TYPE_SIZE (type);
15974 tree index = bitsize_int (0);
15975 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15976 index);
15980 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15981 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15983 tree op00 = TREE_OPERAND (sub, 0);
15984 tree op01 = TREE_OPERAND (sub, 1);
15986 STRIP_NOPS (op00);
15987 if (TREE_CODE (op00) == ADDR_EXPR)
15989 tree op00type;
15990 op00 = TREE_OPERAND (op00, 0);
15991 op00type = TREE_TYPE (op00);
15993 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15994 if (VECTOR_TYPE_P (op00type)
15995 && type == TREE_TYPE (op00type)
15996 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15997 but we want to treat offsets with MSB set as negative.
15998 For the code below negative offsets are invalid and
15999 TYPE_SIZE of the element is something unsigned, so
16000 check whether op01 fits into poly_int64, which implies
16001 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16002 then just use poly_uint64 because we want to treat the
16003 value as unsigned. */
16004 && tree_fits_poly_int64_p (op01))
16006 tree part_width = TYPE_SIZE (type);
16007 poly_uint64 max_offset
16008 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16009 * TYPE_VECTOR_SUBPARTS (op00type));
16010 if (known_lt (const_op01, max_offset))
16012 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16013 return fold_build3_loc (loc,
16014 BIT_FIELD_REF, type, op00,
16015 part_width, index);
16018 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16019 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16020 && type == TREE_TYPE (op00type))
16022 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16023 const_op01))
16024 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16026 /* ((foo *)&fooarray)[1] => fooarray[1] */
16027 else if (TREE_CODE (op00type) == ARRAY_TYPE
16028 && type == TREE_TYPE (op00type))
16030 tree type_domain = TYPE_DOMAIN (op00type);
16031 tree min_val = size_zero_node;
16032 if (type_domain && TYPE_MIN_VALUE (type_domain))
16033 min_val = TYPE_MIN_VALUE (type_domain);
16034 poly_uint64 type_size, index;
16035 if (poly_int_tree_p (min_val)
16036 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16037 && multiple_p (const_op01, type_size, &index))
16039 poly_offset_int off = index + wi::to_poly_offset (min_val);
16040 op01 = wide_int_to_tree (sizetype, off);
16041 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16042 NULL_TREE, NULL_TREE);
16048 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16049 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16050 && type == TREE_TYPE (TREE_TYPE (subtype))
16051 && (!in_gimple_form
16052 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16054 tree type_domain;
16055 tree min_val = size_zero_node;
16056 sub = build_fold_indirect_ref_loc (loc, sub);
16057 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16058 if (type_domain && TYPE_MIN_VALUE (type_domain))
16059 min_val = TYPE_MIN_VALUE (type_domain);
16060 if (in_gimple_form
16061 && TREE_CODE (min_val) != INTEGER_CST)
16062 return NULL_TREE;
16063 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16064 NULL_TREE);
16067 return NULL_TREE;
16070 /* Builds an expression for an indirection through T, simplifying some
16071 cases. */
16073 tree
16074 build_fold_indirect_ref_loc (location_t loc, tree t)
16076 tree type = TREE_TYPE (TREE_TYPE (t));
16077 tree sub = fold_indirect_ref_1 (loc, type, t);
16079 if (sub)
16080 return sub;
16082 return build1_loc (loc, INDIRECT_REF, type, t);
16085 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16087 tree
16088 fold_indirect_ref_loc (location_t loc, tree t)
16090 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16092 if (sub)
16093 return sub;
16094 else
16095 return t;
16098 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16099 whose result is ignored. The type of the returned tree need not be
16100 the same as the original expression. */
16102 tree
16103 fold_ignored_result (tree t)
16105 if (!TREE_SIDE_EFFECTS (t))
16106 return integer_zero_node;
16108 for (;;)
16109 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16111 case tcc_unary:
16112 t = TREE_OPERAND (t, 0);
16113 break;
16115 case tcc_binary:
16116 case tcc_comparison:
16117 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16118 t = TREE_OPERAND (t, 0);
16119 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16120 t = TREE_OPERAND (t, 1);
16121 else
16122 return t;
16123 break;
16125 case tcc_expression:
16126 switch (TREE_CODE (t))
16128 case COMPOUND_EXPR:
16129 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16130 return t;
16131 t = TREE_OPERAND (t, 0);
16132 break;
16134 case COND_EXPR:
16135 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16136 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16137 return t;
16138 t = TREE_OPERAND (t, 0);
16139 break;
16141 default:
16142 return t;
16144 break;
16146 default:
16147 return t;
16151 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16153 tree
16154 round_up_loc (location_t loc, tree value, unsigned int divisor)
16156 tree div = NULL_TREE;
16158 if (divisor == 1)
16159 return value;
16161 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16162 have to do anything. Only perform this check when VALUE is not a
16163 constant; for a constant, the check is more expensive than simply
16164 doing the rounding. */
16165 if (TREE_CODE (value) != INTEGER_CST)
16167 div = build_int_cst (TREE_TYPE (value), divisor);
16169 if (multiple_of_p (TREE_TYPE (value), value, div))
16170 return value;
16173 /* If divisor is a power of two, simplify this to bit manipulation. */
16174 if (pow2_or_zerop (divisor))
16176 if (TREE_CODE (value) == INTEGER_CST)
16178 wide_int val = wi::to_wide (value);
16179 bool overflow_p;
16181 if ((val & (divisor - 1)) == 0)
16182 return value;
16184 overflow_p = TREE_OVERFLOW (value);
16185 val += divisor - 1;
16186 val &= (int) -divisor;
16187 if (val == 0)
16188 overflow_p = true;
16190 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16192 else
16194 tree t;
16196 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16197 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16198 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16199 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16202 else
16204 if (!div)
16205 div = build_int_cst (TREE_TYPE (value), divisor);
16206 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16207 value = size_binop_loc (loc, MULT_EXPR, value, div);
16210 return value;
16213 /* Likewise, but round down. */
16215 tree
16216 round_down_loc (location_t loc, tree value, int divisor)
16218 tree div = NULL_TREE;
16220 gcc_assert (divisor > 0);
16221 if (divisor == 1)
16222 return value;
16224 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16225 have to do anything. Only perform this check when VALUE is not a
16226 constant; for a constant, the check is more expensive than simply
16227 doing the rounding. */
16228 if (TREE_CODE (value) != INTEGER_CST)
16230 div = build_int_cst (TREE_TYPE (value), divisor);
16232 if (multiple_of_p (TREE_TYPE (value), value, div))
16233 return value;
16236 /* If divisor is a power of two, simplify this to bit manipulation. */
16237 if (pow2_or_zerop (divisor))
16239 tree t;
16241 t = build_int_cst (TREE_TYPE (value), -divisor);
16242 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16244 else
16246 if (!div)
16247 div = build_int_cst (TREE_TYPE (value), divisor);
16248 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16249 value = size_binop_loc (loc, MULT_EXPR, value, div);
16252 return value;
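
/* Worked example for the power-of-two paths above (illustrative):
   round_up (37, 8) computes (37 + 7) & -8 = 44 & ~7 = 40, and
   round_down (37, 8) computes 37 & -8 = 32.  The non-power-of-two
   paths reach the same results via CEIL_DIV_EXPR or FLOOR_DIV_EXPR
   followed by MULT_EXPR.  */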
16255 /* Returns the pointer to the base of the object addressed by EXP and
16256 extracts the information about the offset of the access, storing it
16257 to PBITPOS and POFFSET. */
16259 static tree
16260 split_address_to_core_and_offset (tree exp,
16261 poly_int64_pod *pbitpos, tree *poffset)
16263 tree core;
16264 machine_mode mode;
16265 int unsignedp, reversep, volatilep;
16266 poly_int64 bitsize;
16267 location_t loc = EXPR_LOCATION (exp);
16269 if (TREE_CODE (exp) == ADDR_EXPR)
16271 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16272 poffset, &mode, &unsignedp, &reversep,
16273 &volatilep);
16274 core = build_fold_addr_expr_loc (loc, core);
16276 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16278 core = TREE_OPERAND (exp, 0);
16279 STRIP_NOPS (core);
16280 *pbitpos = 0;
16281 *poffset = TREE_OPERAND (exp, 1);
16282 if (poly_int_tree_p (*poffset))
16284 poly_offset_int tem
16285 = wi::sext (wi::to_poly_offset (*poffset),
16286 TYPE_PRECISION (TREE_TYPE (*poffset)));
16287 tem <<= LOG2_BITS_PER_UNIT;
16288 if (tem.to_shwi (pbitpos))
16289 *poffset = NULL_TREE;
16292 else
16294 core = exp;
16295 *pbitpos = 0;
16296 *poffset = NULL_TREE;
16299 return core;
16302 /* Returns true if addresses of E1 and E2 differ by a constant, false
16303 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16305 bool
16306 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16308 tree core1, core2;
16309 poly_int64 bitpos1, bitpos2;
16310 tree toffset1, toffset2, tdiff, type;
16312 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16313 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16315 poly_int64 bytepos1, bytepos2;
16316 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16317 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16318 || !operand_equal_p (core1, core2, 0))
16319 return false;
16321 if (toffset1 && toffset2)
16323 type = TREE_TYPE (toffset1);
16324 if (type != TREE_TYPE (toffset2))
16325 toffset2 = fold_convert (type, toffset2);
16327 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16328 if (!cst_and_fits_in_hwi (tdiff))
16329 return false;
16331 *diff = int_cst_value (tdiff);
16333 else if (toffset1 || toffset2)
16335 /* If only one of the offsets is non-constant, the difference cannot
16336 be a constant. */
16337 return false;
16339 else
16340 *diff = 0;
16342 *diff += bytepos1 - bytepos2;
16343 return true;
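
/* Worked example (illustrative): for E1 = &a[3] and E2 = &a[1], with a
   an array of 4-byte ints, both addresses split to the common core &a
   with constant byte positions 12 and 4, so ptr_difference_const
   succeeds and stores 8 in *DIFF.  For E1 = &a[i] the offset is not
   constant and the function returns false.  */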
16346 /* Return OFF converted to a pointer offset type suitable as offset for
16347 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16348 tree
16349 convert_to_ptrofftype_loc (location_t loc, tree off)
16351 if (ptrofftype_p (TREE_TYPE (off)))
16352 return off;
16353 return fold_convert_loc (loc, sizetype, off);
/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
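/* A hedged usage sketch of the two wrappers above, assuming PTR is a
   GENERIC tree of pointer type and LOC a source location; both calls
   build the same folded POINTER_PLUS_EXPR, one from an offset tree
   (converted to sizetype via convert_to_ptrofftype_loc) and one from
   a host-side constant (wrapped by size_int):

     tree p1 = fold_build_pointer_plus_loc (loc, ptr, off_tree);
     tree p2 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);  */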
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE to the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */
const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}
/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}
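/* An illustrative contrast of the two entry points, reusing the
   example from getbyterep: assuming SRC refers into the initialized
   constant
     const char a[7] = "abc\0d";
   c_getstr on &a[0] yields a pointer that reads as the C string "abc",
   while getbyterep on &a[1] with a non-null STRSIZE yields the raw
   bytes "bc\0d" and sets *STRSIZE to 5, i.e. embedded NULs are only
   permitted when the byte representation is requested.  */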
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
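/* A note on the PLUS_EXPR case above: when the possibly-nonzero bit
   sets of the two addends are disjoint, no bit position can generate
   a carry, so the sum has exactly the union of the two sets.  A worked
   example, assuming 8-bit precision for brevity:

     (x & 0xF0) + (y & 0x0F)  -->  11110000 | 00001111  ==  11111111

   whereas the operands of (x & 0x18) + (y & 0x0F) overlap in bit 3,
   so the function falls through to the conservative all-ones
   answer.  */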
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true for GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */
int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  if ((DECL_P (base0) && TREE_CODE (base1) == STRING_CST)
      || (TREE_CODE (base0) == STRING_CST && DECL_P (base1))
      || (TREE_CODE (base0) == STRING_CST
	  && TREE_CODE (base1) == STRING_CST
	  && ioff0 >= 0 && ioff1 >= 0
	  && ioff0 < TREE_STRING_LENGTH (base0)
	  && ioff1 < TREE_STRING_LENGTH (base1)
	  /* This is an overly conservative test that the STRING_CSTs
	     will not end up being string-merged.  */
	  && strncmp (TREE_STRING_POINTER (base0) + ioff0,
		      TREE_STRING_POINTER (base1) + ioff1,
		      MIN (TREE_STRING_LENGTH (base0) - ioff0,
			   TREE_STRING_LENGTH (base1) - ioff1)) != 0))
    ;
  else if (!DECL_P (base0) || !DECL_P (base1))
    return 2;
  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  else if (!INTEGRAL_TYPE_P (type))
    ;
  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  else if (is_global_var (base0) != is_global_var (base1))
    ;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      tree sz1 = DECL_SIZE_UNIT (base1);
      /* If sizes are unknown, e.g. VLA or not representable, punt.  */
      if (!tree_fits_poly_int64_p (sz0) || !tree_fits_poly_int64_p (sz1))
	return 2;

      poly_int64 size0 = tree_to_poly_int64 (sz0);
      poly_int64 size1 = tree_to_poly_int64 (sz1);
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know they aren't zero, or if we know both sizes
	 are nonzero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }
  return equal;
}
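/* An illustrative reading of the return protocol above, assuming
   hypothetical declarations `int a[2]; int b;`:
     &a[0] vs &a[1]  - same base, returns 1; the caller then compares
		       OFF0 with OFF1;
     &a[0] vs &b     - distinct decls compared for equality; the
		       adjacency and size checks can return 0, i.e.
		       the addresses are known to be unequal;
   and a base that cannot be determined yields 2 (unknown).  */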
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}
/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}
/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}
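/* A note on the nonlvalue assertions above: GENERIC folds x + 0,
   x - 0 and x * 1 down to x, but the result must not be usable as an
   lvalue, so fold wraps it in NON_LVALUE_EXPR; the helper checks for
   exactly that wrapper rather than for x itself.  */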
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}
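/* A note on the index/one tests above: build_index_vector (type, 0, 1)
   builds the constant vector { 0, 1, 2, 3 }, which differs from ONE in
   some but not all lanes, so it exercises folding of vector equality
   into a scalar boolean where the lane-wise results disagree.  */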
/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}
/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */