Fix warnings building linux-atomic.c and fptr.c on hppa64-linux
[official-gcc.git] / gcc / fold-const.c
blobff23f12f33c9e06071eca2b7664d82ef44210dd7
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85 #include "asan.h"
86 #include "gimple-range.h"
88 /* Nonzero if we are folding constants inside an initializer; zero
89 otherwise. */
90 int folding_initializer = 0;
92 /* The following constants represent a bit based encoding of GCC's
93 comparison operators. This encoding simplifies transformations
94 on relational comparison operators, such as AND and OR. */
95 enum comparison_code {
96 COMPCODE_FALSE = 0,
97 COMPCODE_LT = 1,
98 COMPCODE_EQ = 2,
99 COMPCODE_LE = 3,
100 COMPCODE_GT = 4,
101 COMPCODE_LTGT = 5,
102 COMPCODE_GE = 6,
103 COMPCODE_ORD = 7,
104 COMPCODE_UNORD = 8,
105 COMPCODE_UNLT = 9,
106 COMPCODE_UNEQ = 10,
107 COMPCODE_UNLE = 11,
108 COMPCODE_UNGT = 12,
109 COMPCODE_NE = 13,
110 COMPCODE_UNGE = 14,
111 COMPCODE_TRUE = 15
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
117 static enum comparison_code comparison_to_compcode (enum tree_code);
118 static enum tree_code compcode_to_comparison (enum comparison_code);
119 static bool twoval_comparison_p (tree, tree *, tree *);
120 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
121 static tree optimize_bit_field_compare (location_t, enum tree_code,
122 tree, tree, tree);
123 static bool simple_operand_p (const_tree);
124 static bool simple_operand_p_2 (tree);
125 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
126 static tree range_predecessor (tree);
127 static tree range_successor (tree);
128 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
129 static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
130 tree, tree, tree, tree);
131 static tree unextend (tree, int, int, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (location_t,
135 enum tree_code, tree,
136 tree, tree,
137 tree, tree, int);
138 static tree fold_negate_const (tree, tree);
139 static tree fold_not_const (const_tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static tree fold_convert_const (enum tree_code, tree, tree);
142 static tree fold_view_convert_expr (tree, tree);
143 static tree fold_negate_expr (location_t, tree);
146 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
147 Otherwise, return LOC. */
149 static location_t
150 expr_location_or (tree t, location_t loc)
152 location_t tloc = EXPR_LOCATION (t);
153 return tloc == UNKNOWN_LOCATION ? loc : tloc;
156 /* Similar to protected_set_expr_location, but never modify x in place,
157 if location can and needs to be set, unshare it. */
159 static inline tree
160 protected_set_expr_location_unshare (tree x, location_t loc)
162 if (CAN_HAVE_LOCATION_P (x)
163 && EXPR_LOCATION (x) != loc
164 && !(TREE_CODE (x) == SAVE_EXPR
165 || TREE_CODE (x) == TARGET_EXPR
166 || TREE_CODE (x) == BIND_EXPR))
168 x = copy_node (x);
169 SET_EXPR_LOCATION (x, loc);
171 return x;
174 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
175 division and returns the quotient. Otherwise returns
176 NULL_TREE. */
178 tree
179 div_if_zero_remainder (const_tree arg1, const_tree arg2)
181 widest_int quo;
183 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
184 SIGNED, &quo))
185 return wide_int_to_tree (TREE_TYPE (arg1), quo);
187 return NULL_TREE;
190 /* This is nonzero if we should defer warnings about undefined
191 overflow. This facility exists because these warnings are a
192 special case. The code to estimate loop iterations does not want
193 to issue any warnings, since it works with expressions which do not
194 occur in user code. Various bits of cleanup code call fold(), but
195 only use the result if it has certain characteristics (e.g., is a
196 constant); that code only wants to issue a warning if the result is
197 used. */
199 static int fold_deferring_overflow_warnings;
201 /* If a warning about undefined overflow is deferred, this is the
202 warning. Note that this may cause us to turn two warnings into
203 one, but that is fine since it is sufficient to only give one
204 warning per expression. */
206 static const char* fold_deferred_overflow_warning;
208 /* If a warning about undefined overflow is deferred, this is the
209 level at which the warning should be emitted. */
211 static enum warn_strict_overflow_code fold_deferred_overflow_code;
213 /* Start deferring overflow warnings. We could use a stack here to
214 permit nested calls, but at present it is not necessary. */
216 void
217 fold_defer_overflow_warnings (void)
219 ++fold_deferring_overflow_warnings;
222 /* Stop deferring overflow warnings. If there is a pending warning,
223 and ISSUE is true, then issue the warning if appropriate. STMT is
224 the statement with which the warning should be associated (used for
225 location information); STMT may be NULL. CODE is the level of the
226 warning--a warn_strict_overflow_code value. This function will use
227 the smaller of CODE and the deferred code when deciding whether to
228 issue the warning. CODE may be zero to mean to always use the
229 deferred code. */
231 void
232 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
234 const char *warnmsg;
235 location_t locus;
237 gcc_assert (fold_deferring_overflow_warnings > 0);
238 --fold_deferring_overflow_warnings;
239 if (fold_deferring_overflow_warnings > 0)
241 if (fold_deferred_overflow_warning != NULL
242 && code != 0
243 && code < (int) fold_deferred_overflow_code)
244 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
245 return;
248 warnmsg = fold_deferred_overflow_warning;
249 fold_deferred_overflow_warning = NULL;
251 if (!issue || warnmsg == NULL)
252 return;
254 if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
255 return;
257 /* Use the smallest code level when deciding to issue the
258 warning. */
259 if (code == 0 || code > (int) fold_deferred_overflow_code)
260 code = fold_deferred_overflow_code;
262 if (!issue_strict_overflow_warning (code))
263 return;
265 if (stmt == NULL)
266 locus = input_location;
267 else
268 locus = gimple_location (stmt);
269 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
272 /* Stop deferring overflow warnings, ignoring any deferred
273 warnings. */
275 void
276 fold_undefer_and_ignore_overflow_warnings (void)
278 fold_undefer_overflow_warnings (false, NULL, 0);
281 /* Whether we are deferring overflow warnings. */
283 bool
284 fold_deferring_overflow_warnings_p (void)
286 return fold_deferring_overflow_warnings > 0;
289 /* This is called when we fold something based on the fact that signed
290 overflow is undefined. */
292 void
293 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
295 if (fold_deferring_overflow_warnings > 0)
297 if (fold_deferred_overflow_warning == NULL
298 || wc < fold_deferred_overflow_code)
300 fold_deferred_overflow_warning = gmsgid;
301 fold_deferred_overflow_code = wc;
304 else if (issue_strict_overflow_warning (wc))
305 warning (OPT_Wstrict_overflow, gmsgid);
308 /* Return true if the built-in mathematical function specified by CODE
309 is odd, i.e. -f(x) == f(-x). */
311 bool
312 negate_mathfn_p (combined_fn fn)
314 switch (fn)
316 CASE_CFN_ASIN:
317 CASE_CFN_ASINH:
318 CASE_CFN_ATAN:
319 CASE_CFN_ATANH:
320 CASE_CFN_CASIN:
321 CASE_CFN_CASINH:
322 CASE_CFN_CATAN:
323 CASE_CFN_CATANH:
324 CASE_CFN_CBRT:
325 CASE_CFN_CPROJ:
326 CASE_CFN_CSIN:
327 CASE_CFN_CSINH:
328 CASE_CFN_CTAN:
329 CASE_CFN_CTANH:
330 CASE_CFN_ERF:
331 CASE_CFN_LLROUND:
332 CASE_CFN_LROUND:
333 CASE_CFN_ROUND:
334 CASE_CFN_ROUNDEVEN:
335 CASE_CFN_ROUNDEVEN_FN:
336 CASE_CFN_SIN:
337 CASE_CFN_SINH:
338 CASE_CFN_TAN:
339 CASE_CFN_TANH:
340 CASE_CFN_TRUNC:
341 return true;
343 CASE_CFN_LLRINT:
344 CASE_CFN_LRINT:
345 CASE_CFN_NEARBYINT:
346 CASE_CFN_RINT:
347 return !flag_rounding_math;
349 default:
350 break;
352 return false;
355 /* Check whether we may negate an integer constant T without causing
356 overflow. */
358 bool
359 may_negate_without_overflow_p (const_tree t)
361 tree type;
363 gcc_assert (TREE_CODE (t) == INTEGER_CST);
365 type = TREE_TYPE (t);
366 if (TYPE_UNSIGNED (type))
367 return false;
369 return !wi::only_sign_bit_p (wi::to_wide (t));
372 /* Determine whether an expression T can be cheaply negated using
373 the function negate_expr without introducing undefined overflow. */
375 static bool
376 negate_expr_p (tree t)
378 tree type;
380 if (t == 0)
381 return false;
383 type = TREE_TYPE (t);
385 STRIP_SIGN_NOPS (t);
386 switch (TREE_CODE (t))
388 case INTEGER_CST:
389 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
390 return true;
392 /* Check that -CST will not overflow type. */
393 return may_negate_without_overflow_p (t);
394 case BIT_NOT_EXPR:
395 return (INTEGRAL_TYPE_P (type)
396 && TYPE_OVERFLOW_WRAPS (type));
398 case FIXED_CST:
399 return true;
401 case NEGATE_EXPR:
402 return !TYPE_OVERFLOW_SANITIZED (type);
404 case REAL_CST:
405 /* We want to canonicalize to positive real constants. Pretend
406 that only negative ones can be easily negated. */
407 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
409 case COMPLEX_CST:
410 return negate_expr_p (TREE_REALPART (t))
411 && negate_expr_p (TREE_IMAGPART (t));
413 case VECTOR_CST:
415 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
416 return true;
418 /* Steps don't prevent negation. */
419 unsigned int count = vector_cst_encoded_nelts (t);
420 for (unsigned int i = 0; i < count; ++i)
421 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
422 return false;
424 return true;
427 case COMPLEX_EXPR:
428 return negate_expr_p (TREE_OPERAND (t, 0))
429 && negate_expr_p (TREE_OPERAND (t, 1));
431 case CONJ_EXPR:
432 return negate_expr_p (TREE_OPERAND (t, 0));
434 case PLUS_EXPR:
435 if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
436 || HONOR_SIGNED_ZEROS (type)
437 || (ANY_INTEGRAL_TYPE_P (type)
438 && ! TYPE_OVERFLOW_WRAPS (type)))
439 return false;
440 /* -(A + B) -> (-B) - A. */
441 if (negate_expr_p (TREE_OPERAND (t, 1)))
442 return true;
443 /* -(A + B) -> (-A) - B. */
444 return negate_expr_p (TREE_OPERAND (t, 0));
446 case MINUS_EXPR:
447 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
448 return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
449 && !HONOR_SIGNED_ZEROS (type)
450 && (! ANY_INTEGRAL_TYPE_P (type)
451 || TYPE_OVERFLOW_WRAPS (type));
453 case MULT_EXPR:
454 if (TYPE_UNSIGNED (type))
455 break;
456 /* INT_MIN/n * n doesn't overflow while negating one operand it does
457 if n is a (negative) power of two. */
458 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
459 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
460 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
463 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
464 && (wi::popcount
465 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
466 break;
468 /* Fall through. */
470 case RDIV_EXPR:
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
474 break;
476 case TRUNC_DIV_EXPR:
477 case ROUND_DIV_EXPR:
478 case EXACT_DIV_EXPR:
479 if (TYPE_UNSIGNED (type))
480 break;
481 /* In general we can't negate A in A / B, because if A is INT_MIN and
482 B is not 1 we change the sign of the result. */
483 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
484 && negate_expr_p (TREE_OPERAND (t, 0)))
485 return true;
486 /* In general we can't negate B in A / B, because if A is INT_MIN and
487 B is 1, we may turn this into INT_MIN / -1 which is undefined
488 and actually traps on some architectures. */
489 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
490 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
491 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
492 && ! integer_onep (TREE_OPERAND (t, 1))))
493 return negate_expr_p (TREE_OPERAND (t, 1));
494 break;
496 case NOP_EXPR:
497 /* Negate -((double)float) as (double)(-float). */
498 if (TREE_CODE (type) == REAL_TYPE)
500 tree tem = strip_float_extensions (t);
501 if (tem != t)
502 return negate_expr_p (tem);
504 break;
506 case CALL_EXPR:
507 /* Negate -f(x) as f(-x). */
508 if (negate_mathfn_p (get_call_combined_fn (t)))
509 return negate_expr_p (CALL_EXPR_ARG (t, 0));
510 break;
512 case RSHIFT_EXPR:
513 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
514 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
516 tree op1 = TREE_OPERAND (t, 1);
517 if (wi::to_wide (op1) == element_precision (type) - 1)
518 return true;
520 break;
522 default:
523 break;
525 return false;
528 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
529 simplification is possible.
530 If negate_expr_p would return true for T, NULL_TREE will never be
531 returned. */
533 static tree
534 fold_negate_expr_1 (location_t loc, tree t)
536 tree type = TREE_TYPE (t);
537 tree tem;
539 switch (TREE_CODE (t))
541 /* Convert - (~A) to A + 1. */
542 case BIT_NOT_EXPR:
543 if (INTEGRAL_TYPE_P (type))
544 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
545 build_one_cst (type));
546 break;
548 case INTEGER_CST:
549 tem = fold_negate_const (t, type);
550 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
551 || (ANY_INTEGRAL_TYPE_P (type)
552 && !TYPE_OVERFLOW_TRAPS (type)
553 && TYPE_OVERFLOW_WRAPS (type))
554 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
555 return tem;
556 break;
558 case POLY_INT_CST:
559 case REAL_CST:
560 case FIXED_CST:
561 tem = fold_negate_const (t, type);
562 return tem;
564 case COMPLEX_CST:
566 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
567 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
568 if (rpart && ipart)
569 return build_complex (type, rpart, ipart);
571 break;
573 case VECTOR_CST:
575 tree_vector_builder elts;
576 elts.new_unary_operation (type, t, true);
577 unsigned int count = elts.encoded_nelts ();
578 for (unsigned int i = 0; i < count; ++i)
580 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
581 if (elt == NULL_TREE)
582 return NULL_TREE;
583 elts.quick_push (elt);
586 return elts.build ();
589 case COMPLEX_EXPR:
590 if (negate_expr_p (t))
591 return fold_build2_loc (loc, COMPLEX_EXPR, type,
592 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
593 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
594 break;
596 case CONJ_EXPR:
597 if (negate_expr_p (t))
598 return fold_build1_loc (loc, CONJ_EXPR, type,
599 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
600 break;
602 case NEGATE_EXPR:
603 if (!TYPE_OVERFLOW_SANITIZED (type))
604 return TREE_OPERAND (t, 0);
605 break;
607 case PLUS_EXPR:
608 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
609 && !HONOR_SIGNED_ZEROS (type))
611 /* -(A + B) -> (-B) - A. */
612 if (negate_expr_p (TREE_OPERAND (t, 1)))
614 tem = negate_expr (TREE_OPERAND (t, 1));
615 return fold_build2_loc (loc, MINUS_EXPR, type,
616 tem, TREE_OPERAND (t, 0));
619 /* -(A + B) -> (-A) - B. */
620 if (negate_expr_p (TREE_OPERAND (t, 0)))
622 tem = negate_expr (TREE_OPERAND (t, 0));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 1));
627 break;
629 case MINUS_EXPR:
630 /* - (A - B) -> B - A */
631 if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
632 && !HONOR_SIGNED_ZEROS (type))
633 return fold_build2_loc (loc, MINUS_EXPR, type,
634 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
635 break;
637 case MULT_EXPR:
638 if (TYPE_UNSIGNED (type))
639 break;
641 /* Fall through. */
643 case RDIV_EXPR:
644 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
646 tem = TREE_OPERAND (t, 1);
647 if (negate_expr_p (tem))
648 return fold_build2_loc (loc, TREE_CODE (t), type,
649 TREE_OPERAND (t, 0), negate_expr (tem));
650 tem = TREE_OPERAND (t, 0);
651 if (negate_expr_p (tem))
652 return fold_build2_loc (loc, TREE_CODE (t), type,
653 negate_expr (tem), TREE_OPERAND (t, 1));
655 break;
657 case TRUNC_DIV_EXPR:
658 case ROUND_DIV_EXPR:
659 case EXACT_DIV_EXPR:
660 if (TYPE_UNSIGNED (type))
661 break;
662 /* In general we can't negate A in A / B, because if A is INT_MIN and
663 B is not 1 we change the sign of the result. */
664 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
665 && negate_expr_p (TREE_OPERAND (t, 0)))
666 return fold_build2_loc (loc, TREE_CODE (t), type,
667 negate_expr (TREE_OPERAND (t, 0)),
668 TREE_OPERAND (t, 1));
669 /* In general we can't negate B in A / B, because if A is INT_MIN and
670 B is 1, we may turn this into INT_MIN / -1 which is undefined
671 and actually traps on some architectures. */
672 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
673 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
674 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
675 && ! integer_onep (TREE_OPERAND (t, 1))))
676 && negate_expr_p (TREE_OPERAND (t, 1)))
677 return fold_build2_loc (loc, TREE_CODE (t), type,
678 TREE_OPERAND (t, 0),
679 negate_expr (TREE_OPERAND (t, 1)));
680 break;
682 case NOP_EXPR:
683 /* Convert -((double)float) into (double)(-float). */
684 if (TREE_CODE (type) == REAL_TYPE)
686 tem = strip_float_extensions (t);
687 if (tem != t && negate_expr_p (tem))
688 return fold_convert_loc (loc, type, negate_expr (tem));
690 break;
692 case CALL_EXPR:
693 /* Negate -f(x) as f(-x). */
694 if (negate_mathfn_p (get_call_combined_fn (t))
695 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
697 tree fndecl, arg;
699 fndecl = get_callee_fndecl (t);
700 arg = negate_expr (CALL_EXPR_ARG (t, 0));
701 return build_call_expr_loc (loc, fndecl, 1, arg);
703 break;
705 case RSHIFT_EXPR:
706 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
707 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
709 tree op1 = TREE_OPERAND (t, 1);
710 if (wi::to_wide (op1) == element_precision (type) - 1)
712 tree ntype = TYPE_UNSIGNED (type)
713 ? signed_type_for (type)
714 : unsigned_type_for (type);
715 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
716 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
717 return fold_convert_loc (loc, type, temp);
720 break;
722 default:
723 break;
726 return NULL_TREE;
729 /* A wrapper for fold_negate_expr_1. */
731 static tree
732 fold_negate_expr (location_t loc, tree t)
734 tree type = TREE_TYPE (t);
735 STRIP_SIGN_NOPS (t);
736 tree tem = fold_negate_expr_1 (loc, t);
737 if (tem == NULL_TREE)
738 return NULL_TREE;
739 return fold_convert_loc (loc, type, tem);
742 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
743 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
744 return NULL_TREE. */
746 static tree
747 negate_expr (tree t)
749 tree type, tem;
750 location_t loc;
752 if (t == NULL_TREE)
753 return NULL_TREE;
755 loc = EXPR_LOCATION (t);
756 type = TREE_TYPE (t);
757 STRIP_SIGN_NOPS (t);
759 tem = fold_negate_expr (loc, t);
760 if (!tem)
761 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
762 return fold_convert_loc (loc, type, tem);
765 /* Split a tree IN into a constant, literal and variable parts that could be
766 combined with CODE to make IN. "constant" means an expression with
767 TREE_CONSTANT but that isn't an actual constant. CODE must be a
768 commutative arithmetic operation. Store the constant part into *CONP,
769 the literal in *LITP and return the variable part. If a part isn't
770 present, set it to null. If the tree does not decompose in this way,
771 return the entire tree as the variable part and the other parts as null.
773 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
774 case, we negate an operand that was subtracted. Except if it is a
775 literal for which we use *MINUS_LITP instead.
777 If NEGATE_P is true, we are negating all of IN, again except a literal
778 for which we use *MINUS_LITP instead. If a variable part is of pointer
779 type, it is negated after converting to TYPE. This prevents us from
780 generating illegal MINUS pointer expression. LOC is the location of
781 the converted variable part.
783 If IN is itself a literal or constant, return it as appropriate.
785 Note that we do not guarantee that any of the three values will be the
786 same type as IN, but they will have the same signedness and mode. */
788 static tree
789 split_tree (tree in, tree type, enum tree_code code,
790 tree *minus_varp, tree *conp, tree *minus_conp,
791 tree *litp, tree *minus_litp, int negate_p)
793 tree var = 0;
794 *minus_varp = 0;
795 *conp = 0;
796 *minus_conp = 0;
797 *litp = 0;
798 *minus_litp = 0;
800 /* Strip any conversions that don't change the machine mode or signedness. */
801 STRIP_SIGN_NOPS (in);
803 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
804 || TREE_CODE (in) == FIXED_CST)
805 *litp = in;
806 else if (TREE_CODE (in) == code
807 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
808 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
809 /* We can associate addition and subtraction together (even
810 though the C standard doesn't say so) for integers because
811 the value is not affected. For reals, the value might be
812 affected, so we can't. */
813 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
814 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
815 || (code == MINUS_EXPR
816 && (TREE_CODE (in) == PLUS_EXPR
817 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
819 tree op0 = TREE_OPERAND (in, 0);
820 tree op1 = TREE_OPERAND (in, 1);
821 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
822 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
824 /* First see if either of the operands is a literal, then a constant. */
825 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
826 || TREE_CODE (op0) == FIXED_CST)
827 *litp = op0, op0 = 0;
828 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
829 || TREE_CODE (op1) == FIXED_CST)
830 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
832 if (op0 != 0 && TREE_CONSTANT (op0))
833 *conp = op0, op0 = 0;
834 else if (op1 != 0 && TREE_CONSTANT (op1))
835 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
837 /* If we haven't dealt with either operand, this is not a case we can
838 decompose. Otherwise, VAR is either of the ones remaining, if any. */
839 if (op0 != 0 && op1 != 0)
840 var = in;
841 else if (op0 != 0)
842 var = op0;
843 else
844 var = op1, neg_var_p = neg1_p;
846 /* Now do any needed negations. */
847 if (neg_litp_p)
848 *minus_litp = *litp, *litp = 0;
849 if (neg_conp_p && *conp)
850 *minus_conp = *conp, *conp = 0;
851 if (neg_var_p && var)
852 *minus_varp = var, var = 0;
854 else if (TREE_CONSTANT (in))
855 *conp = in;
856 else if (TREE_CODE (in) == BIT_NOT_EXPR
857 && code == PLUS_EXPR)
859 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
860 when IN is constant. */
861 *litp = build_minus_one_cst (type);
862 *minus_varp = TREE_OPERAND (in, 0);
864 else
865 var = in;
867 if (negate_p)
869 if (*litp)
870 *minus_litp = *litp, *litp = 0;
871 else if (*minus_litp)
872 *litp = *minus_litp, *minus_litp = 0;
873 if (*conp)
874 *minus_conp = *conp, *conp = 0;
875 else if (*minus_conp)
876 *conp = *minus_conp, *minus_conp = 0;
877 if (var)
878 *minus_varp = var, var = 0;
879 else if (*minus_varp)
880 var = *minus_varp, *minus_varp = 0;
883 if (*litp
884 && TREE_OVERFLOW_P (*litp))
885 *litp = drop_tree_overflow (*litp);
886 if (*minus_litp
887 && TREE_OVERFLOW_P (*minus_litp))
888 *minus_litp = drop_tree_overflow (*minus_litp);
890 return var;
893 /* Re-associate trees split by the above function. T1 and T2 are
894 either expressions to associate or null. Return the new
895 expression, if any. LOC is the location of the new expression. If
896 we build an operation, do it in TYPE and with CODE. */
898 static tree
899 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
901 if (t1 == 0)
903 gcc_assert (t2 == 0 || code != MINUS_EXPR);
904 return t2;
906 else if (t2 == 0)
907 return t1;
909 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
910 try to fold this since we will have infinite recursion. But do
911 deal with any NEGATE_EXPRs. */
912 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
913 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
914 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
916 if (code == PLUS_EXPR)
918 if (TREE_CODE (t1) == NEGATE_EXPR)
919 return build2_loc (loc, MINUS_EXPR, type,
920 fold_convert_loc (loc, type, t2),
921 fold_convert_loc (loc, type,
922 TREE_OPERAND (t1, 0)));
923 else if (TREE_CODE (t2) == NEGATE_EXPR)
924 return build2_loc (loc, MINUS_EXPR, type,
925 fold_convert_loc (loc, type, t1),
926 fold_convert_loc (loc, type,
927 TREE_OPERAND (t2, 0)));
928 else if (integer_zerop (t2))
929 return fold_convert_loc (loc, type, t1);
931 else if (code == MINUS_EXPR)
933 if (integer_zerop (t2))
934 return fold_convert_loc (loc, type, t1);
937 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
938 fold_convert_loc (loc, type, t2));
941 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
942 fold_convert_loc (loc, type, t2));
945 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
946 for use in int_const_binop, size_binop and size_diffop. */
948 static bool
949 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
951 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
952 return false;
953 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
954 return false;
956 switch (code)
958 case LSHIFT_EXPR:
959 case RSHIFT_EXPR:
960 case LROTATE_EXPR:
961 case RROTATE_EXPR:
962 return true;
964 default:
965 break;
968 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
969 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
970 && TYPE_MODE (type1) == TYPE_MODE (type2);
973 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
974 a new constant in RES. Return FALSE if we don't know how to
975 evaluate CODE at compile-time. */
977 bool
978 wide_int_binop (wide_int &res,
979 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
980 signop sign, wi::overflow_type *overflow)
982 wide_int tmp;
983 *overflow = wi::OVF_NONE;
984 switch (code)
986 case BIT_IOR_EXPR:
987 res = wi::bit_or (arg1, arg2);
988 break;
990 case BIT_XOR_EXPR:
991 res = wi::bit_xor (arg1, arg2);
992 break;
994 case BIT_AND_EXPR:
995 res = wi::bit_and (arg1, arg2);
996 break;
998 case LSHIFT_EXPR:
999 if (wi::neg_p (arg2))
1000 return false;
1001 res = wi::lshift (arg1, arg2);
1002 break;
1004 case RSHIFT_EXPR:
1005 if (wi::neg_p (arg2))
1006 return false;
1007 /* It's unclear from the C standard whether shifts can overflow.
1008 The following code ignores overflow; perhaps a C standard
1009 interpretation ruling is needed. */
1010 res = wi::rshift (arg1, arg2, sign);
1011 break;
1013 case RROTATE_EXPR:
1014 case LROTATE_EXPR:
1015 if (wi::neg_p (arg2))
1017 tmp = -arg2;
1018 if (code == RROTATE_EXPR)
1019 code = LROTATE_EXPR;
1020 else
1021 code = RROTATE_EXPR;
1023 else
1024 tmp = arg2;
1026 if (code == RROTATE_EXPR)
1027 res = wi::rrotate (arg1, tmp);
1028 else
1029 res = wi::lrotate (arg1, tmp);
1030 break;
1032 case PLUS_EXPR:
1033 res = wi::add (arg1, arg2, sign, overflow);
1034 break;
1036 case MINUS_EXPR:
1037 res = wi::sub (arg1, arg2, sign, overflow);
1038 break;
1040 case MULT_EXPR:
1041 res = wi::mul (arg1, arg2, sign, overflow);
1042 break;
1044 case MULT_HIGHPART_EXPR:
1045 res = wi::mul_high (arg1, arg2, sign);
1046 break;
1048 case TRUNC_DIV_EXPR:
1049 case EXACT_DIV_EXPR:
1050 if (arg2 == 0)
1051 return false;
1052 res = wi::div_trunc (arg1, arg2, sign, overflow);
1053 break;
1055 case FLOOR_DIV_EXPR:
1056 if (arg2 == 0)
1057 return false;
1058 res = wi::div_floor (arg1, arg2, sign, overflow);
1059 break;
1061 case CEIL_DIV_EXPR:
1062 if (arg2 == 0)
1063 return false;
1064 res = wi::div_ceil (arg1, arg2, sign, overflow);
1065 break;
1067 case ROUND_DIV_EXPR:
1068 if (arg2 == 0)
1069 return false;
1070 res = wi::div_round (arg1, arg2, sign, overflow);
1071 break;
1073 case TRUNC_MOD_EXPR:
1074 if (arg2 == 0)
1075 return false;
1076 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1077 break;
1079 case FLOOR_MOD_EXPR:
1080 if (arg2 == 0)
1081 return false;
1082 res = wi::mod_floor (arg1, arg2, sign, overflow);
1083 break;
1085 case CEIL_MOD_EXPR:
1086 if (arg2 == 0)
1087 return false;
1088 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1089 break;
1091 case ROUND_MOD_EXPR:
1092 if (arg2 == 0)
1093 return false;
1094 res = wi::mod_round (arg1, arg2, sign, overflow);
1095 break;
1097 case MIN_EXPR:
1098 res = wi::min (arg1, arg2, sign);
1099 break;
1101 case MAX_EXPR:
1102 res = wi::max (arg1, arg2, sign);
1103 break;
1105 default:
1106 return false;
1108 return true;
1111 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1112 produce a new constant in RES. Return FALSE if we don't know how
1113 to evaluate CODE at compile-time. */
1115 static bool
1116 poly_int_binop (poly_wide_int &res, enum tree_code code,
1117 const_tree arg1, const_tree arg2,
1118 signop sign, wi::overflow_type *overflow)
1120 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1121 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1122 switch (code)
1124 case PLUS_EXPR:
1125 res = wi::add (wi::to_poly_wide (arg1),
1126 wi::to_poly_wide (arg2), sign, overflow);
1127 break;
1129 case MINUS_EXPR:
1130 res = wi::sub (wi::to_poly_wide (arg1),
1131 wi::to_poly_wide (arg2), sign, overflow);
1132 break;
1134 case MULT_EXPR:
1135 if (TREE_CODE (arg2) == INTEGER_CST)
1136 res = wi::mul (wi::to_poly_wide (arg1),
1137 wi::to_wide (arg2), sign, overflow);
1138 else if (TREE_CODE (arg1) == INTEGER_CST)
1139 res = wi::mul (wi::to_poly_wide (arg2),
1140 wi::to_wide (arg1), sign, overflow);
1141 else
1142 return NULL_TREE;
1143 break;
1145 case LSHIFT_EXPR:
1146 if (TREE_CODE (arg2) == INTEGER_CST)
1147 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1148 else
1149 return false;
1150 break;
1152 case BIT_IOR_EXPR:
1153 if (TREE_CODE (arg2) != INTEGER_CST
1154 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1155 &res))
1156 return false;
1157 break;
1159 default:
1160 return false;
1162 return true;
1165 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1166 produce a new constant. Return NULL_TREE if we don't know how to
1167 evaluate CODE at compile-time. */
1169 tree
1170 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1171 int overflowable)
1173 poly_wide_int poly_res;
1174 tree type = TREE_TYPE (arg1);
1175 signop sign = TYPE_SIGN (type);
1176 wi::overflow_type overflow = wi::OVF_NONE;
1178 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1180 wide_int warg1 = wi::to_wide (arg1), res;
1181 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1182 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1183 return NULL_TREE;
1184 poly_res = res;
1186 else if (!poly_int_tree_p (arg1)
1187 || !poly_int_tree_p (arg2)
1188 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1189 return NULL_TREE;
1190 return force_fit_type (type, poly_res, overflowable,
1191 (((sign == SIGNED || overflowable == -1)
1192 && overflow)
1193 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1196 /* Return true if binary operation OP distributes over addition in operand
1197 OPNO, with the other operand being held constant. OPNO counts from 1. */
1199 static bool
1200 distributes_over_addition_p (tree_code op, int opno)
1202 switch (op)
1204 case PLUS_EXPR:
1205 case MINUS_EXPR:
1206 case MULT_EXPR:
1207 return true;
1209 case LSHIFT_EXPR:
1210 return opno == 1;
1212 default:
1213 return false;
1217 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1218 constant. We assume ARG1 and ARG2 have the same data type, or at least
1219 are the same kind of constant and the same machine mode. Return zero if
1220 combining the constants is not allowed in the current operating mode. */
1222 static tree
1223 const_binop (enum tree_code code, tree arg1, tree arg2)
1225 /* Sanity check for the recursive cases. */
1226 if (!arg1 || !arg2)
1227 return NULL_TREE;
1229 STRIP_NOPS (arg1);
1230 STRIP_NOPS (arg2);
1232 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1234 if (code == POINTER_PLUS_EXPR)
1235 return int_const_binop (PLUS_EXPR,
1236 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1238 return int_const_binop (code, arg1, arg2);
1241 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1243 machine_mode mode;
1244 REAL_VALUE_TYPE d1;
1245 REAL_VALUE_TYPE d2;
1246 REAL_VALUE_TYPE value;
1247 REAL_VALUE_TYPE result;
1248 bool inexact;
1249 tree t, type;
1251 /* The following codes are handled by real_arithmetic. */
1252 switch (code)
1254 case PLUS_EXPR:
1255 case MINUS_EXPR:
1256 case MULT_EXPR:
1257 case RDIV_EXPR:
1258 case MIN_EXPR:
1259 case MAX_EXPR:
1260 break;
1262 default:
1263 return NULL_TREE;
1266 d1 = TREE_REAL_CST (arg1);
1267 d2 = TREE_REAL_CST (arg2);
1269 type = TREE_TYPE (arg1);
1270 mode = TYPE_MODE (type);
1272 /* Don't perform operation if we honor signaling NaNs and
1273 either operand is a signaling NaN. */
1274 if (HONOR_SNANS (mode)
1275 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1276 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1277 return NULL_TREE;
1279 /* Don't perform operation if it would raise a division
1280 by zero exception. */
1281 if (code == RDIV_EXPR
1282 && real_equal (&d2, &dconst0)
1283 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1284 return NULL_TREE;
1286 /* If either operand is a NaN, just return it. Otherwise, set up
1287 for floating-point trap; we return an overflow. */
1288 if (REAL_VALUE_ISNAN (d1))
1290 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1291 is off. */
1292 d1.signalling = 0;
1293 t = build_real (type, d1);
1294 return t;
1296 else if (REAL_VALUE_ISNAN (d2))
1298 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1299 is off. */
1300 d2.signalling = 0;
1301 t = build_real (type, d2);
1302 return t;
1305 inexact = real_arithmetic (&value, code, &d1, &d2);
1306 real_convert (&result, mode, &value);
1308 /* Don't constant fold this floating point operation if
1309 the result has overflowed and flag_trapping_math. */
1310 if (flag_trapping_math
1311 && MODE_HAS_INFINITIES (mode)
1312 && REAL_VALUE_ISINF (result)
1313 && !REAL_VALUE_ISINF (d1)
1314 && !REAL_VALUE_ISINF (d2))
1315 return NULL_TREE;
1317 /* Don't constant fold this floating point operation if the
1318 result may dependent upon the run-time rounding mode and
1319 flag_rounding_math is set, or if GCC's software emulation
1320 is unable to accurately represent the result. */
1321 if ((flag_rounding_math
1322 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1323 && (inexact || !real_identical (&result, &value)))
1324 return NULL_TREE;
1326 t = build_real (type, result);
1328 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1329 return t;
1332 if (TREE_CODE (arg1) == FIXED_CST)
1334 FIXED_VALUE_TYPE f1;
1335 FIXED_VALUE_TYPE f2;
1336 FIXED_VALUE_TYPE result;
1337 tree t, type;
1338 int sat_p;
1339 bool overflow_p;
1341 /* The following codes are handled by fixed_arithmetic. */
1342 switch (code)
1344 case PLUS_EXPR:
1345 case MINUS_EXPR:
1346 case MULT_EXPR:
1347 case TRUNC_DIV_EXPR:
1348 if (TREE_CODE (arg2) != FIXED_CST)
1349 return NULL_TREE;
1350 f2 = TREE_FIXED_CST (arg2);
1351 break;
1353 case LSHIFT_EXPR:
1354 case RSHIFT_EXPR:
1356 if (TREE_CODE (arg2) != INTEGER_CST)
1357 return NULL_TREE;
1358 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1359 f2.data.high = w2.elt (1);
1360 f2.data.low = w2.ulow ();
1361 f2.mode = SImode;
1363 break;
1365 default:
1366 return NULL_TREE;
1369 f1 = TREE_FIXED_CST (arg1);
1370 type = TREE_TYPE (arg1);
1371 sat_p = TYPE_SATURATING (type);
1372 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1373 t = build_fixed (type, result);
1374 /* Propagate overflow flags. */
1375 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1376 TREE_OVERFLOW (t) = 1;
1377 return t;
1380 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1382 tree type = TREE_TYPE (arg1);
1383 tree r1 = TREE_REALPART (arg1);
1384 tree i1 = TREE_IMAGPART (arg1);
1385 tree r2 = TREE_REALPART (arg2);
1386 tree i2 = TREE_IMAGPART (arg2);
1387 tree real, imag;
1389 switch (code)
1391 case PLUS_EXPR:
1392 case MINUS_EXPR:
1393 real = const_binop (code, r1, r2);
1394 imag = const_binop (code, i1, i2);
1395 break;
1397 case MULT_EXPR:
1398 if (COMPLEX_FLOAT_TYPE_P (type))
1399 return do_mpc_arg2 (arg1, arg2, type,
1400 /* do_nonfinite= */ folding_initializer,
1401 mpc_mul);
1403 real = const_binop (MINUS_EXPR,
1404 const_binop (MULT_EXPR, r1, r2),
1405 const_binop (MULT_EXPR, i1, i2));
1406 imag = const_binop (PLUS_EXPR,
1407 const_binop (MULT_EXPR, r1, i2),
1408 const_binop (MULT_EXPR, i1, r2));
1409 break;
1411 case RDIV_EXPR:
1412 if (COMPLEX_FLOAT_TYPE_P (type))
1413 return do_mpc_arg2 (arg1, arg2, type,
1414 /* do_nonfinite= */ folding_initializer,
1415 mpc_div);
1416 /* Fallthru. */
1417 case TRUNC_DIV_EXPR:
1418 case CEIL_DIV_EXPR:
1419 case FLOOR_DIV_EXPR:
1420 case ROUND_DIV_EXPR:
1421 if (flag_complex_method == 0)
1423 /* Keep this algorithm in sync with
1424 tree-complex.c:expand_complex_div_straight().
1426 Expand complex division to scalars, straightforward algorithm.
1427 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1428 t = br*br + bi*bi
1430 tree magsquared
1431 = const_binop (PLUS_EXPR,
1432 const_binop (MULT_EXPR, r2, r2),
1433 const_binop (MULT_EXPR, i2, i2));
1434 tree t1
1435 = const_binop (PLUS_EXPR,
1436 const_binop (MULT_EXPR, r1, r2),
1437 const_binop (MULT_EXPR, i1, i2));
1438 tree t2
1439 = const_binop (MINUS_EXPR,
1440 const_binop (MULT_EXPR, i1, r2),
1441 const_binop (MULT_EXPR, r1, i2));
1443 real = const_binop (code, t1, magsquared);
1444 imag = const_binop (code, t2, magsquared);
1446 else
1448 /* Keep this algorithm in sync with
1449 tree-complex.c:expand_complex_div_wide().
1451 Expand complex division to scalars, modified algorithm to minimize
1452 overflow with wide input ranges. */
1453 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1454 fold_abs_const (r2, TREE_TYPE (type)),
1455 fold_abs_const (i2, TREE_TYPE (type)));
1457 if (integer_nonzerop (compare))
1459 /* In the TRUE branch, we compute
1460 ratio = br/bi;
1461 div = (br * ratio) + bi;
1462 tr = (ar * ratio) + ai;
1463 ti = (ai * ratio) - ar;
1464 tr = tr / div;
1465 ti = ti / div; */
1466 tree ratio = const_binop (code, r2, i2);
1467 tree div = const_binop (PLUS_EXPR, i2,
1468 const_binop (MULT_EXPR, r2, ratio));
1469 real = const_binop (MULT_EXPR, r1, ratio);
1470 real = const_binop (PLUS_EXPR, real, i1);
1471 real = const_binop (code, real, div);
1473 imag = const_binop (MULT_EXPR, i1, ratio);
1474 imag = const_binop (MINUS_EXPR, imag, r1);
1475 imag = const_binop (code, imag, div);
1477 else
1479 /* In the FALSE branch, we compute
1480 ratio = d/c;
1481 divisor = (d * ratio) + c;
1482 tr = (b * ratio) + a;
1483 ti = b - (a * ratio);
1484 tr = tr / div;
1485 ti = ti / div; */
1486 tree ratio = const_binop (code, i2, r2);
1487 tree div = const_binop (PLUS_EXPR, r2,
1488 const_binop (MULT_EXPR, i2, ratio));
1490 real = const_binop (MULT_EXPR, i1, ratio);
1491 real = const_binop (PLUS_EXPR, real, r1);
1492 real = const_binop (code, real, div);
1494 imag = const_binop (MULT_EXPR, r1, ratio);
1495 imag = const_binop (MINUS_EXPR, i1, imag);
1496 imag = const_binop (code, imag, div);
1499 break;
1501 default:
1502 return NULL_TREE;
1505 if (real && imag)
1506 return build_complex (type, real, imag);
1509 if (TREE_CODE (arg1) == VECTOR_CST
1510 && TREE_CODE (arg2) == VECTOR_CST
1511 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1512 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1514 tree type = TREE_TYPE (arg1);
1515 bool step_ok_p;
1516 if (VECTOR_CST_STEPPED_P (arg1)
1517 && VECTOR_CST_STEPPED_P (arg2))
1518 /* We can operate directly on the encoding if:
1520 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1521 implies
1522 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1524 Addition and subtraction are the supported operators
1525 for which this is true. */
1526 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1527 else if (VECTOR_CST_STEPPED_P (arg1))
1528 /* We can operate directly on stepped encodings if:
1530 a3 - a2 == a2 - a1
1531 implies:
1532 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1534 which is true if (x -> x op c) distributes over addition. */
1535 step_ok_p = distributes_over_addition_p (code, 1);
1536 else
1537 /* Similarly in reverse. */
1538 step_ok_p = distributes_over_addition_p (code, 2);
1539 tree_vector_builder elts;
1540 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1541 return NULL_TREE;
1542 unsigned int count = elts.encoded_nelts ();
1543 for (unsigned int i = 0; i < count; ++i)
1545 tree elem1 = VECTOR_CST_ELT (arg1, i);
1546 tree elem2 = VECTOR_CST_ELT (arg2, i);
1548 tree elt = const_binop (code, elem1, elem2);
1550 /* It is possible that const_binop cannot handle the given
1551 code and return NULL_TREE */
1552 if (elt == NULL_TREE)
1553 return NULL_TREE;
1554 elts.quick_push (elt);
1557 return elts.build ();
1560 /* Shifts allow a scalar offset for a vector. */
1561 if (TREE_CODE (arg1) == VECTOR_CST
1562 && TREE_CODE (arg2) == INTEGER_CST)
1564 tree type = TREE_TYPE (arg1);
1565 bool step_ok_p = distributes_over_addition_p (code, 1);
1566 tree_vector_builder elts;
1567 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1568 return NULL_TREE;
1569 unsigned int count = elts.encoded_nelts ();
1570 for (unsigned int i = 0; i < count; ++i)
1572 tree elem1 = VECTOR_CST_ELT (arg1, i);
1574 tree elt = const_binop (code, elem1, arg2);
1576 /* It is possible that const_binop cannot handle the given
1577 code and return NULL_TREE. */
1578 if (elt == NULL_TREE)
1579 return NULL_TREE;
1580 elts.quick_push (elt);
1583 return elts.build ();
1585 return NULL_TREE;
1588 /* Overload that adds a TYPE parameter to be able to dispatch
1589 to fold_relational_const. */
1591 tree
1592 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1594 if (TREE_CODE_CLASS (code) == tcc_comparison)
1595 return fold_relational_const (code, type, arg1, arg2);
1597 /* ??? Until we make the const_binop worker take the type of the
1598 result as argument put those cases that need it here. */
1599 switch (code)
1601 case VEC_SERIES_EXPR:
1602 if (CONSTANT_CLASS_P (arg1)
1603 && CONSTANT_CLASS_P (arg2))
1604 return build_vec_series (type, arg1, arg2);
1605 return NULL_TREE;
1607 case COMPLEX_EXPR:
1608 if ((TREE_CODE (arg1) == REAL_CST
1609 && TREE_CODE (arg2) == REAL_CST)
1610 || (TREE_CODE (arg1) == INTEGER_CST
1611 && TREE_CODE (arg2) == INTEGER_CST))
1612 return build_complex (type, arg1, arg2);
1613 return NULL_TREE;
1615 case POINTER_DIFF_EXPR:
1616 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1618 poly_offset_int res = (wi::to_poly_offset (arg1)
1619 - wi::to_poly_offset (arg2));
1620 return force_fit_type (type, res, 1,
1621 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1623 return NULL_TREE;
1625 case VEC_PACK_TRUNC_EXPR:
1626 case VEC_PACK_FIX_TRUNC_EXPR:
1627 case VEC_PACK_FLOAT_EXPR:
1629 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1631 if (TREE_CODE (arg1) != VECTOR_CST
1632 || TREE_CODE (arg2) != VECTOR_CST)
1633 return NULL_TREE;
1635 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1636 return NULL_TREE;
1638 out_nelts = in_nelts * 2;
1639 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1640 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1642 tree_vector_builder elts (type, out_nelts, 1);
1643 for (i = 0; i < out_nelts; i++)
1645 tree elt = (i < in_nelts
1646 ? VECTOR_CST_ELT (arg1, i)
1647 : VECTOR_CST_ELT (arg2, i - in_nelts));
1648 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1649 ? NOP_EXPR
1650 : code == VEC_PACK_FLOAT_EXPR
1651 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1652 TREE_TYPE (type), elt);
1653 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1654 return NULL_TREE;
1655 elts.quick_push (elt);
1658 return elts.build ();
1661 case VEC_WIDEN_MULT_LO_EXPR:
1662 case VEC_WIDEN_MULT_HI_EXPR:
1663 case VEC_WIDEN_MULT_EVEN_EXPR:
1664 case VEC_WIDEN_MULT_ODD_EXPR:
1666 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1668 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1669 return NULL_TREE;
1671 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1672 return NULL_TREE;
1673 out_nelts = in_nelts / 2;
1674 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1675 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1677 if (code == VEC_WIDEN_MULT_LO_EXPR)
1678 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1679 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1680 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1681 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1682 scale = 1, ofs = 0;
1683 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1684 scale = 1, ofs = 1;
1686 tree_vector_builder elts (type, out_nelts, 1);
1687 for (out = 0; out < out_nelts; out++)
1689 unsigned int in = (out << scale) + ofs;
1690 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1691 VECTOR_CST_ELT (arg1, in));
1692 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1693 VECTOR_CST_ELT (arg2, in));
1695 if (t1 == NULL_TREE || t2 == NULL_TREE)
1696 return NULL_TREE;
1697 tree elt = const_binop (MULT_EXPR, t1, t2);
1698 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1699 return NULL_TREE;
1700 elts.quick_push (elt);
1703 return elts.build ();
1706 default:;
1709 if (TREE_CODE_CLASS (code) != tcc_binary)
1710 return NULL_TREE;
1712 /* Make sure type and arg0 have the same saturating flag. */
1713 gcc_checking_assert (TYPE_SATURATING (type)
1714 == TYPE_SATURATING (TREE_TYPE (arg1)));
1716 return const_binop (code, arg1, arg2);
1719 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1720 Return zero if computing the constants is not possible. */
1722 tree
1723 const_unop (enum tree_code code, tree type, tree arg0)
1725 /* Don't perform the operation, other than NEGATE and ABS, if
1726 flag_signaling_nans is on and the operand is a signaling NaN. */
1727 if (TREE_CODE (arg0) == REAL_CST
1728 && HONOR_SNANS (arg0)
1729 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1730 && code != NEGATE_EXPR
1731 && code != ABS_EXPR
1732 && code != ABSU_EXPR)
1733 return NULL_TREE;
1735 switch (code)
1737 CASE_CONVERT:
1738 case FLOAT_EXPR:
1739 case FIX_TRUNC_EXPR:
1740 case FIXED_CONVERT_EXPR:
1741 return fold_convert_const (code, type, arg0);
1743 case ADDR_SPACE_CONVERT_EXPR:
1744 /* If the source address is 0, and the source address space
1745 cannot have a valid object at 0, fold to dest type null. */
1746 if (integer_zerop (arg0)
1747 && !(targetm.addr_space.zero_address_valid
1748 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1749 return fold_convert_const (code, type, arg0);
1750 break;
1752 case VIEW_CONVERT_EXPR:
1753 return fold_view_convert_expr (type, arg0);
1755 case NEGATE_EXPR:
1757 /* Can't call fold_negate_const directly here as that doesn't
1758 handle all cases and we might not be able to negate some
1759 constants. */
1760 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1761 if (tem && CONSTANT_CLASS_P (tem))
1762 return tem;
1763 break;
1766 case ABS_EXPR:
1767 case ABSU_EXPR:
1768 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1769 return fold_abs_const (arg0, type);
1770 break;
1772 case CONJ_EXPR:
1773 if (TREE_CODE (arg0) == COMPLEX_CST)
1775 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1776 TREE_TYPE (type));
1777 return build_complex (type, TREE_REALPART (arg0), ipart);
1779 break;
1781 case BIT_NOT_EXPR:
1782 if (TREE_CODE (arg0) == INTEGER_CST)
1783 return fold_not_const (arg0, type);
1784 else if (POLY_INT_CST_P (arg0))
1785 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1786 /* Perform BIT_NOT_EXPR on each element individually. */
1787 else if (TREE_CODE (arg0) == VECTOR_CST)
1789 tree elem;
1791 /* This can cope with stepped encodings because ~x == -1 - x. */
1792 tree_vector_builder elements;
1793 elements.new_unary_operation (type, arg0, true);
1794 unsigned int i, count = elements.encoded_nelts ();
1795 for (i = 0; i < count; ++i)
1797 elem = VECTOR_CST_ELT (arg0, i);
1798 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1799 if (elem == NULL_TREE)
1800 break;
1801 elements.quick_push (elem);
1803 if (i == count)
1804 return elements.build ();
1806 break;
1808 case TRUTH_NOT_EXPR:
1809 if (TREE_CODE (arg0) == INTEGER_CST)
1810 return constant_boolean_node (integer_zerop (arg0), type);
1811 break;
1813 case REALPART_EXPR:
1814 if (TREE_CODE (arg0) == COMPLEX_CST)
1815 return fold_convert (type, TREE_REALPART (arg0));
1816 break;
1818 case IMAGPART_EXPR:
1819 if (TREE_CODE (arg0) == COMPLEX_CST)
1820 return fold_convert (type, TREE_IMAGPART (arg0));
1821 break;
1823 case VEC_UNPACK_LO_EXPR:
1824 case VEC_UNPACK_HI_EXPR:
1825 case VEC_UNPACK_FLOAT_LO_EXPR:
1826 case VEC_UNPACK_FLOAT_HI_EXPR:
1827 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1828 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1830 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1831 enum tree_code subcode;
1833 if (TREE_CODE (arg0) != VECTOR_CST)
1834 return NULL_TREE;
1836 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1837 return NULL_TREE;
1838 out_nelts = in_nelts / 2;
1839 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1841 unsigned int offset = 0;
1842 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1843 || code == VEC_UNPACK_FLOAT_LO_EXPR
1844 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1845 offset = out_nelts;
1847 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1848 subcode = NOP_EXPR;
1849 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1850 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1851 subcode = FLOAT_EXPR;
1852 else
1853 subcode = FIX_TRUNC_EXPR;
1855 tree_vector_builder elts (type, out_nelts, 1);
1856 for (i = 0; i < out_nelts; i++)
1858 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1859 VECTOR_CST_ELT (arg0, i + offset));
1860 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1861 return NULL_TREE;
1862 elts.quick_push (elt);
1865 return elts.build ();
1868 case VEC_DUPLICATE_EXPR:
1869 if (CONSTANT_CLASS_P (arg0))
1870 return build_vector_from_val (type, arg0);
1871 return NULL_TREE;
1873 default:
1874 break;
1877 return NULL_TREE;
1880 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1881 indicates which particular sizetype to create. */
1883 tree
1884 size_int_kind (poly_int64 number, enum size_type_kind kind)
1886 return build_int_cst (sizetype_tab[(int) kind], number);
1889 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1890 is a tree code. The type of the result is taken from the operands.
1891 Both must be equivalent integer types, ala int_binop_types_match_p.
1892 If the operands are constant, so is the result. */
1894 tree
1895 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1897 tree type = TREE_TYPE (arg0);
1899 if (arg0 == error_mark_node || arg1 == error_mark_node)
1900 return error_mark_node;
1902 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1903 TREE_TYPE (arg1)));
1905 /* Handle the special case of two poly_int constants faster. */
1906 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1908 /* And some specific cases even faster than that. */
1909 if (code == PLUS_EXPR)
1911 if (integer_zerop (arg0)
1912 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1913 return arg1;
1914 if (integer_zerop (arg1)
1915 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1916 return arg0;
1918 else if (code == MINUS_EXPR)
1920 if (integer_zerop (arg1)
1921 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1922 return arg0;
1924 else if (code == MULT_EXPR)
1926 if (integer_onep (arg0)
1927 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1928 return arg1;
1931 /* Handle general case of two integer constants. For sizetype
1932 constant calculations we always want to know about overflow,
1933 even in the unsigned case. */
1934 tree res = int_const_binop (code, arg0, arg1, -1);
1935 if (res != NULL_TREE)
1936 return res;
1939 return fold_build2_loc (loc, code, type, arg0, arg1);
1942 /* Given two values, either both of sizetype or both of bitsizetype,
1943 compute the difference between the two values. Return the value
1944 in signed type corresponding to the type of the operands. */
1946 tree
1947 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1949 tree type = TREE_TYPE (arg0);
1950 tree ctype;
1952 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1953 TREE_TYPE (arg1)));
1955 /* If the type is already signed, just do the simple thing. */
1956 if (!TYPE_UNSIGNED (type))
1957 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1959 if (type == sizetype)
1960 ctype = ssizetype;
1961 else if (type == bitsizetype)
1962 ctype = sbitsizetype;
1963 else
1964 ctype = signed_type_for (type);
1966 /* If either operand is not a constant, do the conversions to the signed
1967 type and subtract. The hardware will do the right thing with any
1968 overflow in the subtraction. */
1969 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1970 return size_binop_loc (loc, MINUS_EXPR,
1971 fold_convert_loc (loc, ctype, arg0),
1972 fold_convert_loc (loc, ctype, arg1));
1974 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1975 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1976 overflow) and negate (which can't either). Special-case a result
1977 of zero while we're here. */
1978 if (tree_int_cst_equal (arg0, arg1))
1979 return build_int_cst (ctype, 0);
1980 else if (tree_int_cst_lt (arg1, arg0))
1981 return fold_convert_loc (loc, ctype,
1982 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1983 else
1984 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1985 fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc,
1987 MINUS_EXPR,
1988 arg1, arg0)));
1991 /* A subroutine of fold_convert_const handling conversions of an
1992 INTEGER_CST to another integer type. */
1994 static tree
1995 fold_convert_const_int_from_int (tree type, const_tree arg1)
1997 /* Given an integer constant, make new constant with new type,
1998 appropriately sign-extended or truncated. Use widest_int
1999 so that any extension is done according ARG1's type. */
2000 return force_fit_type (type, wi::to_widest (arg1),
2001 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2002 TREE_OVERFLOW (arg1));
2005 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2006 to an integer type. */
2008 static tree
2009 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2011 bool overflow = false;
2012 tree t;
2014 /* The following code implements the floating point to integer
2015 conversion rules required by the Java Language Specification,
2016 that IEEE NaNs are mapped to zero and values that overflow
2017 the target precision saturate, i.e. values greater than
2018 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2019 are mapped to INT_MIN. These semantics are allowed by the
2020 C and C++ standards that simply state that the behavior of
2021 FP-to-integer conversion is unspecified upon overflow. */
2023 wide_int val;
2024 REAL_VALUE_TYPE r;
2025 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2027 switch (code)
2029 case FIX_TRUNC_EXPR:
2030 real_trunc (&r, VOIDmode, &x);
2031 break;
2033 default:
2034 gcc_unreachable ();
2037 /* If R is NaN, return zero and show we have an overflow. */
2038 if (REAL_VALUE_ISNAN (r))
2040 overflow = true;
2041 val = wi::zero (TYPE_PRECISION (type));
2044 /* See if R is less than the lower bound or greater than the
2045 upper bound. */
2047 if (! overflow)
2049 tree lt = TYPE_MIN_VALUE (type);
2050 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2051 if (real_less (&r, &l))
2053 overflow = true;
2054 val = wi::to_wide (lt);
2058 if (! overflow)
2060 tree ut = TYPE_MAX_VALUE (type);
2061 if (ut)
2063 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2064 if (real_less (&u, &r))
2066 overflow = true;
2067 val = wi::to_wide (ut);
2072 if (! overflow)
2073 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2075 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2076 return t;
2079 /* A subroutine of fold_convert_const handling conversions of a
2080 FIXED_CST to an integer type. */
2082 static tree
2083 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2085 tree t;
2086 double_int temp, temp_trunc;
2087 scalar_mode mode;
2089 /* Right shift FIXED_CST to temp by fbit. */
2090 temp = TREE_FIXED_CST (arg1).data;
2091 mode = TREE_FIXED_CST (arg1).mode;
2092 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2094 temp = temp.rshift (GET_MODE_FBIT (mode),
2095 HOST_BITS_PER_DOUBLE_INT,
2096 SIGNED_FIXED_POINT_MODE_P (mode));
2098 /* Left shift temp to temp_trunc by fbit. */
2099 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2100 HOST_BITS_PER_DOUBLE_INT,
2101 SIGNED_FIXED_POINT_MODE_P (mode));
2103 else
2105 temp = double_int_zero;
2106 temp_trunc = double_int_zero;
2109 /* If FIXED_CST is negative, we need to round the value toward 0.
2110 By checking if the fractional bits are not zero to add 1 to temp. */
2111 if (SIGNED_FIXED_POINT_MODE_P (mode)
2112 && temp_trunc.is_negative ()
2113 && TREE_FIXED_CST (arg1).data != temp_trunc)
2114 temp += double_int_one;
2116 /* Given a fixed-point constant, make new constant with new type,
2117 appropriately sign-extended or truncated. */
2118 t = force_fit_type (type, temp, -1,
2119 (temp.is_negative ()
2120 && (TYPE_UNSIGNED (type)
2121 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2122 | TREE_OVERFLOW (arg1));
2124 return t;
2127 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2128 to another floating point type. */
2130 static tree
2131 fold_convert_const_real_from_real (tree type, const_tree arg1)
2133 REAL_VALUE_TYPE value;
2134 tree t;
2136 /* Don't perform the operation if flag_signaling_nans is on
2137 and the operand is a signaling NaN. */
2138 if (HONOR_SNANS (arg1)
2139 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2140 return NULL_TREE;
2142 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2143 t = build_real (type, value);
2145 /* If converting an infinity or NAN to a representation that doesn't
2146 have one, set the overflow bit so that we can produce some kind of
2147 error message at the appropriate point if necessary. It's not the
2148 most user-friendly message, but it's better than nothing. */
2149 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2150 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2151 TREE_OVERFLOW (t) = 1;
2152 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2153 && !MODE_HAS_NANS (TYPE_MODE (type)))
2154 TREE_OVERFLOW (t) = 1;
2155 /* Regular overflow, conversion produced an infinity in a mode that
2156 can't represent them. */
2157 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2158 && REAL_VALUE_ISINF (value)
2159 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2160 TREE_OVERFLOW (t) = 1;
2161 else
2162 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2163 return t;
2166 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2167 to a floating point type. */
2169 static tree
2170 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2172 REAL_VALUE_TYPE value;
2173 tree t;
2175 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2176 &TREE_FIXED_CST (arg1));
2177 t = build_real (type, value);
2179 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2180 return t;
2183 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2184 to another fixed-point type. */
2186 static tree
2187 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2189 FIXED_VALUE_TYPE value;
2190 tree t;
2191 bool overflow_p;
2193 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2194 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2195 t = build_fixed (type, value);
2197 /* Propagate overflow flags. */
2198 if (overflow_p | TREE_OVERFLOW (arg1))
2199 TREE_OVERFLOW (t) = 1;
2200 return t;
2203 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2204 to a fixed-point type. */
2206 static tree
2207 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2209 FIXED_VALUE_TYPE value;
2210 tree t;
2211 bool overflow_p;
2212 double_int di;
2214 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2216 di.low = TREE_INT_CST_ELT (arg1, 0);
2217 if (TREE_INT_CST_NUNITS (arg1) == 1)
2218 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2219 else
2220 di.high = TREE_INT_CST_ELT (arg1, 1);
2222 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2223 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2224 TYPE_SATURATING (type));
2225 t = build_fixed (type, value);
2227 /* Propagate overflow flags. */
2228 if (overflow_p | TREE_OVERFLOW (arg1))
2229 TREE_OVERFLOW (t) = 1;
2230 return t;
2233 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2234 to a fixed-point type. */
2236 static tree
2237 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2239 FIXED_VALUE_TYPE value;
2240 tree t;
2241 bool overflow_p;
2243 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2244 &TREE_REAL_CST (arg1),
2245 TYPE_SATURATING (type));
2246 t = build_fixed (type, value);
2248 /* Propagate overflow flags. */
2249 if (overflow_p | TREE_OVERFLOW (arg1))
2250 TREE_OVERFLOW (t) = 1;
2251 return t;
2254 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2255 type TYPE. If no simplification can be done return NULL_TREE. */
2257 static tree
2258 fold_convert_const (enum tree_code code, tree type, tree arg1)
2260 tree arg_type = TREE_TYPE (arg1);
2261 if (arg_type == type)
2262 return arg1;
2264 /* We can't widen types, since the runtime value could overflow the
2265 original type before being extended to the new type. */
2266 if (POLY_INT_CST_P (arg1)
2267 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2268 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2269 return build_poly_int_cst (type,
2270 poly_wide_int::from (poly_int_cst_value (arg1),
2271 TYPE_PRECISION (type),
2272 TYPE_SIGN (arg_type)));
2274 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2275 || TREE_CODE (type) == OFFSET_TYPE)
2277 if (TREE_CODE (arg1) == INTEGER_CST)
2278 return fold_convert_const_int_from_int (type, arg1);
2279 else if (TREE_CODE (arg1) == REAL_CST)
2280 return fold_convert_const_int_from_real (code, type, arg1);
2281 else if (TREE_CODE (arg1) == FIXED_CST)
2282 return fold_convert_const_int_from_fixed (type, arg1);
2284 else if (TREE_CODE (type) == REAL_TYPE)
2286 if (TREE_CODE (arg1) == INTEGER_CST)
2287 return build_real_from_int_cst (type, arg1);
2288 else if (TREE_CODE (arg1) == REAL_CST)
2289 return fold_convert_const_real_from_real (type, arg1);
2290 else if (TREE_CODE (arg1) == FIXED_CST)
2291 return fold_convert_const_real_from_fixed (type, arg1);
2293 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2295 if (TREE_CODE (arg1) == FIXED_CST)
2296 return fold_convert_const_fixed_from_fixed (type, arg1);
2297 else if (TREE_CODE (arg1) == INTEGER_CST)
2298 return fold_convert_const_fixed_from_int (type, arg1);
2299 else if (TREE_CODE (arg1) == REAL_CST)
2300 return fold_convert_const_fixed_from_real (type, arg1);
2302 else if (TREE_CODE (type) == VECTOR_TYPE)
2304 if (TREE_CODE (arg1) == VECTOR_CST
2305 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2307 tree elttype = TREE_TYPE (type);
2308 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2309 /* We can't handle steps directly when extending, since the
2310 values need to wrap at the original precision first. */
2311 bool step_ok_p
2312 = (INTEGRAL_TYPE_P (elttype)
2313 && INTEGRAL_TYPE_P (arg1_elttype)
2314 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2315 tree_vector_builder v;
2316 if (!v.new_unary_operation (type, arg1, step_ok_p))
2317 return NULL_TREE;
2318 unsigned int len = v.encoded_nelts ();
2319 for (unsigned int i = 0; i < len; ++i)
2321 tree elt = VECTOR_CST_ELT (arg1, i);
2322 tree cvt = fold_convert_const (code, elttype, elt);
2323 if (cvt == NULL_TREE)
2324 return NULL_TREE;
2325 v.quick_push (cvt);
2327 return v.build ();
2330 return NULL_TREE;
2333 /* Construct a vector of zero elements of vector type TYPE. */
2335 static tree
2336 build_zero_vector (tree type)
2338 tree t;
2340 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2341 return build_vector_from_val (type, t);
2344 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2346 bool
2347 fold_convertible_p (const_tree type, const_tree arg)
2349 tree orig = TREE_TYPE (arg);
2351 if (type == orig)
2352 return true;
2354 if (TREE_CODE (arg) == ERROR_MARK
2355 || TREE_CODE (type) == ERROR_MARK
2356 || TREE_CODE (orig) == ERROR_MARK)
2357 return false;
2359 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2360 return true;
2362 switch (TREE_CODE (type))
2364 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2365 case POINTER_TYPE: case REFERENCE_TYPE:
2366 case OFFSET_TYPE:
2367 return (INTEGRAL_TYPE_P (orig)
2368 || (POINTER_TYPE_P (orig)
2369 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2370 || TREE_CODE (orig) == OFFSET_TYPE);
2372 case REAL_TYPE:
2373 case FIXED_POINT_TYPE:
2374 case VOID_TYPE:
2375 return TREE_CODE (type) == TREE_CODE (orig);
2377 case VECTOR_TYPE:
2378 return (VECTOR_TYPE_P (orig)
2379 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2380 TYPE_VECTOR_SUBPARTS (orig))
2381 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2383 default:
2384 return false;
2388 /* Convert expression ARG to type TYPE. Used by the middle-end for
2389 simple conversions in preference to calling the front-end's convert. */
2391 tree
2392 fold_convert_loc (location_t loc, tree type, tree arg)
2394 tree orig = TREE_TYPE (arg);
2395 tree tem;
2397 if (type == orig)
2398 return arg;
2400 if (TREE_CODE (arg) == ERROR_MARK
2401 || TREE_CODE (type) == ERROR_MARK
2402 || TREE_CODE (orig) == ERROR_MARK)
2403 return error_mark_node;
2405 switch (TREE_CODE (type))
2407 case POINTER_TYPE:
2408 case REFERENCE_TYPE:
2409 /* Handle conversions between pointers to different address spaces. */
2410 if (POINTER_TYPE_P (orig)
2411 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2412 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2413 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2414 /* fall through */
2416 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2417 case OFFSET_TYPE:
2418 if (TREE_CODE (arg) == INTEGER_CST)
2420 tem = fold_convert_const (NOP_EXPR, type, arg);
2421 if (tem != NULL_TREE)
2422 return tem;
2424 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2425 || TREE_CODE (orig) == OFFSET_TYPE)
2426 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2427 if (TREE_CODE (orig) == COMPLEX_TYPE)
2428 return fold_convert_loc (loc, type,
2429 fold_build1_loc (loc, REALPART_EXPR,
2430 TREE_TYPE (orig), arg));
2431 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2432 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2433 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2435 case REAL_TYPE:
2436 if (TREE_CODE (arg) == INTEGER_CST)
2438 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2439 if (tem != NULL_TREE)
2440 return tem;
2442 else if (TREE_CODE (arg) == REAL_CST)
2444 tem = fold_convert_const (NOP_EXPR, type, arg);
2445 if (tem != NULL_TREE)
2446 return tem;
2448 else if (TREE_CODE (arg) == FIXED_CST)
2450 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2451 if (tem != NULL_TREE)
2452 return tem;
2455 switch (TREE_CODE (orig))
2457 case INTEGER_TYPE:
2458 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2459 case POINTER_TYPE: case REFERENCE_TYPE:
2460 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2462 case REAL_TYPE:
2463 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2465 case FIXED_POINT_TYPE:
2466 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2468 case COMPLEX_TYPE:
2469 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2470 return fold_convert_loc (loc, type, tem);
2472 default:
2473 gcc_unreachable ();
2476 case FIXED_POINT_TYPE:
2477 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2478 || TREE_CODE (arg) == REAL_CST)
2480 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2481 if (tem != NULL_TREE)
2482 goto fold_convert_exit;
2485 switch (TREE_CODE (orig))
2487 case FIXED_POINT_TYPE:
2488 case INTEGER_TYPE:
2489 case ENUMERAL_TYPE:
2490 case BOOLEAN_TYPE:
2491 case REAL_TYPE:
2492 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2494 case COMPLEX_TYPE:
2495 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2496 return fold_convert_loc (loc, type, tem);
2498 default:
2499 gcc_unreachable ();
2502 case COMPLEX_TYPE:
2503 switch (TREE_CODE (orig))
2505 case INTEGER_TYPE:
2506 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2507 case POINTER_TYPE: case REFERENCE_TYPE:
2508 case REAL_TYPE:
2509 case FIXED_POINT_TYPE:
2510 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2511 fold_convert_loc (loc, TREE_TYPE (type), arg),
2512 fold_convert_loc (loc, TREE_TYPE (type),
2513 integer_zero_node));
2514 case COMPLEX_TYPE:
2516 tree rpart, ipart;
2518 if (TREE_CODE (arg) == COMPLEX_EXPR)
2520 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2521 TREE_OPERAND (arg, 0));
2522 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2523 TREE_OPERAND (arg, 1));
2524 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2527 arg = save_expr (arg);
2528 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2529 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2530 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2531 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2532 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2535 default:
2536 gcc_unreachable ();
2539 case VECTOR_TYPE:
2540 if (integer_zerop (arg))
2541 return build_zero_vector (type);
2542 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2543 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2544 || TREE_CODE (orig) == VECTOR_TYPE);
2545 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2547 case VOID_TYPE:
2548 tem = fold_ignored_result (arg);
2549 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2551 default:
2552 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2553 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2554 gcc_unreachable ();
2556 fold_convert_exit:
2557 protected_set_expr_location_unshare (tem, loc);
2558 return tem;
2561 /* Return false if expr can be assumed not to be an lvalue, true
2562 otherwise. */
2564 static bool
2565 maybe_lvalue_p (const_tree x)
2567 /* We only need to wrap lvalue tree codes. */
2568 switch (TREE_CODE (x))
2570 case VAR_DECL:
2571 case PARM_DECL:
2572 case RESULT_DECL:
2573 case LABEL_DECL:
2574 case FUNCTION_DECL:
2575 case SSA_NAME:
2577 case COMPONENT_REF:
2578 case MEM_REF:
2579 case INDIRECT_REF:
2580 case ARRAY_REF:
2581 case ARRAY_RANGE_REF:
2582 case BIT_FIELD_REF:
2583 case OBJ_TYPE_REF:
2585 case REALPART_EXPR:
2586 case IMAGPART_EXPR:
2587 case PREINCREMENT_EXPR:
2588 case PREDECREMENT_EXPR:
2589 case SAVE_EXPR:
2590 case TRY_CATCH_EXPR:
2591 case WITH_CLEANUP_EXPR:
2592 case COMPOUND_EXPR:
2593 case MODIFY_EXPR:
2594 case TARGET_EXPR:
2595 case COND_EXPR:
2596 case BIND_EXPR:
2597 case VIEW_CONVERT_EXPR:
2598 break;
2600 default:
2601 /* Assume the worst for front-end tree codes. */
2602 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2603 break;
2604 return false;
2607 return true;
2610 /* Return an expr equal to X but certainly not valid as an lvalue. */
2612 tree
2613 non_lvalue_loc (location_t loc, tree x)
2615 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2616 us. */
2617 if (in_gimple_form)
2618 return x;
2620 if (! maybe_lvalue_p (x))
2621 return x;
2622 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2625 /* Given a tree comparison code, return the code that is the logical inverse.
2626 It is generally not safe to do this for floating-point comparisons, except
2627 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2628 ERROR_MARK in this case. */
2630 enum tree_code
2631 invert_tree_comparison (enum tree_code code, bool honor_nans)
2633 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2634 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2635 return ERROR_MARK;
2637 switch (code)
2639 case EQ_EXPR:
2640 return NE_EXPR;
2641 case NE_EXPR:
2642 return EQ_EXPR;
2643 case GT_EXPR:
2644 return honor_nans ? UNLE_EXPR : LE_EXPR;
2645 case GE_EXPR:
2646 return honor_nans ? UNLT_EXPR : LT_EXPR;
2647 case LT_EXPR:
2648 return honor_nans ? UNGE_EXPR : GE_EXPR;
2649 case LE_EXPR:
2650 return honor_nans ? UNGT_EXPR : GT_EXPR;
2651 case LTGT_EXPR:
2652 return UNEQ_EXPR;
2653 case UNEQ_EXPR:
2654 return LTGT_EXPR;
2655 case UNGT_EXPR:
2656 return LE_EXPR;
2657 case UNGE_EXPR:
2658 return LT_EXPR;
2659 case UNLT_EXPR:
2660 return GE_EXPR;
2661 case UNLE_EXPR:
2662 return GT_EXPR;
2663 case ORDERED_EXPR:
2664 return UNORDERED_EXPR;
2665 case UNORDERED_EXPR:
2666 return ORDERED_EXPR;
2667 default:
2668 gcc_unreachable ();
2672 /* Similar, but return the comparison that results if the operands are
2673 swapped. This is safe for floating-point. */
2675 enum tree_code
2676 swap_tree_comparison (enum tree_code code)
2678 switch (code)
2680 case EQ_EXPR:
2681 case NE_EXPR:
2682 case ORDERED_EXPR:
2683 case UNORDERED_EXPR:
2684 case LTGT_EXPR:
2685 case UNEQ_EXPR:
2686 return code;
2687 case GT_EXPR:
2688 return LT_EXPR;
2689 case GE_EXPR:
2690 return LE_EXPR;
2691 case LT_EXPR:
2692 return GT_EXPR;
2693 case LE_EXPR:
2694 return GE_EXPR;
2695 case UNGT_EXPR:
2696 return UNLT_EXPR;
2697 case UNGE_EXPR:
2698 return UNLE_EXPR;
2699 case UNLT_EXPR:
2700 return UNGT_EXPR;
2701 case UNLE_EXPR:
2702 return UNGE_EXPR;
2703 default:
2704 gcc_unreachable ();
2709 /* Convert a comparison tree code from an enum tree_code representation
2710 into a compcode bit-based encoding. This function is the inverse of
2711 compcode_to_comparison. */
2713 static enum comparison_code
2714 comparison_to_compcode (enum tree_code code)
2716 switch (code)
2718 case LT_EXPR:
2719 return COMPCODE_LT;
2720 case EQ_EXPR:
2721 return COMPCODE_EQ;
2722 case LE_EXPR:
2723 return COMPCODE_LE;
2724 case GT_EXPR:
2725 return COMPCODE_GT;
2726 case NE_EXPR:
2727 return COMPCODE_NE;
2728 case GE_EXPR:
2729 return COMPCODE_GE;
2730 case ORDERED_EXPR:
2731 return COMPCODE_ORD;
2732 case UNORDERED_EXPR:
2733 return COMPCODE_UNORD;
2734 case UNLT_EXPR:
2735 return COMPCODE_UNLT;
2736 case UNEQ_EXPR:
2737 return COMPCODE_UNEQ;
2738 case UNLE_EXPR:
2739 return COMPCODE_UNLE;
2740 case UNGT_EXPR:
2741 return COMPCODE_UNGT;
2742 case LTGT_EXPR:
2743 return COMPCODE_LTGT;
2744 case UNGE_EXPR:
2745 return COMPCODE_UNGE;
2746 default:
2747 gcc_unreachable ();
2751 /* Convert a compcode bit-based encoding of a comparison operator back
2752 to GCC's enum tree_code representation. This function is the
2753 inverse of comparison_to_compcode. */
2755 static enum tree_code
2756 compcode_to_comparison (enum comparison_code code)
2758 switch (code)
2760 case COMPCODE_LT:
2761 return LT_EXPR;
2762 case COMPCODE_EQ:
2763 return EQ_EXPR;
2764 case COMPCODE_LE:
2765 return LE_EXPR;
2766 case COMPCODE_GT:
2767 return GT_EXPR;
2768 case COMPCODE_NE:
2769 return NE_EXPR;
2770 case COMPCODE_GE:
2771 return GE_EXPR;
2772 case COMPCODE_ORD:
2773 return ORDERED_EXPR;
2774 case COMPCODE_UNORD:
2775 return UNORDERED_EXPR;
2776 case COMPCODE_UNLT:
2777 return UNLT_EXPR;
2778 case COMPCODE_UNEQ:
2779 return UNEQ_EXPR;
2780 case COMPCODE_UNLE:
2781 return UNLE_EXPR;
2782 case COMPCODE_UNGT:
2783 return UNGT_EXPR;
2784 case COMPCODE_LTGT:
2785 return LTGT_EXPR;
2786 case COMPCODE_UNGE:
2787 return UNGE_EXPR;
2788 default:
2789 gcc_unreachable ();
2793 /* Return true if COND1 tests the opposite condition of COND2. */
2795 bool
2796 inverse_conditions_p (const_tree cond1, const_tree cond2)
2798 return (COMPARISON_CLASS_P (cond1)
2799 && COMPARISON_CLASS_P (cond2)
2800 && (invert_tree_comparison
2801 (TREE_CODE (cond1),
2802 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2803 && operand_equal_p (TREE_OPERAND (cond1, 0),
2804 TREE_OPERAND (cond2, 0), 0)
2805 && operand_equal_p (TREE_OPERAND (cond1, 1),
2806 TREE_OPERAND (cond2, 1), 0));
2809 /* Return a tree for the comparison which is the combination of
2810 doing the AND or OR (depending on CODE) of the two operations LCODE
2811 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2812 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2813 if this makes the transformation invalid. */
2815 tree
2816 combine_comparisons (location_t loc,
2817 enum tree_code code, enum tree_code lcode,
2818 enum tree_code rcode, tree truth_type,
2819 tree ll_arg, tree lr_arg)
2821 bool honor_nans = HONOR_NANS (ll_arg);
2822 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2823 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2824 int compcode;
2826 switch (code)
2828 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2829 compcode = lcompcode & rcompcode;
2830 break;
2832 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2833 compcode = lcompcode | rcompcode;
2834 break;
2836 default:
2837 return NULL_TREE;
2840 if (!honor_nans)
2842 /* Eliminate unordered comparisons, as well as LTGT and ORD
2843 which are not used unless the mode has NaNs. */
2844 compcode &= ~COMPCODE_UNORD;
2845 if (compcode == COMPCODE_LTGT)
2846 compcode = COMPCODE_NE;
2847 else if (compcode == COMPCODE_ORD)
2848 compcode = COMPCODE_TRUE;
2850 else if (flag_trapping_math)
2852 /* Check that the original operation and the optimized ones will trap
2853 under the same condition. */
2854 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2855 && (lcompcode != COMPCODE_EQ)
2856 && (lcompcode != COMPCODE_ORD);
2857 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2858 && (rcompcode != COMPCODE_EQ)
2859 && (rcompcode != COMPCODE_ORD);
2860 bool trap = (compcode & COMPCODE_UNORD) == 0
2861 && (compcode != COMPCODE_EQ)
2862 && (compcode != COMPCODE_ORD);
2864 /* In a short-circuited boolean expression the LHS might be
2865 such that the RHS, if evaluated, will never trap. For
2866 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2867 if neither x nor y is NaN. (This is a mixed blessing: for
2868 example, the expression above will never trap, hence
2869 optimizing it to x < y would be invalid). */
2870 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2871 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2872 rtrap = false;
2874 /* If the comparison was short-circuited, and only the RHS
2875 trapped, we may now generate a spurious trap. */
2876 if (rtrap && !ltrap
2877 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2878 return NULL_TREE;
2880 /* If we changed the conditions that cause a trap, we lose. */
2881 if ((ltrap || rtrap) != trap)
2882 return NULL_TREE;
2885 if (compcode == COMPCODE_TRUE)
2886 return constant_boolean_node (true, truth_type);
2887 else if (compcode == COMPCODE_FALSE)
2888 return constant_boolean_node (false, truth_type);
2889 else
2891 enum tree_code tcode;
2893 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2894 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2898 /* Return nonzero if two operands (typically of the same tree node)
2899 are necessarily equal. FLAGS modifies behavior as follows:
2901 If OEP_ONLY_CONST is set, only return nonzero for constants.
2902 This function tests whether the operands are indistinguishable;
2903 it does not test whether they are equal using C's == operation.
2904 The distinction is important for IEEE floating point, because
2905 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2906 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2908 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2909 even though it may hold multiple values during a function.
2910 This is because a GCC tree node guarantees that nothing else is
2911 executed between the evaluation of its "operands" (which may often
2912 be evaluated in arbitrary order). Hence if the operands themselves
2913 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2914 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2915 unset means assuming isochronic (or instantaneous) tree equivalence.
2916 Unless comparing arbitrary expression trees, such as from different
2917 statements, this flag can usually be left unset.
2919 If OEP_PURE_SAME is set, then pure functions with identical arguments
2920 are considered the same. It is used when the caller has other ways
2921 to ensure that global memory is unchanged in between.
2923 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2924 not values of expressions.
2926 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2927 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2929 If OEP_BITWISE is set, then require the values to be bitwise identical
2930 rather than simply numerically equal. Do not take advantage of things
2931 like math-related flags or undefined behavior; only return true for
2932 values that are provably bitwise identical in all circumstances.
2934 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2935 any operand with side effect. This is unnecesarily conservative in the
2936 case we know that arg0 and arg1 are in disjoint code paths (such as in
2937 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2938 addresses with TREE_CONSTANT flag set so we know that &var == &var
2939 even if var is volatile. */
2941 bool
2942 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2943 unsigned int flags)
2945 bool r;
2946 if (verify_hash_value (arg0, arg1, flags, &r))
2947 return r;
2949 STRIP_ANY_LOCATION_WRAPPER (arg0);
2950 STRIP_ANY_LOCATION_WRAPPER (arg1);
2952 /* If either is ERROR_MARK, they aren't equal. */
2953 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2954 || TREE_TYPE (arg0) == error_mark_node
2955 || TREE_TYPE (arg1) == error_mark_node)
2956 return false;
2958 /* Similar, if either does not have a type (like a template id),
2959 they aren't equal. */
2960 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2961 return false;
2963 /* Bitwise identity makes no sense if the values have different layouts. */
2964 if ((flags & OEP_BITWISE)
2965 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2966 return false;
2968 /* We cannot consider pointers to different address space equal. */
2969 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2970 && POINTER_TYPE_P (TREE_TYPE (arg1))
2971 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2972 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2973 return false;
2975 /* Check equality of integer constants before bailing out due to
2976 precision differences. */
2977 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2979 /* Address of INTEGER_CST is not defined; check that we did not forget
2980 to drop the OEP_ADDRESS_OF flags. */
2981 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2982 return tree_int_cst_equal (arg0, arg1);
2985 if (!(flags & OEP_ADDRESS_OF))
2987 /* If both types don't have the same signedness, then we can't consider
2988 them equal. We must check this before the STRIP_NOPS calls
2989 because they may change the signedness of the arguments. As pointers
2990 strictly don't have a signedness, require either two pointers or
2991 two non-pointers as well. */
2992 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2993 || POINTER_TYPE_P (TREE_TYPE (arg0))
2994 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2995 return false;
2997 /* If both types don't have the same precision, then it is not safe
2998 to strip NOPs. */
2999 if (element_precision (TREE_TYPE (arg0))
3000 != element_precision (TREE_TYPE (arg1)))
3001 return false;
3003 STRIP_NOPS (arg0);
3004 STRIP_NOPS (arg1);
3006 #if 0
3007 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3008 sanity check once the issue is solved. */
3009 else
3010 /* Addresses of conversions and SSA_NAMEs (and many other things)
3011 are not defined. Check that we did not forget to drop the
3012 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3013 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3014 && TREE_CODE (arg0) != SSA_NAME);
3015 #endif
3017 /* In case both args are comparisons but with different comparison
3018 code, try to swap the comparison operands of one arg to produce
3019 a match and compare that variant. */
3020 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3021 && COMPARISON_CLASS_P (arg0)
3022 && COMPARISON_CLASS_P (arg1))
3024 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3026 if (TREE_CODE (arg0) == swap_code)
3027 return operand_equal_p (TREE_OPERAND (arg0, 0),
3028 TREE_OPERAND (arg1, 1), flags)
3029 && operand_equal_p (TREE_OPERAND (arg0, 1),
3030 TREE_OPERAND (arg1, 0), flags);
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3035 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3036 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3038 else if (flags & OEP_ADDRESS_OF)
3040 /* If we are interested in comparing addresses ignore
3041 MEM_REF wrappings of the base that can appear just for
3042 TBAA reasons. */
3043 if (TREE_CODE (arg0) == MEM_REF
3044 && DECL_P (arg1)
3045 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3046 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3047 && integer_zerop (TREE_OPERAND (arg0, 1)))
3048 return true;
3049 else if (TREE_CODE (arg1) == MEM_REF
3050 && DECL_P (arg0)
3051 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3052 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3053 && integer_zerop (TREE_OPERAND (arg1, 1)))
3054 return true;
3055 return false;
3057 else
3058 return false;
3061 /* When not checking adddresses, this is needed for conversions and for
3062 COMPONENT_REF. Might as well play it safe and always test this. */
3063 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3064 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3065 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3066 && !(flags & OEP_ADDRESS_OF)))
3067 return false;
3069 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3070 We don't care about side effects in that case because the SAVE_EXPR
3071 takes care of that for us. In all other cases, two expressions are
3072 equal if they have no side effects. If we have two identical
3073 expressions with side effects that should be treated the same due
3074 to the only side effects being identical SAVE_EXPR's, that will
3075 be detected in the recursive calls below.
3076 If we are taking an invariant address of two identical objects
3077 they are necessarily equal as well. */
3078 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3079 && (TREE_CODE (arg0) == SAVE_EXPR
3080 || (flags & OEP_MATCH_SIDE_EFFECTS)
3081 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3082 return true;
3084 /* Next handle constant cases, those for which we can return 1 even
3085 if ONLY_CONST is set. */
3086 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3087 switch (TREE_CODE (arg0))
3089 case INTEGER_CST:
3090 return tree_int_cst_equal (arg0, arg1);
3092 case FIXED_CST:
3093 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3094 TREE_FIXED_CST (arg1));
3096 case REAL_CST:
3097 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3098 return true;
3100 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3102 /* If we do not distinguish between signed and unsigned zero,
3103 consider them equal. */
3104 if (real_zerop (arg0) && real_zerop (arg1))
3105 return true;
3107 return false;
3109 case VECTOR_CST:
3111 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3112 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3113 return false;
3115 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3116 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3117 return false;
3119 unsigned int count = vector_cst_encoded_nelts (arg0);
3120 for (unsigned int i = 0; i < count; ++i)
3121 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3122 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3123 return false;
3124 return true;
3127 case COMPLEX_CST:
3128 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3129 flags)
3130 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3131 flags));
3133 case STRING_CST:
3134 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3135 && ! memcmp (TREE_STRING_POINTER (arg0),
3136 TREE_STRING_POINTER (arg1),
3137 TREE_STRING_LENGTH (arg0)));
3139 case ADDR_EXPR:
3140 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3141 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3142 flags | OEP_ADDRESS_OF
3143 | OEP_MATCH_SIDE_EFFECTS);
3144 case CONSTRUCTOR:
3145 /* In GIMPLE empty constructors are allowed in initializers of
3146 aggregates. */
3147 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3148 default:
3149 break;
3152 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3153 two instances of undefined behavior will give identical results. */
3154 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3155 return false;
3157 /* Define macros to test an operand from arg0 and arg1 for equality and a
3158 variant that allows null and views null as being different from any
3159 non-null value. In the latter case, if either is null, the both
3160 must be; otherwise, do the normal comparison. */
3161 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3162 TREE_OPERAND (arg1, N), flags)
3164 #define OP_SAME_WITH_NULL(N) \
3165 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3166 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3168 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3170 case tcc_unary:
3171 /* Two conversions are equal only if signedness and modes match. */
3172 switch (TREE_CODE (arg0))
3174 CASE_CONVERT:
3175 case FIX_TRUNC_EXPR:
3176 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3177 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3178 return false;
3179 break;
3180 default:
3181 break;
3184 return OP_SAME (0);
3187 case tcc_comparison:
3188 case tcc_binary:
3189 if (OP_SAME (0) && OP_SAME (1))
3190 return true;
3192 /* For commutative ops, allow the other order. */
3193 return (commutative_tree_code (TREE_CODE (arg0))
3194 && operand_equal_p (TREE_OPERAND (arg0, 0),
3195 TREE_OPERAND (arg1, 1), flags)
3196 && operand_equal_p (TREE_OPERAND (arg0, 1),
3197 TREE_OPERAND (arg1, 0), flags));
3199 case tcc_reference:
3200 /* If either of the pointer (or reference) expressions we are
3201 dereferencing contain a side effect, these cannot be equal,
3202 but their addresses can be. */
3203 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3204 && (TREE_SIDE_EFFECTS (arg0)
3205 || TREE_SIDE_EFFECTS (arg1)))
3206 return false;
3208 switch (TREE_CODE (arg0))
3210 case INDIRECT_REF:
3211 if (!(flags & OEP_ADDRESS_OF))
3213 if (TYPE_ALIGN (TREE_TYPE (arg0))
3214 != TYPE_ALIGN (TREE_TYPE (arg1)))
3215 return false;
3216 /* Verify that the access types are compatible. */
3217 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3218 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3219 return false;
3221 flags &= ~OEP_ADDRESS_OF;
3222 return OP_SAME (0);
3224 case IMAGPART_EXPR:
3225 /* Require the same offset. */
3226 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3227 TYPE_SIZE (TREE_TYPE (arg1)),
3228 flags & ~OEP_ADDRESS_OF))
3229 return false;
3231 /* Fallthru. */
3232 case REALPART_EXPR:
3233 case VIEW_CONVERT_EXPR:
3234 return OP_SAME (0);
3236 case TARGET_MEM_REF:
3237 case MEM_REF:
3238 if (!(flags & OEP_ADDRESS_OF))
3240 /* Require equal access sizes */
3241 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3242 && (!TYPE_SIZE (TREE_TYPE (arg0))
3243 || !TYPE_SIZE (TREE_TYPE (arg1))
3244 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3245 TYPE_SIZE (TREE_TYPE (arg1)),
3246 flags)))
3247 return false;
3248 /* Verify that access happens in similar types. */
3249 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3250 return false;
3251 /* Verify that accesses are TBAA compatible. */
3252 if (!alias_ptr_types_compatible_p
3253 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3254 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3255 || (MR_DEPENDENCE_CLIQUE (arg0)
3256 != MR_DEPENDENCE_CLIQUE (arg1))
3257 || (MR_DEPENDENCE_BASE (arg0)
3258 != MR_DEPENDENCE_BASE (arg1)))
3259 return false;
3260 /* Verify that alignment is compatible. */
3261 if (TYPE_ALIGN (TREE_TYPE (arg0))
3262 != TYPE_ALIGN (TREE_TYPE (arg1)))
3263 return false;
3265 flags &= ~OEP_ADDRESS_OF;
3266 return (OP_SAME (0) && OP_SAME (1)
3267 /* TARGET_MEM_REF require equal extra operands. */
3268 && (TREE_CODE (arg0) != TARGET_MEM_REF
3269 || (OP_SAME_WITH_NULL (2)
3270 && OP_SAME_WITH_NULL (3)
3271 && OP_SAME_WITH_NULL (4))));
3273 case ARRAY_REF:
3274 case ARRAY_RANGE_REF:
3275 if (!OP_SAME (0))
3276 return false;
3277 flags &= ~OEP_ADDRESS_OF;
3278 /* Compare the array index by value if it is constant first as we
3279 may have different types but same value here. */
3280 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3281 TREE_OPERAND (arg1, 1))
3282 || OP_SAME (1))
3283 && OP_SAME_WITH_NULL (2)
3284 && OP_SAME_WITH_NULL (3)
3285 /* Compare low bound and element size as with OEP_ADDRESS_OF
3286 we have to account for the offset of the ref. */
3287 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3288 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3289 || (operand_equal_p (array_ref_low_bound
3290 (CONST_CAST_TREE (arg0)),
3291 array_ref_low_bound
3292 (CONST_CAST_TREE (arg1)), flags)
3293 && operand_equal_p (array_ref_element_size
3294 (CONST_CAST_TREE (arg0)),
3295 array_ref_element_size
3296 (CONST_CAST_TREE (arg1)),
3297 flags))));
3299 case COMPONENT_REF:
3300 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3301 may be NULL when we're called to compare MEM_EXPRs. */
3302 if (!OP_SAME_WITH_NULL (0))
3303 return false;
3305 bool compare_address = flags & OEP_ADDRESS_OF;
3307 /* Most of time we only need to compare FIELD_DECLs for equality.
3308 However when determining address look into actual offsets.
3309 These may match for unions and unshared record types. */
3310 flags &= ~OEP_ADDRESS_OF;
3311 if (!OP_SAME (1))
3313 if (compare_address
3314 && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3316 if (TREE_OPERAND (arg0, 2)
3317 || TREE_OPERAND (arg1, 2))
3318 return OP_SAME_WITH_NULL (2);
3319 tree field0 = TREE_OPERAND (arg0, 1);
3320 tree field1 = TREE_OPERAND (arg1, 1);
3322 if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3323 DECL_FIELD_OFFSET (field1), flags)
3324 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3325 DECL_FIELD_BIT_OFFSET (field1),
3326 flags))
3327 return false;
3329 else
3330 return false;
3333 return OP_SAME_WITH_NULL (2);
3335 case BIT_FIELD_REF:
3336 if (!OP_SAME (0))
3337 return false;
3338 flags &= ~OEP_ADDRESS_OF;
3339 return OP_SAME (1) && OP_SAME (2);
3341 default:
3342 return false;
3345 case tcc_expression:
3346 switch (TREE_CODE (arg0))
3348 case ADDR_EXPR:
3349 /* Be sure we pass right ADDRESS_OF flag. */
3350 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3351 return operand_equal_p (TREE_OPERAND (arg0, 0),
3352 TREE_OPERAND (arg1, 0),
3353 flags | OEP_ADDRESS_OF);
3355 case TRUTH_NOT_EXPR:
3356 return OP_SAME (0);
3358 case TRUTH_ANDIF_EXPR:
3359 case TRUTH_ORIF_EXPR:
3360 return OP_SAME (0) && OP_SAME (1);
3362 case WIDEN_MULT_PLUS_EXPR:
3363 case WIDEN_MULT_MINUS_EXPR:
3364 if (!OP_SAME (2))
3365 return false;
3366 /* The multiplcation operands are commutative. */
3367 /* FALLTHRU */
3369 case TRUTH_AND_EXPR:
3370 case TRUTH_OR_EXPR:
3371 case TRUTH_XOR_EXPR:
3372 if (OP_SAME (0) && OP_SAME (1))
3373 return true;
3375 /* Otherwise take into account this is a commutative operation. */
3376 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3377 TREE_OPERAND (arg1, 1), flags)
3378 && operand_equal_p (TREE_OPERAND (arg0, 1),
3379 TREE_OPERAND (arg1, 0), flags));
3381 case COND_EXPR:
3382 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3383 return false;
3384 flags &= ~OEP_ADDRESS_OF;
3385 return OP_SAME (0);
3387 case BIT_INSERT_EXPR:
3388 /* BIT_INSERT_EXPR has an implict operand as the type precision
3389 of op1. Need to check to make sure they are the same. */
3390 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3391 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3392 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3393 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3394 return false;
3395 /* FALLTHRU */
3397 case VEC_COND_EXPR:
3398 case DOT_PROD_EXPR:
3399 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3401 case MODIFY_EXPR:
3402 case INIT_EXPR:
3403 case COMPOUND_EXPR:
3404 case PREDECREMENT_EXPR:
3405 case PREINCREMENT_EXPR:
3406 case POSTDECREMENT_EXPR:
3407 case POSTINCREMENT_EXPR:
3408 if (flags & OEP_LEXICOGRAPHIC)
3409 return OP_SAME (0) && OP_SAME (1);
3410 return false;
3412 case CLEANUP_POINT_EXPR:
3413 case EXPR_STMT:
3414 case SAVE_EXPR:
3415 if (flags & OEP_LEXICOGRAPHIC)
3416 return OP_SAME (0);
3417 return false;
3419 case OBJ_TYPE_REF:
3420 /* Virtual table reference. */
3421 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3422 OBJ_TYPE_REF_EXPR (arg1), flags))
3423 return false;
3424 flags &= ~OEP_ADDRESS_OF;
3425 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3426 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3427 return false;
3428 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3429 OBJ_TYPE_REF_OBJECT (arg1), flags))
3430 return false;
3431 if (virtual_method_call_p (arg0))
3433 if (!virtual_method_call_p (arg1))
3434 return false;
3435 return types_same_for_odr (obj_type_ref_class (arg0),
3436 obj_type_ref_class (arg1));
3438 return false;
3440 default:
3441 return false;
3444 case tcc_vl_exp:
3445 switch (TREE_CODE (arg0))
3447 case CALL_EXPR:
3448 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3449 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3450 /* If not both CALL_EXPRs are either internal or normal function
3451 functions, then they are not equal. */
3452 return false;
3453 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3455 /* If the CALL_EXPRs call different internal functions, then they
3456 are not equal. */
3457 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3458 return false;
3460 else
3462 /* If the CALL_EXPRs call different functions, then they are not
3463 equal. */
3464 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3465 flags))
3466 return false;
3469 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3471 unsigned int cef = call_expr_flags (arg0);
3472 if (flags & OEP_PURE_SAME)
3473 cef &= ECF_CONST | ECF_PURE;
3474 else
3475 cef &= ECF_CONST;
3476 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3477 return false;
3480 /* Now see if all the arguments are the same. */
3482 const_call_expr_arg_iterator iter0, iter1;
3483 const_tree a0, a1;
3484 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3485 a1 = first_const_call_expr_arg (arg1, &iter1);
3486 a0 && a1;
3487 a0 = next_const_call_expr_arg (&iter0),
3488 a1 = next_const_call_expr_arg (&iter1))
3489 if (! operand_equal_p (a0, a1, flags))
3490 return false;
3492 /* If we get here and both argument lists are exhausted
3493 then the CALL_EXPRs are equal. */
3494 return ! (a0 || a1);
3496 default:
3497 return false;
3500 case tcc_declaration:
3501 /* Consider __builtin_sqrt equal to sqrt. */
3502 if (TREE_CODE (arg0) == FUNCTION_DECL)
3503 return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3504 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3505 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3506 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3508 if (DECL_P (arg0)
3509 && (flags & OEP_DECL_NAME)
3510 && (flags & OEP_LEXICOGRAPHIC))
3512 /* Consider decls with the same name equal. The caller needs
3513 to make sure they refer to the same entity (such as a function
3514 formal parameter). */
3515 tree a0name = DECL_NAME (arg0);
3516 tree a1name = DECL_NAME (arg1);
3517 const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3518 const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3519 return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3521 return false;
3523 case tcc_exceptional:
3524 if (TREE_CODE (arg0) == CONSTRUCTOR)
3526 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3527 return false;
3529 /* In GIMPLE constructors are used only to build vectors from
3530 elements. Individual elements in the constructor must be
3531 indexed in increasing order and form an initial sequence.
3533 We make no effort to compare constructors in generic.
3534 (see sem_variable::equals in ipa-icf which can do so for
3535 constants). */
3536 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3537 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3538 return false;
3540 /* Be sure that vectors constructed have the same representation.
3541 We only tested element precision and modes to match.
3542 Vectors may be BLKmode and thus also check that the number of
3543 parts match. */
3544 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3545 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3546 return false;
3548 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3549 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3550 unsigned int len = vec_safe_length (v0);
3552 if (len != vec_safe_length (v1))
3553 return false;
3555 for (unsigned int i = 0; i < len; i++)
3557 constructor_elt *c0 = &(*v0)[i];
3558 constructor_elt *c1 = &(*v1)[i];
3560 if (!operand_equal_p (c0->value, c1->value, flags)
3561 /* In GIMPLE the indexes can be either NULL or matching i.
3562 Double check this so we won't get false
3563 positives for GENERIC. */
3564 || (c0->index
3565 && (TREE_CODE (c0->index) != INTEGER_CST
3566 || compare_tree_int (c0->index, i)))
3567 || (c1->index
3568 && (TREE_CODE (c1->index) != INTEGER_CST
3569 || compare_tree_int (c1->index, i))))
3570 return false;
3572 return true;
3574 else if (TREE_CODE (arg0) == STATEMENT_LIST
3575 && (flags & OEP_LEXICOGRAPHIC))
3577 /* Compare the STATEMENT_LISTs. */
3578 tree_stmt_iterator tsi1, tsi2;
3579 tree body1 = CONST_CAST_TREE (arg0);
3580 tree body2 = CONST_CAST_TREE (arg1);
3581 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3582 tsi_next (&tsi1), tsi_next (&tsi2))
3584 /* The lists don't have the same number of statements. */
3585 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3586 return false;
3587 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3588 return true;
3589 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3590 flags & (OEP_LEXICOGRAPHIC
3591 | OEP_NO_HASH_CHECK)))
3592 return false;
3595 return false;
3597 case tcc_statement:
3598 switch (TREE_CODE (arg0))
3600 case RETURN_EXPR:
3601 if (flags & OEP_LEXICOGRAPHIC)
3602 return OP_SAME_WITH_NULL (0);
3603 return false;
3604 case DEBUG_BEGIN_STMT:
3605 if (flags & OEP_LEXICOGRAPHIC)
3606 return true;
3607 return false;
3608 default:
3609 return false;
3612 default:
3613 return false;
3616 #undef OP_SAME
3617 #undef OP_SAME_WITH_NULL
3620 /* Generate a hash value for an expression. This can be used iteratively
3621 by passing a previous result as the HSTATE argument. */
3623 void
3624 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3625 unsigned int flags)
3627 int i;
3628 enum tree_code code;
3629 enum tree_code_class tclass;
3631 if (t == NULL_TREE || t == error_mark_node)
3633 hstate.merge_hash (0);
3634 return;
3637 STRIP_ANY_LOCATION_WRAPPER (t);
3639 if (!(flags & OEP_ADDRESS_OF))
3640 STRIP_NOPS (t);
3642 code = TREE_CODE (t);
3644 switch (code)
3646 /* Alas, constants aren't shared, so we can't rely on pointer
3647 identity. */
3648 case VOID_CST:
3649 hstate.merge_hash (0);
3650 return;
3651 case INTEGER_CST:
3652 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3653 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3654 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3655 return;
3656 case REAL_CST:
3658 unsigned int val2;
3659 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3660 val2 = rvc_zero;
3661 else
3662 val2 = real_hash (TREE_REAL_CST_PTR (t));
3663 hstate.merge_hash (val2);
3664 return;
3666 case FIXED_CST:
3668 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3669 hstate.merge_hash (val2);
3670 return;
3672 case STRING_CST:
3673 hstate.add ((const void *) TREE_STRING_POINTER (t),
3674 TREE_STRING_LENGTH (t));
3675 return;
3676 case COMPLEX_CST:
3677 hash_operand (TREE_REALPART (t), hstate, flags);
3678 hash_operand (TREE_IMAGPART (t), hstate, flags);
3679 return;
3680 case VECTOR_CST:
3682 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3683 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3684 unsigned int count = vector_cst_encoded_nelts (t);
3685 for (unsigned int i = 0; i < count; ++i)
3686 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3687 return;
3689 case SSA_NAME:
3690 /* We can just compare by pointer. */
3691 hstate.add_hwi (SSA_NAME_VERSION (t));
3692 return;
3693 case PLACEHOLDER_EXPR:
3694 /* The node itself doesn't matter. */
3695 return;
3696 case BLOCK:
3697 case OMP_CLAUSE:
3698 /* Ignore. */
3699 return;
3700 case TREE_LIST:
3701 /* A list of expressions, for a CALL_EXPR or as the elements of a
3702 VECTOR_CST. */
3703 for (; t; t = TREE_CHAIN (t))
3704 hash_operand (TREE_VALUE (t), hstate, flags);
3705 return;
3706 case CONSTRUCTOR:
3708 unsigned HOST_WIDE_INT idx;
3709 tree field, value;
3710 flags &= ~OEP_ADDRESS_OF;
3711 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3712 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3714 /* In GIMPLE the indexes can be either NULL or matching i. */
3715 if (field == NULL_TREE)
3716 field = bitsize_int (idx);
3717 hash_operand (field, hstate, flags);
3718 hash_operand (value, hstate, flags);
3720 return;
3722 case STATEMENT_LIST:
3724 tree_stmt_iterator i;
3725 for (i = tsi_start (CONST_CAST_TREE (t));
3726 !tsi_end_p (i); tsi_next (&i))
3727 hash_operand (tsi_stmt (i), hstate, flags);
3728 return;
3730 case TREE_VEC:
3731 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3732 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3733 return;
3734 case IDENTIFIER_NODE:
3735 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3736 return;
3737 case FUNCTION_DECL:
3738 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3739 Otherwise nodes that compare equal according to operand_equal_p might
3740 get different hash codes. However, don't do this for machine specific
3741 or front end builtins, since the function code is overloaded in those
3742 cases. */
3743 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3744 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3746 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3747 code = TREE_CODE (t);
3749 /* FALL THROUGH */
3750 default:
3751 if (POLY_INT_CST_P (t))
3753 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3754 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3755 return;
3757 tclass = TREE_CODE_CLASS (code);
3759 if (tclass == tcc_declaration)
3761 /* DECL's have a unique ID */
3762 hstate.add_hwi (DECL_UID (t));
3764 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3766 /* For comparisons that can be swapped, use the lower
3767 tree code. */
3768 enum tree_code ccode = swap_tree_comparison (code);
3769 if (code < ccode)
3770 ccode = code;
3771 hstate.add_object (ccode);
3772 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3773 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3775 else if (CONVERT_EXPR_CODE_P (code))
3777 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3778 operand_equal_p. */
3779 enum tree_code ccode = NOP_EXPR;
3780 hstate.add_object (ccode);
3782 /* Don't hash the type, that can lead to having nodes which
3783 compare equal according to operand_equal_p, but which
3784 have different hash codes. Make sure to include signedness
3785 in the hash computation. */
3786 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3787 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3789 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3790 else if (code == MEM_REF
3791 && (flags & OEP_ADDRESS_OF) != 0
3792 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3793 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3794 && integer_zerop (TREE_OPERAND (t, 1)))
3795 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3796 hstate, flags);
3797 /* Don't ICE on FE specific trees, or their arguments etc.
3798 during operand_equal_p hash verification. */
3799 else if (!IS_EXPR_CODE_CLASS (tclass))
3800 gcc_assert (flags & OEP_HASH_CHECK);
3801 else
3803 unsigned int sflags = flags;
3805 hstate.add_object (code);
3807 switch (code)
3809 case ADDR_EXPR:
3810 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3811 flags |= OEP_ADDRESS_OF;
3812 sflags = flags;
3813 break;
3815 case INDIRECT_REF:
3816 case MEM_REF:
3817 case TARGET_MEM_REF:
3818 flags &= ~OEP_ADDRESS_OF;
3819 sflags = flags;
3820 break;
3822 case COMPONENT_REF:
3823 if (sflags & OEP_ADDRESS_OF)
3825 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3826 if (TREE_OPERAND (t, 2))
3827 hash_operand (TREE_OPERAND (t, 2), hstate,
3828 flags & ~OEP_ADDRESS_OF);
3829 else
3831 tree field = TREE_OPERAND (t, 1);
3832 hash_operand (DECL_FIELD_OFFSET (field),
3833 hstate, flags & ~OEP_ADDRESS_OF);
3834 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3835 hstate, flags & ~OEP_ADDRESS_OF);
3837 return;
3839 break;
3840 case ARRAY_REF:
3841 case ARRAY_RANGE_REF:
3842 case BIT_FIELD_REF:
3843 sflags &= ~OEP_ADDRESS_OF;
3844 break;
3846 case COND_EXPR:
3847 flags &= ~OEP_ADDRESS_OF;
3848 break;
3850 case WIDEN_MULT_PLUS_EXPR:
3851 case WIDEN_MULT_MINUS_EXPR:
3853 /* The multiplication operands are commutative. */
3854 inchash::hash one, two;
3855 hash_operand (TREE_OPERAND (t, 0), one, flags);
3856 hash_operand (TREE_OPERAND (t, 1), two, flags);
3857 hstate.add_commutative (one, two);
3858 hash_operand (TREE_OPERAND (t, 2), two, flags);
3859 return;
3862 case CALL_EXPR:
3863 if (CALL_EXPR_FN (t) == NULL_TREE)
3864 hstate.add_int (CALL_EXPR_IFN (t));
3865 break;
3867 case TARGET_EXPR:
3868 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3869 Usually different TARGET_EXPRs just should use
3870 different temporaries in their slots. */
3871 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3872 return;
3874 case OBJ_TYPE_REF:
3875 /* Virtual table reference. */
3876 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3877 flags &= ~OEP_ADDRESS_OF;
3878 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3879 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3880 if (!virtual_method_call_p (t))
3881 return;
3882 if (tree c = obj_type_ref_class (t))
3884 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3885 /* We compute mangled names only when free_lang_data is run.
3886 In that case we can hash precisely. */
3887 if (TREE_CODE (c) == TYPE_DECL
3888 && DECL_ASSEMBLER_NAME_SET_P (c))
3889 hstate.add_object
3890 (IDENTIFIER_HASH_VALUE
3891 (DECL_ASSEMBLER_NAME (c)));
3893 return;
3894 default:
3895 break;
3898 /* Don't hash the type, that can lead to having nodes which
3899 compare equal according to operand_equal_p, but which
3900 have different hash codes. */
3901 if (code == NON_LVALUE_EXPR)
3903 /* Make sure to include signness in the hash computation. */
3904 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3905 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3908 else if (commutative_tree_code (code))
3910 /* It's a commutative expression. We want to hash it the same
3911 however it appears. We do this by first hashing both operands
3912 and then rehashing based on the order of their independent
3913 hashes. */
3914 inchash::hash one, two;
3915 hash_operand (TREE_OPERAND (t, 0), one, flags);
3916 hash_operand (TREE_OPERAND (t, 1), two, flags);
3917 hstate.add_commutative (one, two);
3919 else
3920 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3921 hash_operand (TREE_OPERAND (t, i), hstate,
3922 i == 0 ? flags : sflags);
3924 return;
3928 bool
3929 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3930 unsigned int flags, bool *ret)
3932 /* When checking and unless comparing DECL names, verify that if
3933 the outermost operand_equal_p call returns non-zero then ARG0
3934 and ARG1 have the same hash value. */
3935 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3937 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3939 if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
3941 inchash::hash hstate0 (0), hstate1 (0);
3942 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3943 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3944 hashval_t h0 = hstate0.end ();
3945 hashval_t h1 = hstate1.end ();
3946 gcc_assert (h0 == h1);
3948 *ret = true;
3950 else
3951 *ret = false;
3953 return true;
3956 return false;
3960 static operand_compare default_compare_instance;
3962 /* Conveinece wrapper around operand_compare class because usually we do
3963 not need to play with the valueizer. */
3965 bool
3966 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3968 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3971 namespace inchash
3974 /* Generate a hash value for an expression. This can be used iteratively
3975 by passing a previous result as the HSTATE argument.
3977 This function is intended to produce the same hash for expressions which
3978 would compare equal using operand_equal_p. */
3979 void
3980 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3982 default_compare_instance.hash_operand (t, hstate, flags);
3987 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3988 with a different signedness or a narrower precision. */
3990 static bool
3991 operand_equal_for_comparison_p (tree arg0, tree arg1)
3993 if (operand_equal_p (arg0, arg1, 0))
3994 return true;
3996 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3997 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3998 return false;
4000 /* Discard any conversions that don't change the modes of ARG0 and ARG1
4001 and see if the inner values are the same. This removes any
4002 signedness comparison, which doesn't matter here. */
4003 tree op0 = arg0;
4004 tree op1 = arg1;
4005 STRIP_NOPS (op0);
4006 STRIP_NOPS (op1);
4007 if (operand_equal_p (op0, op1, 0))
4008 return true;
4010 /* Discard a single widening conversion from ARG1 and see if the inner
4011 value is the same as ARG0. */
4012 if (CONVERT_EXPR_P (arg1)
4013 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4014 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4015 < TYPE_PRECISION (TREE_TYPE (arg1))
4016 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4017 return true;
4019 return false;
4022 /* See if ARG is an expression that is either a comparison or is performing
4023 arithmetic on comparisons. The comparisons must only be comparing
4024 two different values, which will be stored in *CVAL1 and *CVAL2; if
4025 they are nonzero it means that some operands have already been found.
4026 No variables may be used anywhere else in the expression except in the
4027 comparisons.
4029 If this is true, return 1. Otherwise, return zero. */
4031 static bool
4032 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4034 enum tree_code code = TREE_CODE (arg);
4035 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4037 /* We can handle some of the tcc_expression cases here. */
4038 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4039 tclass = tcc_unary;
4040 else if (tclass == tcc_expression
4041 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4042 || code == COMPOUND_EXPR))
4043 tclass = tcc_binary;
4045 switch (tclass)
4047 case tcc_unary:
4048 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4050 case tcc_binary:
4051 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4052 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4054 case tcc_constant:
4055 return true;
4057 case tcc_expression:
4058 if (code == COND_EXPR)
4059 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4060 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4061 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4062 return false;
4064 case tcc_comparison:
4065 /* First see if we can handle the first operand, then the second. For
4066 the second operand, we know *CVAL1 can't be zero. It must be that
4067 one side of the comparison is each of the values; test for the
4068 case where this isn't true by failing if the two operands
4069 are the same. */
4071 if (operand_equal_p (TREE_OPERAND (arg, 0),
4072 TREE_OPERAND (arg, 1), 0))
4073 return false;
4075 if (*cval1 == 0)
4076 *cval1 = TREE_OPERAND (arg, 0);
4077 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4079 else if (*cval2 == 0)
4080 *cval2 = TREE_OPERAND (arg, 0);
4081 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4083 else
4084 return false;
4086 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4088 else if (*cval2 == 0)
4089 *cval2 = TREE_OPERAND (arg, 1);
4090 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4092 else
4093 return false;
4095 return true;
4097 default:
4098 return false;
4102 /* ARG is a tree that is known to contain just arithmetic operations and
4103 comparisons. Evaluate the operations in the tree substituting NEW0 for
4104 any occurrence of OLD0 as an operand of a comparison and likewise for
4105 NEW1 and OLD1. */
4107 static tree
4108 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4109 tree old1, tree new1)
4111 tree type = TREE_TYPE (arg);
4112 enum tree_code code = TREE_CODE (arg);
4113 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4115 /* We can handle some of the tcc_expression cases here. */
4116 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4117 tclass = tcc_unary;
4118 else if (tclass == tcc_expression
4119 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4120 tclass = tcc_binary;
4122 switch (tclass)
4124 case tcc_unary:
4125 return fold_build1_loc (loc, code, type,
4126 eval_subst (loc, TREE_OPERAND (arg, 0),
4127 old0, new0, old1, new1));
4129 case tcc_binary:
4130 return fold_build2_loc (loc, code, type,
4131 eval_subst (loc, TREE_OPERAND (arg, 0),
4132 old0, new0, old1, new1),
4133 eval_subst (loc, TREE_OPERAND (arg, 1),
4134 old0, new0, old1, new1));
4136 case tcc_expression:
4137 switch (code)
4139 case SAVE_EXPR:
4140 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4141 old1, new1);
4143 case COMPOUND_EXPR:
4144 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4145 old1, new1);
4147 case COND_EXPR:
4148 return fold_build3_loc (loc, code, type,
4149 eval_subst (loc, TREE_OPERAND (arg, 0),
4150 old0, new0, old1, new1),
4151 eval_subst (loc, TREE_OPERAND (arg, 1),
4152 old0, new0, old1, new1),
4153 eval_subst (loc, TREE_OPERAND (arg, 2),
4154 old0, new0, old1, new1));
4155 default:
4156 break;
4158 /* Fall through - ??? */
4160 case tcc_comparison:
4162 tree arg0 = TREE_OPERAND (arg, 0);
4163 tree arg1 = TREE_OPERAND (arg, 1);
4165 /* We need to check both for exact equality and tree equality. The
4166 former will be true if the operand has a side-effect. In that
4167 case, we know the operand occurred exactly once. */
4169 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4170 arg0 = new0;
4171 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4172 arg0 = new1;
4174 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4175 arg1 = new0;
4176 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4177 arg1 = new1;
4179 return fold_build2_loc (loc, code, type, arg0, arg1);
4182 default:
4183 return arg;
4187 /* Return a tree for the case when the result of an expression is RESULT
4188 converted to TYPE and OMITTED was previously an operand of the expression
4189 but is now not needed (e.g., we folded OMITTED * 0).
4191 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4192 the conversion of RESULT to TYPE. */
4194 tree
4195 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4197 tree t = fold_convert_loc (loc, type, result);
4199 /* If the resulting operand is an empty statement, just return the omitted
4200 statement casted to void. */
4201 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4202 return build1_loc (loc, NOP_EXPR, void_type_node,
4203 fold_ignored_result (omitted));
4205 if (TREE_SIDE_EFFECTS (omitted))
4206 return build2_loc (loc, COMPOUND_EXPR, type,
4207 fold_ignored_result (omitted), t);
4209 return non_lvalue_loc (loc, t);
4212 /* Return a tree for the case when the result of an expression is RESULT
4213 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4214 of the expression but are now not needed.
4216 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4217 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4218 evaluated before OMITTED2. Otherwise, if neither has side effects,
4219 just do the conversion of RESULT to TYPE. */
4221 tree
4222 omit_two_operands_loc (location_t loc, tree type, tree result,
4223 tree omitted1, tree omitted2)
4225 tree t = fold_convert_loc (loc, type, result);
4227 if (TREE_SIDE_EFFECTS (omitted2))
4228 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4229 if (TREE_SIDE_EFFECTS (omitted1))
4230 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4232 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4236 /* Return a simplified tree node for the truth-negation of ARG. This
4237 never alters ARG itself. We assume that ARG is an operation that
4238 returns a truth value (0 or 1).
4240 FIXME: one would think we would fold the result, but it causes
4241 problems with the dominator optimizer. */
4243 static tree
4244 fold_truth_not_expr (location_t loc, tree arg)
4246 tree type = TREE_TYPE (arg);
4247 enum tree_code code = TREE_CODE (arg);
4248 location_t loc1, loc2;
4250 /* If this is a comparison, we can simply invert it, except for
4251 floating-point non-equality comparisons, in which case we just
4252 enclose a TRUTH_NOT_EXPR around what we have. */
4254 if (TREE_CODE_CLASS (code) == tcc_comparison)
4256 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4257 if (FLOAT_TYPE_P (op_type)
4258 && flag_trapping_math
4259 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4260 && code != NE_EXPR && code != EQ_EXPR)
4261 return NULL_TREE;
4263 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4264 if (code == ERROR_MARK)
4265 return NULL_TREE;
4267 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4268 TREE_OPERAND (arg, 1));
4269 copy_warning (ret, arg);
4270 return ret;
4273 switch (code)
4275 case INTEGER_CST:
4276 return constant_boolean_node (integer_zerop (arg), type);
4278 case TRUTH_AND_EXPR:
4279 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4280 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4281 return build2_loc (loc, TRUTH_OR_EXPR, type,
4282 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4283 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4285 case TRUTH_OR_EXPR:
4286 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4287 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4288 return build2_loc (loc, TRUTH_AND_EXPR, type,
4289 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4290 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4292 case TRUTH_XOR_EXPR:
4293 /* Here we can invert either operand. We invert the first operand
4294 unless the second operand is a TRUTH_NOT_EXPR in which case our
4295 result is the XOR of the first operand with the inside of the
4296 negation of the second operand. */
4298 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4299 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4300 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4301 else
4302 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4303 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4304 TREE_OPERAND (arg, 1));
4306 case TRUTH_ANDIF_EXPR:
4307 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4308 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4309 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4310 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4311 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4313 case TRUTH_ORIF_EXPR:
4314 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4315 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4316 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4317 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4318 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4320 case TRUTH_NOT_EXPR:
4321 return TREE_OPERAND (arg, 0);
4323 case COND_EXPR:
4325 tree arg1 = TREE_OPERAND (arg, 1);
4326 tree arg2 = TREE_OPERAND (arg, 2);
4328 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4329 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4331 /* A COND_EXPR may have a throw as one operand, which
4332 then has void type. Just leave void operands
4333 as they are. */
4334 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4335 VOID_TYPE_P (TREE_TYPE (arg1))
4336 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4337 VOID_TYPE_P (TREE_TYPE (arg2))
4338 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4341 case COMPOUND_EXPR:
4342 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4343 return build2_loc (loc, COMPOUND_EXPR, type,
4344 TREE_OPERAND (arg, 0),
4345 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4347 case NON_LVALUE_EXPR:
4348 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4349 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4351 CASE_CONVERT:
4352 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4353 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4355 /* fall through */
4357 case FLOAT_EXPR:
4358 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4359 return build1_loc (loc, TREE_CODE (arg), type,
4360 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4362 case BIT_AND_EXPR:
4363 if (!integer_onep (TREE_OPERAND (arg, 1)))
4364 return NULL_TREE;
4365 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4367 case SAVE_EXPR:
4368 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4370 case CLEANUP_POINT_EXPR:
4371 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4372 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4373 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4375 default:
4376 return NULL_TREE;
4380 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4381 assume that ARG is an operation that returns a truth value (0 or 1
4382 for scalars, 0 or -1 for vectors). Return the folded expression if
4383 folding is successful. Otherwise, return NULL_TREE. */
4385 static tree
4386 fold_invert_truthvalue (location_t loc, tree arg)
4388 tree type = TREE_TYPE (arg);
4389 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4390 ? BIT_NOT_EXPR
4391 : TRUTH_NOT_EXPR,
4392 type, arg);
4395 /* Return a simplified tree node for the truth-negation of ARG. This
4396 never alters ARG itself. We assume that ARG is an operation that
4397 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4399 tree
4400 invert_truthvalue_loc (location_t loc, tree arg)
4402 if (TREE_CODE (arg) == ERROR_MARK)
4403 return arg;
4405 tree type = TREE_TYPE (arg);
4406 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4407 ? BIT_NOT_EXPR
4408 : TRUTH_NOT_EXPR,
4409 type, arg);
4412 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4413 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4414 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4415 is the original memory reference used to preserve the alias set of
4416 the access. */
4418 static tree
4419 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4420 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4421 int unsignedp, int reversep)
4423 tree result, bftype;
4425 /* Attempt not to lose the access path if possible. */
4426 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4428 tree ninner = TREE_OPERAND (orig_inner, 0);
4429 machine_mode nmode;
4430 poly_int64 nbitsize, nbitpos;
4431 tree noffset;
4432 int nunsignedp, nreversep, nvolatilep = 0;
4433 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4434 &noffset, &nmode, &nunsignedp,
4435 &nreversep, &nvolatilep);
4436 if (base == inner
4437 && noffset == NULL_TREE
4438 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4439 && !reversep
4440 && !nreversep
4441 && !nvolatilep)
4443 inner = ninner;
4444 bitpos -= nbitpos;
4448 alias_set_type iset = get_alias_set (orig_inner);
4449 if (iset == 0 && get_alias_set (inner) != iset)
4450 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4451 build_fold_addr_expr (inner),
4452 build_int_cst (ptr_type_node, 0));
4454 if (known_eq (bitpos, 0) && !reversep)
4456 tree size = TYPE_SIZE (TREE_TYPE (inner));
4457 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4458 || POINTER_TYPE_P (TREE_TYPE (inner)))
4459 && tree_fits_shwi_p (size)
4460 && tree_to_shwi (size) == bitsize)
4461 return fold_convert_loc (loc, type, inner);
4464 bftype = type;
4465 if (TYPE_PRECISION (bftype) != bitsize
4466 || TYPE_UNSIGNED (bftype) == !unsignedp)
4467 bftype = build_nonstandard_integer_type (bitsize, 0);
4469 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4470 bitsize_int (bitsize), bitsize_int (bitpos));
4471 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4473 if (bftype != type)
4474 result = fold_convert_loc (loc, type, result);
4476 return result;
4479 /* Optimize a bit-field compare.
4481 There are two cases: First is a compare against a constant and the
4482 second is a comparison of two items where the fields are at the same
4483 bit position relative to the start of a chunk (byte, halfword, word)
4484 large enough to contain it. In these cases we can avoid the shift
4485 implicit in bitfield extractions.
4487 For constants, we emit a compare of the shifted constant with the
4488 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4489 compared. For two fields at the same position, we do the ANDs with the
4490 similar mask and compare the result of the ANDs.
4492 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4493 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4494 are the left and right operands of the comparison, respectively.
4496 If the optimization described above can be done, we return the resulting
4497 tree. Otherwise we return zero. */
4499 static tree
4500 optimize_bit_field_compare (location_t loc, enum tree_code code,
4501 tree compare_type, tree lhs, tree rhs)
4503 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4504 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4505 tree type = TREE_TYPE (lhs);
4506 tree unsigned_type;
4507 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4508 machine_mode lmode, rmode;
4509 scalar_int_mode nmode;
4510 int lunsignedp, runsignedp;
4511 int lreversep, rreversep;
4512 int lvolatilep = 0, rvolatilep = 0;
4513 tree linner, rinner = NULL_TREE;
4514 tree mask;
4515 tree offset;
4517 /* Get all the information about the extractions being done. If the bit size
4518 is the same as the size of the underlying object, we aren't doing an
4519 extraction at all and so can do nothing. We also don't want to
4520 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4521 then will no longer be able to replace it. */
4522 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4523 &lunsignedp, &lreversep, &lvolatilep);
4524 if (linner == lhs
4525 || !known_size_p (plbitsize)
4526 || !plbitsize.is_constant (&lbitsize)
4527 || !plbitpos.is_constant (&lbitpos)
4528 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4529 || offset != 0
4530 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4531 || lvolatilep)
4532 return 0;
4534 if (const_p)
4535 rreversep = lreversep;
4536 else
4538 /* If this is not a constant, we can only do something if bit positions,
4539 sizes, signedness and storage order are the same. */
4540 rinner
4541 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4542 &runsignedp, &rreversep, &rvolatilep);
4544 if (rinner == rhs
4545 || maybe_ne (lbitpos, rbitpos)
4546 || maybe_ne (lbitsize, rbitsize)
4547 || lunsignedp != runsignedp
4548 || lreversep != rreversep
4549 || offset != 0
4550 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4551 || rvolatilep)
4552 return 0;
4555 /* Honor the C++ memory model and mimic what RTL expansion does. */
4556 poly_uint64 bitstart = 0;
4557 poly_uint64 bitend = 0;
4558 if (TREE_CODE (lhs) == COMPONENT_REF)
4560 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4561 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4562 return 0;
4565 /* See if we can find a mode to refer to this field. We should be able to,
4566 but fail if we can't. */
4567 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4568 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4569 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4570 TYPE_ALIGN (TREE_TYPE (rinner))),
4571 BITS_PER_WORD, false, &nmode))
4572 return 0;
4574 /* Set signed and unsigned types of the precision of this mode for the
4575 shifts below. */
4576 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4578 /* Compute the bit position and size for the new reference and our offset
4579 within it. If the new reference is the same size as the original, we
4580 won't optimize anything, so return zero. */
4581 nbitsize = GET_MODE_BITSIZE (nmode);
4582 nbitpos = lbitpos & ~ (nbitsize - 1);
4583 lbitpos -= nbitpos;
4584 if (nbitsize == lbitsize)
4585 return 0;
4587 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4588 lbitpos = nbitsize - lbitsize - lbitpos;
4590 /* Make the mask to be used against the extracted field. */
4591 mask = build_int_cst_type (unsigned_type, -1);
4592 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4593 mask = const_binop (RSHIFT_EXPR, mask,
4594 size_int (nbitsize - lbitsize - lbitpos));
4596 if (! const_p)
4598 if (nbitpos < 0)
4599 return 0;
4601 /* If not comparing with constant, just rework the comparison
4602 and return. */
4603 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4604 nbitsize, nbitpos, 1, lreversep);
4605 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4606 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4607 nbitsize, nbitpos, 1, rreversep);
4608 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4609 return fold_build2_loc (loc, code, compare_type, t1, t2);
4612 /* Otherwise, we are handling the constant case. See if the constant is too
4613 big for the field. Warn and return a tree for 0 (false) if so. We do
4614 this not only for its own sake, but to avoid having to test for this
4615 error case below. If we didn't, we might generate wrong code.
4617 For unsigned fields, the constant shifted right by the field length should
4618 be all zero. For signed fields, the high-order bits should agree with
4619 the sign bit. */
4621 if (lunsignedp)
4623 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4625 warning (0, "comparison is always %d due to width of bit-field",
4626 code == NE_EXPR);
4627 return constant_boolean_node (code == NE_EXPR, compare_type);
4630 else
4632 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4633 if (tem != 0 && tem != -1)
4635 warning (0, "comparison is always %d due to width of bit-field",
4636 code == NE_EXPR);
4637 return constant_boolean_node (code == NE_EXPR, compare_type);
4641 if (nbitpos < 0)
4642 return 0;
4644 /* Single-bit compares should always be against zero. */
4645 if (lbitsize == 1 && ! integer_zerop (rhs))
4647 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4648 rhs = build_int_cst (type, 0);
4651 /* Make a new bitfield reference, shift the constant over the
4652 appropriate number of bits and mask it with the computed mask
4653 (in case this was a signed field). If we changed it, make a new one. */
4654 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4655 nbitsize, nbitpos, 1, lreversep);
4657 rhs = const_binop (BIT_AND_EXPR,
4658 const_binop (LSHIFT_EXPR,
4659 fold_convert_loc (loc, unsigned_type, rhs),
4660 size_int (lbitpos)),
4661 mask);
4663 lhs = build2_loc (loc, code, compare_type,
4664 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4665 return lhs;
4668 /* Subroutine for fold_truth_andor_1: decode a field reference.
4670 If EXP is a comparison reference, we return the innermost reference.
4672 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4673 set to the starting bit number.
4675 If the innermost field can be completely contained in a mode-sized
4676 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4678 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4679 otherwise it is not changed.
4681 *PUNSIGNEDP is set to the signedness of the field.
4683 *PREVERSEP is set to the storage order of the field.
4685 *PMASK is set to the mask used. This is either contained in a
4686 BIT_AND_EXPR or derived from the width of the field.
4688 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4690 Return 0 if this is not a component reference or is one that we can't
4691 do anything with. */
4693 static tree
4694 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4695 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4696 int *punsignedp, int *preversep, int *pvolatilep,
4697 tree *pmask, tree *pand_mask)
4699 tree exp = *exp_;
4700 tree outer_type = 0;
4701 tree and_mask = 0;
4702 tree mask, inner, offset;
4703 tree unsigned_type;
4704 unsigned int precision;
4706 /* All the optimizations using this function assume integer fields.
4707 There are problems with FP fields since the type_for_size call
4708 below can fail for, e.g., XFmode. */
4709 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4710 return NULL_TREE;
4712 /* We are interested in the bare arrangement of bits, so strip everything
4713 that doesn't affect the machine mode. However, record the type of the
4714 outermost expression if it may matter below. */
4715 if (CONVERT_EXPR_P (exp)
4716 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4717 outer_type = TREE_TYPE (exp);
4718 STRIP_NOPS (exp);
4720 if (TREE_CODE (exp) == BIT_AND_EXPR)
4722 and_mask = TREE_OPERAND (exp, 1);
4723 exp = TREE_OPERAND (exp, 0);
4724 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4725 if (TREE_CODE (and_mask) != INTEGER_CST)
4726 return NULL_TREE;
4729 poly_int64 poly_bitsize, poly_bitpos;
4730 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4731 pmode, punsignedp, preversep, pvolatilep);
4732 if ((inner == exp && and_mask == 0)
4733 || !poly_bitsize.is_constant (pbitsize)
4734 || !poly_bitpos.is_constant (pbitpos)
4735 || *pbitsize < 0
4736 || offset != 0
4737 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4738 /* Reject out-of-bound accesses (PR79731). */
4739 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4740 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4741 *pbitpos + *pbitsize) < 0))
4742 return NULL_TREE;
4744 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4745 if (unsigned_type == NULL_TREE)
4746 return NULL_TREE;
4748 *exp_ = exp;
4750 /* If the number of bits in the reference is the same as the bitsize of
4751 the outer type, then the outer type gives the signedness. Otherwise
4752 (in case of a small bitfield) the signedness is unchanged. */
4753 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4754 *punsignedp = TYPE_UNSIGNED (outer_type);
4756 /* Compute the mask to access the bitfield. */
4757 precision = TYPE_PRECISION (unsigned_type);
4759 mask = build_int_cst_type (unsigned_type, -1);
4761 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4762 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4764 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4765 if (and_mask != 0)
4766 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4767 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4769 *pmask = mask;
4770 *pand_mask = and_mask;
4771 return inner;
4774 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4775 bit positions and MASK is SIGNED. */
4777 static bool
4778 all_ones_mask_p (const_tree mask, unsigned int size)
4780 tree type = TREE_TYPE (mask);
4781 unsigned int precision = TYPE_PRECISION (type);
4783 /* If this function returns true when the type of the mask is
4784 UNSIGNED, then there will be errors. In particular see
4785 gcc.c-torture/execute/990326-1.c. There does not appear to be
4786 any documentation paper trail as to why this is so. But the pre
4787 wide-int worked with that restriction and it has been preserved
4788 here. */
4789 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4790 return false;
4792 return wi::mask (size, false, precision) == wi::to_wide (mask);
4795 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4796 represents the sign bit of EXP's type. If EXP represents a sign
4797 or zero extension, also test VAL against the unextended type.
4798 The return value is the (sub)expression whose sign bit is VAL,
4799 or NULL_TREE otherwise. */
4801 tree
4802 sign_bit_p (tree exp, const_tree val)
4804 int width;
4805 tree t;
4807 /* Tree EXP must have an integral type. */
4808 t = TREE_TYPE (exp);
4809 if (! INTEGRAL_TYPE_P (t))
4810 return NULL_TREE;
4812 /* Tree VAL must be an integer constant. */
4813 if (TREE_CODE (val) != INTEGER_CST
4814 || TREE_OVERFLOW (val))
4815 return NULL_TREE;
4817 width = TYPE_PRECISION (t);
4818 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4819 return exp;
4821 /* Handle extension from a narrower type. */
4822 if (TREE_CODE (exp) == NOP_EXPR
4823 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4824 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4826 return NULL_TREE;
4829 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4830 to be evaluated unconditionally. */
4832 static bool
4833 simple_operand_p (const_tree exp)
4835 /* Strip any conversions that don't change the machine mode. */
4836 STRIP_NOPS (exp);
4838 return (CONSTANT_CLASS_P (exp)
4839 || TREE_CODE (exp) == SSA_NAME
4840 || (DECL_P (exp)
4841 && ! TREE_ADDRESSABLE (exp)
4842 && ! TREE_THIS_VOLATILE (exp)
4843 && ! DECL_NONLOCAL (exp)
4844 /* Don't regard global variables as simple. They may be
4845 allocated in ways unknown to the compiler (shared memory,
4846 #pragma weak, etc). */
4847 && ! TREE_PUBLIC (exp)
4848 && ! DECL_EXTERNAL (exp)
4849 /* Weakrefs are not safe to be read, since they can be NULL.
4850 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4851 have DECL_WEAK flag set. */
4852 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4853 /* Loading a static variable is unduly expensive, but global
4854 registers aren't expensive. */
4855 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4858 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4859 to be evaluated unconditionally.
4860 I addition to simple_operand_p, we assume that comparisons, conversions,
4861 and logic-not operations are simple, if their operands are simple, too. */
4863 static bool
4864 simple_operand_p_2 (tree exp)
4866 enum tree_code code;
4868 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4869 return false;
4871 while (CONVERT_EXPR_P (exp))
4872 exp = TREE_OPERAND (exp, 0);
4874 code = TREE_CODE (exp);
4876 if (TREE_CODE_CLASS (code) == tcc_comparison)
4877 return (simple_operand_p (TREE_OPERAND (exp, 0))
4878 && simple_operand_p (TREE_OPERAND (exp, 1)));
4880 if (code == TRUTH_NOT_EXPR)
4881 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4883 return simple_operand_p (exp);
4887 /* The following functions are subroutines to fold_range_test and allow it to
4888 try to change a logical combination of comparisons into a range test.
4890 For example, both
4891 X == 2 || X == 3 || X == 4 || X == 5
4893 X >= 2 && X <= 5
4894 are converted to
4895 (unsigned) (X - 2) <= 3
4897 We describe each set of comparisons as being either inside or outside
4898 a range, using a variable named like IN_P, and then describe the
4899 range with a lower and upper bound. If one of the bounds is omitted,
4900 it represents either the highest or lowest value of the type.
4902 In the comments below, we represent a range by two numbers in brackets
4903 preceded by a "+" to designate being inside that range, or a "-" to
4904 designate being outside that range, so the condition can be inverted by
4905 flipping the prefix. An omitted bound is represented by a "-". For
4906 example, "- [-, 10]" means being outside the range starting at the lowest
4907 possible value and ending at 10, in other words, being greater than 10.
4908 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4909 always false.
4911 We set up things so that the missing bounds are handled in a consistent
4912 manner so neither a missing bound nor "true" and "false" need to be
4913 handled using a special case. */
4915 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4916 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4917 and UPPER1_P are nonzero if the respective argument is an upper bound
4918 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4919 must be specified for a comparison. ARG1 will be converted to ARG0's
4920 type if both are specified. */
4922 static tree
4923 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4924 tree arg1, int upper1_p)
4926 tree tem;
4927 int result;
4928 int sgn0, sgn1;
4930 /* If neither arg represents infinity, do the normal operation.
4931 Else, if not a comparison, return infinity. Else handle the special
4932 comparison rules. Note that most of the cases below won't occur, but
4933 are handled for consistency. */
4935 if (arg0 != 0 && arg1 != 0)
4937 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4938 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4939 STRIP_NOPS (tem);
4940 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4943 if (TREE_CODE_CLASS (code) != tcc_comparison)
4944 return 0;
4946 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4947 for neither. In real maths, we cannot assume open ended ranges are
4948 the same. But, this is computer arithmetic, where numbers are finite.
4949 We can therefore make the transformation of any unbounded range with
4950 the value Z, Z being greater than any representable number. This permits
4951 us to treat unbounded ranges as equal. */
4952 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4953 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4954 switch (code)
4956 case EQ_EXPR:
4957 result = sgn0 == sgn1;
4958 break;
4959 case NE_EXPR:
4960 result = sgn0 != sgn1;
4961 break;
4962 case LT_EXPR:
4963 result = sgn0 < sgn1;
4964 break;
4965 case LE_EXPR:
4966 result = sgn0 <= sgn1;
4967 break;
4968 case GT_EXPR:
4969 result = sgn0 > sgn1;
4970 break;
4971 case GE_EXPR:
4972 result = sgn0 >= sgn1;
4973 break;
4974 default:
4975 gcc_unreachable ();
4978 return constant_boolean_node (result, type);
4981 /* Helper routine for make_range. Perform one step for it, return
4982 new expression if the loop should continue or NULL_TREE if it should
4983 stop. */
4985 tree
4986 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4987 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4988 bool *strict_overflow_p)
4990 tree arg0_type = TREE_TYPE (arg0);
4991 tree n_low, n_high, low = *p_low, high = *p_high;
4992 int in_p = *p_in_p, n_in_p;
4994 switch (code)
4996 case TRUTH_NOT_EXPR:
4997 /* We can only do something if the range is testing for zero. */
4998 if (low == NULL_TREE || high == NULL_TREE
4999 || ! integer_zerop (low) || ! integer_zerop (high))
5000 return NULL_TREE;
5001 *p_in_p = ! in_p;
5002 return arg0;
5004 case EQ_EXPR: case NE_EXPR:
5005 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5006 /* We can only do something if the range is testing for zero
5007 and if the second operand is an integer constant. Note that
5008 saying something is "in" the range we make is done by
5009 complementing IN_P since it will set in the initial case of
5010 being not equal to zero; "out" is leaving it alone. */
5011 if (low == NULL_TREE || high == NULL_TREE
5012 || ! integer_zerop (low) || ! integer_zerop (high)
5013 || TREE_CODE (arg1) != INTEGER_CST)
5014 return NULL_TREE;
5016 switch (code)
5018 case NE_EXPR: /* - [c, c] */
5019 low = high = arg1;
5020 break;
5021 case EQ_EXPR: /* + [c, c] */
5022 in_p = ! in_p, low = high = arg1;
5023 break;
5024 case GT_EXPR: /* - [-, c] */
5025 low = 0, high = arg1;
5026 break;
5027 case GE_EXPR: /* + [c, -] */
5028 in_p = ! in_p, low = arg1, high = 0;
5029 break;
5030 case LT_EXPR: /* - [c, -] */
5031 low = arg1, high = 0;
5032 break;
5033 case LE_EXPR: /* + [-, c] */
5034 in_p = ! in_p, low = 0, high = arg1;
5035 break;
5036 default:
5037 gcc_unreachable ();
5040 /* If this is an unsigned comparison, we also know that EXP is
5041 greater than or equal to zero. We base the range tests we make
5042 on that fact, so we record it here so we can parse existing
5043 range tests. We test arg0_type since often the return type
5044 of, e.g. EQ_EXPR, is boolean. */
5045 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5047 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5048 in_p, low, high, 1,
5049 build_int_cst (arg0_type, 0),
5050 NULL_TREE))
5051 return NULL_TREE;
5053 in_p = n_in_p, low = n_low, high = n_high;
5055 /* If the high bound is missing, but we have a nonzero low
5056 bound, reverse the range so it goes from zero to the low bound
5057 minus 1. */
5058 if (high == 0 && low && ! integer_zerop (low))
5060 in_p = ! in_p;
5061 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5062 build_int_cst (TREE_TYPE (low), 1), 0);
5063 low = build_int_cst (arg0_type, 0);
5067 *p_low = low;
5068 *p_high = high;
5069 *p_in_p = in_p;
5070 return arg0;
5072 case NEGATE_EXPR:
5073 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5074 low and high are non-NULL, then normalize will DTRT. */
5075 if (!TYPE_UNSIGNED (arg0_type)
5076 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5078 if (low == NULL_TREE)
5079 low = TYPE_MIN_VALUE (arg0_type);
5080 if (high == NULL_TREE)
5081 high = TYPE_MAX_VALUE (arg0_type);
5084 /* (-x) IN [a,b] -> x in [-b, -a] */
5085 n_low = range_binop (MINUS_EXPR, exp_type,
5086 build_int_cst (exp_type, 0),
5087 0, high, 1);
5088 n_high = range_binop (MINUS_EXPR, exp_type,
5089 build_int_cst (exp_type, 0),
5090 0, low, 0);
5091 if (n_high != 0 && TREE_OVERFLOW (n_high))
5092 return NULL_TREE;
5093 goto normalize;
5095 case BIT_NOT_EXPR:
5096 /* ~ X -> -X - 1 */
5097 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5098 build_int_cst (exp_type, 1));
5100 case PLUS_EXPR:
5101 case MINUS_EXPR:
5102 if (TREE_CODE (arg1) != INTEGER_CST)
5103 return NULL_TREE;
5105 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5106 move a constant to the other side. */
5107 if (!TYPE_UNSIGNED (arg0_type)
5108 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5109 return NULL_TREE;
5111 /* If EXP is signed, any overflow in the computation is undefined,
5112 so we don't worry about it so long as our computations on
5113 the bounds don't overflow. For unsigned, overflow is defined
5114 and this is exactly the right thing. */
5115 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5116 arg0_type, low, 0, arg1, 0);
5117 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5118 arg0_type, high, 1, arg1, 0);
5119 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5120 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5121 return NULL_TREE;
5123 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5124 *strict_overflow_p = true;
5126 normalize:
5127 /* Check for an unsigned range which has wrapped around the maximum
5128 value thus making n_high < n_low, and normalize it. */
5129 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5131 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5132 build_int_cst (TREE_TYPE (n_high), 1), 0);
5133 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5134 build_int_cst (TREE_TYPE (n_low), 1), 0);
5136 /* If the range is of the form +/- [ x+1, x ], we won't
5137 be able to normalize it. But then, it represents the
5138 whole range or the empty set, so make it
5139 +/- [ -, - ]. */
5140 if (tree_int_cst_equal (n_low, low)
5141 && tree_int_cst_equal (n_high, high))
5142 low = high = 0;
5143 else
5144 in_p = ! in_p;
5146 else
5147 low = n_low, high = n_high;
5149 *p_low = low;
5150 *p_high = high;
5151 *p_in_p = in_p;
5152 return arg0;
5154 CASE_CONVERT:
5155 case NON_LVALUE_EXPR:
5156 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5157 return NULL_TREE;
5159 if (! INTEGRAL_TYPE_P (arg0_type)
5160 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5161 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5162 return NULL_TREE;
5164 n_low = low, n_high = high;
5166 if (n_low != 0)
5167 n_low = fold_convert_loc (loc, arg0_type, n_low);
5169 if (n_high != 0)
5170 n_high = fold_convert_loc (loc, arg0_type, n_high);
5172 /* If we're converting arg0 from an unsigned type, to exp,
5173 a signed type, we will be doing the comparison as unsigned.
5174 The tests above have already verified that LOW and HIGH
5175 are both positive.
5177 So we have to ensure that we will handle large unsigned
5178 values the same way that the current signed bounds treat
5179 negative values. */
5181 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5183 tree high_positive;
5184 tree equiv_type;
5185 /* For fixed-point modes, we need to pass the saturating flag
5186 as the 2nd parameter. */
5187 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5188 equiv_type
5189 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5190 TYPE_SATURATING (arg0_type));
5191 else
5192 equiv_type
5193 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5195 /* A range without an upper bound is, naturally, unbounded.
5196 Since convert would have cropped a very large value, use
5197 the max value for the destination type. */
5198 high_positive
5199 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5200 : TYPE_MAX_VALUE (arg0_type);
5202 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5203 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5204 fold_convert_loc (loc, arg0_type,
5205 high_positive),
5206 build_int_cst (arg0_type, 1));
5208 /* If the low bound is specified, "and" the range with the
5209 range for which the original unsigned value will be
5210 positive. */
5211 if (low != 0)
5213 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5214 1, fold_convert_loc (loc, arg0_type,
5215 integer_zero_node),
5216 high_positive))
5217 return NULL_TREE;
5219 in_p = (n_in_p == in_p);
5221 else
5223 /* Otherwise, "or" the range with the range of the input
5224 that will be interpreted as negative. */
5225 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5226 1, fold_convert_loc (loc, arg0_type,
5227 integer_zero_node),
5228 high_positive))
5229 return NULL_TREE;
5231 in_p = (in_p != n_in_p);
5235 *p_low = n_low;
5236 *p_high = n_high;
5237 *p_in_p = in_p;
5238 return arg0;
5240 default:
5241 return NULL_TREE;
5245 /* Given EXP, a logical expression, set the range it is testing into
5246 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5247 actually being tested. *PLOW and *PHIGH will be made of the same
5248 type as the returned expression. If EXP is not a comparison, we
5249 will most likely not be returning a useful value and range. Set
5250 *STRICT_OVERFLOW_P to true if the return value is only valid
5251 because signed overflow is undefined; otherwise, do not change
5252 *STRICT_OVERFLOW_P. */
5254 tree
5255 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5256 bool *strict_overflow_p)
5258 enum tree_code code;
5259 tree arg0, arg1 = NULL_TREE;
5260 tree exp_type, nexp;
5261 int in_p;
5262 tree low, high;
5263 location_t loc = EXPR_LOCATION (exp);
5265 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5266 and see if we can refine the range. Some of the cases below may not
5267 happen, but it doesn't seem worth worrying about this. We "continue"
5268 the outer loop when we've changed something; otherwise we "break"
5269 the switch, which will "break" the while. */
5271 in_p = 0;
5272 low = high = build_int_cst (TREE_TYPE (exp), 0);
5274 while (1)
5276 code = TREE_CODE (exp);
5277 exp_type = TREE_TYPE (exp);
5278 arg0 = NULL_TREE;
5280 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5282 if (TREE_OPERAND_LENGTH (exp) > 0)
5283 arg0 = TREE_OPERAND (exp, 0);
5284 if (TREE_CODE_CLASS (code) == tcc_binary
5285 || TREE_CODE_CLASS (code) == tcc_comparison
5286 || (TREE_CODE_CLASS (code) == tcc_expression
5287 && TREE_OPERAND_LENGTH (exp) > 1))
5288 arg1 = TREE_OPERAND (exp, 1);
5290 if (arg0 == NULL_TREE)
5291 break;
5293 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5294 &high, &in_p, strict_overflow_p);
5295 if (nexp == NULL_TREE)
5296 break;
5297 exp = nexp;
5300 /* If EXP is a constant, we can evaluate whether this is true or false. */
5301 if (TREE_CODE (exp) == INTEGER_CST)
5303 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5304 exp, 0, low, 0))
5305 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5306 exp, 1, high, 1)));
5307 low = high = 0;
5308 exp = 0;
5311 *pin_p = in_p, *plow = low, *phigh = high;
5312 return exp;
5315 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5316 a bitwise check i.e. when
5317 LOW == 0xXX...X00...0
5318 HIGH == 0xXX...X11...1
5319 Return corresponding mask in MASK and stem in VALUE. */
5321 static bool
5322 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5323 tree *value)
5325 if (TREE_CODE (low) != INTEGER_CST
5326 || TREE_CODE (high) != INTEGER_CST)
5327 return false;
5329 unsigned prec = TYPE_PRECISION (type);
5330 wide_int lo = wi::to_wide (low, prec);
5331 wide_int hi = wi::to_wide (high, prec);
5333 wide_int end_mask = lo ^ hi;
5334 if ((end_mask & (end_mask + 1)) != 0
5335 || (lo & end_mask) != 0)
5336 return false;
5338 wide_int stem_mask = ~end_mask;
5339 wide_int stem = lo & stem_mask;
5340 if (stem != (hi & stem_mask))
5341 return false;
5343 *mask = wide_int_to_tree (type, stem_mask);
5344 *value = wide_int_to_tree (type, stem);
5346 return true;
5349 /* Helper routine for build_range_check and match.pd. Return the type to
5350 perform the check or NULL if it shouldn't be optimized. */
5352 tree
5353 range_check_type (tree etype)
5355 /* First make sure that arithmetics in this type is valid, then make sure
5356 that it wraps around. */
5357 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5358 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5360 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5362 tree utype, minv, maxv;
5364 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5365 for the type in question, as we rely on this here. */
5366 utype = unsigned_type_for (etype);
5367 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5368 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5369 build_int_cst (TREE_TYPE (maxv), 1), 1);
5370 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5372 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5373 minv, 1, maxv, 1)))
5374 etype = utype;
5375 else
5376 return NULL_TREE;
5378 else if (POINTER_TYPE_P (etype) || TREE_CODE (etype) == OFFSET_TYPE)
5379 etype = unsigned_type_for (etype);
5380 return etype;
5383 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5384 type, TYPE, return an expression to test if EXP is in (or out of, depending
5385 on IN_P) the range. Return 0 if the test couldn't be created. */
5387 tree
5388 build_range_check (location_t loc, tree type, tree exp, int in_p,
5389 tree low, tree high)
5391 tree etype = TREE_TYPE (exp), mask, value;
5393 /* Disable this optimization for function pointer expressions
5394 on targets that require function pointer canonicalization. */
5395 if (targetm.have_canonicalize_funcptr_for_compare ()
5396 && POINTER_TYPE_P (etype)
5397 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5398 return NULL_TREE;
5400 if (! in_p)
5402 value = build_range_check (loc, type, exp, 1, low, high);
5403 if (value != 0)
5404 return invert_truthvalue_loc (loc, value);
5406 return 0;
5409 if (low == 0 && high == 0)
5410 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5412 if (low == 0)
5413 return fold_build2_loc (loc, LE_EXPR, type, exp,
5414 fold_convert_loc (loc, etype, high));
5416 if (high == 0)
5417 return fold_build2_loc (loc, GE_EXPR, type, exp,
5418 fold_convert_loc (loc, etype, low));
5420 if (operand_equal_p (low, high, 0))
5421 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5422 fold_convert_loc (loc, etype, low));
5424 if (TREE_CODE (exp) == BIT_AND_EXPR
5425 && maskable_range_p (low, high, etype, &mask, &value))
5426 return fold_build2_loc (loc, EQ_EXPR, type,
5427 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5428 exp, mask),
5429 value);
5431 if (integer_zerop (low))
5433 if (! TYPE_UNSIGNED (etype))
5435 etype = unsigned_type_for (etype);
5436 high = fold_convert_loc (loc, etype, high);
5437 exp = fold_convert_loc (loc, etype, exp);
5439 return build_range_check (loc, type, exp, 1, 0, high);
5442 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5443 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5445 int prec = TYPE_PRECISION (etype);
5447 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5449 if (TYPE_UNSIGNED (etype))
5451 tree signed_etype = signed_type_for (etype);
5452 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5453 etype
5454 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5455 else
5456 etype = signed_etype;
5457 exp = fold_convert_loc (loc, etype, exp);
5459 return fold_build2_loc (loc, GT_EXPR, type, exp,
5460 build_int_cst (etype, 0));
5464 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5465 This requires wrap-around arithmetics for the type of the expression. */
5466 etype = range_check_type (etype);
5467 if (etype == NULL_TREE)
5468 return NULL_TREE;
5470 high = fold_convert_loc (loc, etype, high);
5471 low = fold_convert_loc (loc, etype, low);
5472 exp = fold_convert_loc (loc, etype, exp);
5474 value = const_binop (MINUS_EXPR, high, low);
5476 if (value != 0 && !TREE_OVERFLOW (value))
5477 return build_range_check (loc, type,
5478 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5479 1, build_int_cst (etype, 0), value);
5481 return 0;
5484 /* Return the predecessor of VAL in its type, handling the infinite case. */
5486 static tree
5487 range_predecessor (tree val)
5489 tree type = TREE_TYPE (val);
5491 if (INTEGRAL_TYPE_P (type)
5492 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5493 return 0;
5494 else
5495 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5496 build_int_cst (TREE_TYPE (val), 1), 0);
5499 /* Return the successor of VAL in its type, handling the infinite case. */
5501 static tree
5502 range_successor (tree val)
5504 tree type = TREE_TYPE (val);
5506 if (INTEGRAL_TYPE_P (type)
5507 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5508 return 0;
5509 else
5510 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5511 build_int_cst (TREE_TYPE (val), 1), 0);
5514 /* Given two ranges, see if we can merge them into one. Return 1 if we
5515 can, 0 if we can't. Set the output range into the specified parameters. */
5517 bool
5518 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5519 tree high0, int in1_p, tree low1, tree high1)
5521 int no_overlap;
5522 int subset;
5523 int temp;
5524 tree tem;
5525 int in_p;
5526 tree low, high;
5527 int lowequal = ((low0 == 0 && low1 == 0)
5528 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5529 low0, 0, low1, 0)));
5530 int highequal = ((high0 == 0 && high1 == 0)
5531 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5532 high0, 1, high1, 1)));
5534 /* Make range 0 be the range that starts first, or ends last if they
5535 start at the same value. Swap them if it isn't. */
5536 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5537 low0, 0, low1, 0))
5538 || (lowequal
5539 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5540 high1, 1, high0, 1))))
5542 temp = in0_p, in0_p = in1_p, in1_p = temp;
5543 tem = low0, low0 = low1, low1 = tem;
5544 tem = high0, high0 = high1, high1 = tem;
5547 /* If the second range is != high1 where high1 is the type maximum of
5548 the type, try first merging with < high1 range. */
5549 if (low1
5550 && high1
5551 && TREE_CODE (low1) == INTEGER_CST
5552 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5553 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5554 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5555 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5556 && operand_equal_p (low1, high1, 0))
5558 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5559 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5560 !in1_p, NULL_TREE, range_predecessor (low1)))
5561 return true;
5562 /* Similarly for the second range != low1 where low1 is the type minimum
5563 of the type, try first merging with > low1 range. */
5564 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5565 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5566 !in1_p, range_successor (low1), NULL_TREE))
5567 return true;
5570 /* Now flag two cases, whether the ranges are disjoint or whether the
5571 second range is totally subsumed in the first. Note that the tests
5572 below are simplified by the ones above. */
5573 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5574 high0, 1, low1, 0));
5575 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5576 high1, 1, high0, 1));
5578 /* We now have four cases, depending on whether we are including or
5579 excluding the two ranges. */
5580 if (in0_p && in1_p)
5582 /* If they don't overlap, the result is false. If the second range
5583 is a subset it is the result. Otherwise, the range is from the start
5584 of the second to the end of the first. */
5585 if (no_overlap)
5586 in_p = 0, low = high = 0;
5587 else if (subset)
5588 in_p = 1, low = low1, high = high1;
5589 else
5590 in_p = 1, low = low1, high = high0;
5593 else if (in0_p && ! in1_p)
5595 /* If they don't overlap, the result is the first range. If they are
5596 equal, the result is false. If the second range is a subset of the
5597 first, and the ranges begin at the same place, we go from just after
5598 the end of the second range to the end of the first. If the second
5599 range is not a subset of the first, or if it is a subset and both
5600 ranges end at the same place, the range starts at the start of the
5601 first range and ends just before the second range.
5602 Otherwise, we can't describe this as a single range. */
5603 if (no_overlap)
5604 in_p = 1, low = low0, high = high0;
5605 else if (lowequal && highequal)
5606 in_p = 0, low = high = 0;
5607 else if (subset && lowequal)
5609 low = range_successor (high1);
5610 high = high0;
5611 in_p = 1;
5612 if (low == 0)
5614 /* We are in the weird situation where high0 > high1 but
5615 high1 has no successor. Punt. */
5616 return 0;
5619 else if (! subset || highequal)
5621 low = low0;
5622 high = range_predecessor (low1);
5623 in_p = 1;
5624 if (high == 0)
5626 /* low0 < low1 but low1 has no predecessor. Punt. */
5627 return 0;
5630 else
5631 return 0;
5634 else if (! in0_p && in1_p)
5636 /* If they don't overlap, the result is the second range. If the second
5637 is a subset of the first, the result is false. Otherwise,
5638 the range starts just after the first range and ends at the
5639 end of the second. */
5640 if (no_overlap)
5641 in_p = 1, low = low1, high = high1;
5642 else if (subset || highequal)
5643 in_p = 0, low = high = 0;
5644 else
5646 low = range_successor (high0);
5647 high = high1;
5648 in_p = 1;
5649 if (low == 0)
5651 /* high1 > high0 but high0 has no successor. Punt. */
5652 return 0;
5657 else
5659 /* The case where we are excluding both ranges. Here the complex case
5660 is if they don't overlap. In that case, the only time we have a
5661 range is if they are adjacent. If the second is a subset of the
5662 first, the result is the first. Otherwise, the range to exclude
5663 starts at the beginning of the first range and ends at the end of the
5664 second. */
5665 if (no_overlap)
5667 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5668 range_successor (high0),
5669 1, low1, 0)))
5670 in_p = 0, low = low0, high = high1;
5671 else
5673 /* Canonicalize - [min, x] into - [-, x]. */
5674 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5675 switch (TREE_CODE (TREE_TYPE (low0)))
5677 case ENUMERAL_TYPE:
5678 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5679 GET_MODE_BITSIZE
5680 (TYPE_MODE (TREE_TYPE (low0)))))
5681 break;
5682 /* FALLTHROUGH */
5683 case INTEGER_TYPE:
5684 if (tree_int_cst_equal (low0,
5685 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5686 low0 = 0;
5687 break;
5688 case POINTER_TYPE:
5689 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5690 && integer_zerop (low0))
5691 low0 = 0;
5692 break;
5693 default:
5694 break;
5697 /* Canonicalize - [x, max] into - [x, -]. */
5698 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5699 switch (TREE_CODE (TREE_TYPE (high1)))
5701 case ENUMERAL_TYPE:
5702 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5703 GET_MODE_BITSIZE
5704 (TYPE_MODE (TREE_TYPE (high1)))))
5705 break;
5706 /* FALLTHROUGH */
5707 case INTEGER_TYPE:
5708 if (tree_int_cst_equal (high1,
5709 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5710 high1 = 0;
5711 break;
5712 case POINTER_TYPE:
5713 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5714 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5715 high1, 1,
5716 build_int_cst (TREE_TYPE (high1), 1),
5717 1)))
5718 high1 = 0;
5719 break;
5720 default:
5721 break;
5724 /* The ranges might be also adjacent between the maximum and
5725 minimum values of the given type. For
5726 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5727 return + [x + 1, y - 1]. */
5728 if (low0 == 0 && high1 == 0)
5730 low = range_successor (high0);
5731 high = range_predecessor (low1);
5732 if (low == 0 || high == 0)
5733 return 0;
5735 in_p = 1;
5737 else
5738 return 0;
5741 else if (subset)
5742 in_p = 0, low = low0, high = high0;
5743 else
5744 in_p = 0, low = low0, high = high1;
5747 *pin_p = in_p, *plow = low, *phigh = high;
5748 return 1;
5752 /* Subroutine of fold, looking inside expressions of the form
5753 A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
5754 are the three operands of the COND_EXPR. This function is
5755 being used also to optimize A op B ? C : A, by reversing the
5756 comparison first.
5758 Return a folded expression whose code is not a COND_EXPR
5759 anymore, or NULL_TREE if no folding opportunity is found. */
5761 static tree
5762 fold_cond_expr_with_comparison (location_t loc, tree type,
5763 enum tree_code comp_code,
5764 tree arg00, tree arg01, tree arg1, tree arg2)
5766 tree arg1_type = TREE_TYPE (arg1);
5767 tree tem;
5769 STRIP_NOPS (arg1);
5770 STRIP_NOPS (arg2);
5772 /* If we have A op 0 ? A : -A, consider applying the following
5773 transformations:
5775 A == 0? A : -A same as -A
5776 A != 0? A : -A same as A
5777 A >= 0? A : -A same as abs (A)
5778 A > 0? A : -A same as abs (A)
5779 A <= 0? A : -A same as -abs (A)
5780 A < 0? A : -A same as -abs (A)
5782 None of these transformations work for modes with signed
5783 zeros. If A is +/-0, the first two transformations will
5784 change the sign of the result (from +0 to -0, or vice
5785 versa). The last four will fix the sign of the result,
5786 even though the original expressions could be positive or
5787 negative, depending on the sign of A.
5789 Note that all these transformations are correct if A is
5790 NaN, since the two alternatives (A and -A) are also NaNs. */
5791 if (!HONOR_SIGNED_ZEROS (type)
5792 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5793 ? real_zerop (arg01)
5794 : integer_zerop (arg01))
5795 && ((TREE_CODE (arg2) == NEGATE_EXPR
5796 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5797 /* In the case that A is of the form X-Y, '-A' (arg2) may
5798 have already been folded to Y-X, check for that. */
5799 || (TREE_CODE (arg1) == MINUS_EXPR
5800 && TREE_CODE (arg2) == MINUS_EXPR
5801 && operand_equal_p (TREE_OPERAND (arg1, 0),
5802 TREE_OPERAND (arg2, 1), 0)
5803 && operand_equal_p (TREE_OPERAND (arg1, 1),
5804 TREE_OPERAND (arg2, 0), 0))))
5805 switch (comp_code)
5807 case EQ_EXPR:
5808 case UNEQ_EXPR:
5809 tem = fold_convert_loc (loc, arg1_type, arg1);
5810 return fold_convert_loc (loc, type, negate_expr (tem));
5811 case NE_EXPR:
5812 case LTGT_EXPR:
5813 return fold_convert_loc (loc, type, arg1);
5814 case UNGE_EXPR:
5815 case UNGT_EXPR:
5816 if (flag_trapping_math)
5817 break;
5818 /* Fall through. */
5819 case GE_EXPR:
5820 case GT_EXPR:
5821 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5822 break;
5823 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5824 return fold_convert_loc (loc, type, tem);
5825 case UNLE_EXPR:
5826 case UNLT_EXPR:
5827 if (flag_trapping_math)
5828 break;
5829 /* FALLTHRU */
5830 case LE_EXPR:
5831 case LT_EXPR:
5832 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5833 break;
5834 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5835 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5837 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5838 is not, invokes UB both in abs and in the negation of it.
5839 So, use ABSU_EXPR instead. */
5840 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5841 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5842 tem = negate_expr (tem);
5843 return fold_convert_loc (loc, type, tem);
5845 else
5847 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5848 return negate_expr (fold_convert_loc (loc, type, tem));
5850 default:
5851 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5852 break;
5855 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5856 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5857 both transformations are correct when A is NaN: A != 0
5858 is then true, and A == 0 is false. */
5860 if (!HONOR_SIGNED_ZEROS (type)
5861 && integer_zerop (arg01) && integer_zerop (arg2))
5863 if (comp_code == NE_EXPR)
5864 return fold_convert_loc (loc, type, arg1);
5865 else if (comp_code == EQ_EXPR)
5866 return build_zero_cst (type);
5869 /* Try some transformations of A op B ? A : B.
5871 A == B? A : B same as B
5872 A != B? A : B same as A
5873 A >= B? A : B same as max (A, B)
5874 A > B? A : B same as max (B, A)
5875 A <= B? A : B same as min (A, B)
5876 A < B? A : B same as min (B, A)
5878 As above, these transformations don't work in the presence
5879 of signed zeros. For example, if A and B are zeros of
5880 opposite sign, the first two transformations will change
5881 the sign of the result. In the last four, the original
5882 expressions give different results for (A=+0, B=-0) and
5883 (A=-0, B=+0), but the transformed expressions do not.
5885 The first two transformations are correct if either A or B
5886 is a NaN. In the first transformation, the condition will
5887 be false, and B will indeed be chosen. In the case of the
5888 second transformation, the condition A != B will be true,
5889 and A will be chosen.
5891 The conversions to max() and min() are not correct if B is
5892 a number and A is not. The conditions in the original
5893 expressions will be false, so all four give B. The min()
5894 and max() versions would give a NaN instead. */
5895 if (!HONOR_SIGNED_ZEROS (type)
5896 && operand_equal_for_comparison_p (arg01, arg2)
5897 /* Avoid these transformations if the COND_EXPR may be used
5898 as an lvalue in the C++ front-end. PR c++/19199. */
5899 && (in_gimple_form
5900 || VECTOR_TYPE_P (type)
5901 || (! lang_GNU_CXX ()
5902 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5903 || ! maybe_lvalue_p (arg1)
5904 || ! maybe_lvalue_p (arg2)))
5906 tree comp_op0 = arg00;
5907 tree comp_op1 = arg01;
5908 tree comp_type = TREE_TYPE (comp_op0);
5910 switch (comp_code)
5912 case EQ_EXPR:
5913 return fold_convert_loc (loc, type, arg2);
5914 case NE_EXPR:
5915 return fold_convert_loc (loc, type, arg1);
5916 case LE_EXPR:
5917 case LT_EXPR:
5918 case UNLE_EXPR:
5919 case UNLT_EXPR:
5920 /* In C++ a ?: expression can be an lvalue, so put the
5921 operand which will be used if they are equal first
5922 so that we can convert this back to the
5923 corresponding COND_EXPR. */
5924 if (!HONOR_NANS (arg1))
5926 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5927 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5928 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5929 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5930 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5931 comp_op1, comp_op0);
5932 return fold_convert_loc (loc, type, tem);
5934 break;
5935 case GE_EXPR:
5936 case GT_EXPR:
5937 case UNGE_EXPR:
5938 case UNGT_EXPR:
5939 if (!HONOR_NANS (arg1))
5941 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5942 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5943 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5944 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5945 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5946 comp_op1, comp_op0);
5947 return fold_convert_loc (loc, type, tem);
5949 break;
5950 case UNEQ_EXPR:
5951 if (!HONOR_NANS (arg1))
5952 return fold_convert_loc (loc, type, arg2);
5953 break;
5954 case LTGT_EXPR:
5955 if (!HONOR_NANS (arg1))
5956 return fold_convert_loc (loc, type, arg1);
5957 break;
5958 default:
5959 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5960 break;
5964 return NULL_TREE;
5969 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5970 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5971 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5972 false) >= 2)
5973 #endif
5975 /* EXP is some logical combination of boolean tests. See if we can
5976 merge it into some range test. Return the new tree if so. */
5978 static tree
5979 fold_range_test (location_t loc, enum tree_code code, tree type,
5980 tree op0, tree op1)
5982 int or_op = (code == TRUTH_ORIF_EXPR
5983 || code == TRUTH_OR_EXPR);
5984 int in0_p, in1_p, in_p;
5985 tree low0, low1, low, high0, high1, high;
5986 bool strict_overflow_p = false;
5987 tree tem, lhs, rhs;
5988 const char * const warnmsg = G_("assuming signed overflow does not occur "
5989 "when simplifying range test");
5991 if (!INTEGRAL_TYPE_P (type))
5992 return 0;
5994 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5995 /* If op0 is known true or false and this is a short-circuiting
5996 operation we must not merge with op1 since that makes side-effects
5997 unconditional. So special-case this. */
5998 if (!lhs
5999 && ((code == TRUTH_ORIF_EXPR && in0_p)
6000 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6001 return op0;
6002 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6004 /* If this is an OR operation, invert both sides; we will invert
6005 again at the end. */
6006 if (or_op)
6007 in0_p = ! in0_p, in1_p = ! in1_p;
6009 /* If both expressions are the same, if we can merge the ranges, and we
6010 can build the range test, return it or it inverted. If one of the
6011 ranges is always true or always false, consider it to be the same
6012 expression as the other. */
6013 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6014 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6015 in1_p, low1, high1)
6016 && (tem = (build_range_check (loc, type,
6017 lhs != 0 ? lhs
6018 : rhs != 0 ? rhs : integer_zero_node,
6019 in_p, low, high))) != 0)
6021 if (strict_overflow_p)
6022 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6023 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6026 /* On machines where the branch cost is expensive, if this is a
6027 short-circuited branch and the underlying object on both sides
6028 is the same, make a non-short-circuit operation. */
6029 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6030 if (param_logical_op_non_short_circuit != -1)
6031 logical_op_non_short_circuit
6032 = param_logical_op_non_short_circuit;
6033 if (logical_op_non_short_circuit
6034 && !sanitize_coverage_p ()
6035 && lhs != 0 && rhs != 0
6036 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6037 && operand_equal_p (lhs, rhs, 0))
6039 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6040 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6041 which cases we can't do this. */
6042 if (simple_operand_p (lhs))
6043 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6044 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6045 type, op0, op1);
6047 else if (!lang_hooks.decls.global_bindings_p ()
6048 && !CONTAINS_PLACEHOLDER_P (lhs))
6050 tree common = save_expr (lhs);
6052 if ((lhs = build_range_check (loc, type, common,
6053 or_op ? ! in0_p : in0_p,
6054 low0, high0)) != 0
6055 && (rhs = build_range_check (loc, type, common,
6056 or_op ? ! in1_p : in1_p,
6057 low1, high1)) != 0)
6059 if (strict_overflow_p)
6060 fold_overflow_warning (warnmsg,
6061 WARN_STRICT_OVERFLOW_COMPARISON);
6062 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6063 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6064 type, lhs, rhs);
6069 return 0;
6072 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6073 bit value. Arrange things so the extra bits will be set to zero if and
6074 only if C is signed-extended to its full width. If MASK is nonzero,
6075 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6077 static tree
6078 unextend (tree c, int p, int unsignedp, tree mask)
6080 tree type = TREE_TYPE (c);
6081 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6082 tree temp;
6084 if (p == modesize || unsignedp)
6085 return c;
6087 /* We work by getting just the sign bit into the low-order bit, then
6088 into the high-order bit, then sign-extend. We then XOR that value
6089 with C. */
6090 temp = build_int_cst (TREE_TYPE (c),
6091 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6093 /* We must use a signed type in order to get an arithmetic right shift.
6094 However, we must also avoid introducing accidental overflows, so that
6095 a subsequent call to integer_zerop will work. Hence we must
6096 do the type conversion here. At this point, the constant is either
6097 zero or one, and the conversion to a signed type can never overflow.
6098 We could get an overflow if this conversion is done anywhere else. */
6099 if (TYPE_UNSIGNED (type))
6100 temp = fold_convert (signed_type_for (type), temp);
6102 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6103 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6104 if (mask != 0)
6105 temp = const_binop (BIT_AND_EXPR, temp,
6106 fold_convert (TREE_TYPE (c), mask));
6107 /* If necessary, convert the type back to match the type of C. */
6108 if (TYPE_UNSIGNED (type))
6109 temp = fold_convert (type, temp);
6111 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6114 /* For an expression that has the form
6115 (A && B) || ~B
6117 (A || B) && ~B,
6118 we can drop one of the inner expressions and simplify to
6119 A || ~B
6121 A && ~B
6122 LOC is the location of the resulting expression. OP is the inner
6123 logical operation; the left-hand side in the examples above, while CMPOP
6124 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6125 removing a condition that guards another, as in
6126 (A != NULL && A->...) || A == NULL
6127 which we must not transform. If RHS_ONLY is true, only eliminate the
6128 right-most operand of the inner logical operation. */
6130 static tree
6131 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6132 bool rhs_only)
6134 tree type = TREE_TYPE (cmpop);
6135 enum tree_code code = TREE_CODE (cmpop);
6136 enum tree_code truthop_code = TREE_CODE (op);
6137 tree lhs = TREE_OPERAND (op, 0);
6138 tree rhs = TREE_OPERAND (op, 1);
6139 tree orig_lhs = lhs, orig_rhs = rhs;
6140 enum tree_code rhs_code = TREE_CODE (rhs);
6141 enum tree_code lhs_code = TREE_CODE (lhs);
6142 enum tree_code inv_code;
6144 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6145 return NULL_TREE;
6147 if (TREE_CODE_CLASS (code) != tcc_comparison)
6148 return NULL_TREE;
6150 if (rhs_code == truthop_code)
6152 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6153 if (newrhs != NULL_TREE)
6155 rhs = newrhs;
6156 rhs_code = TREE_CODE (rhs);
6159 if (lhs_code == truthop_code && !rhs_only)
6161 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6162 if (newlhs != NULL_TREE)
6164 lhs = newlhs;
6165 lhs_code = TREE_CODE (lhs);
6169 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6170 if (inv_code == rhs_code
6171 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6172 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6173 return lhs;
6174 if (!rhs_only && inv_code == lhs_code
6175 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6176 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6177 return rhs;
6178 if (rhs != orig_rhs || lhs != orig_lhs)
6179 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6180 lhs, rhs);
6181 return NULL_TREE;
6184 /* Find ways of folding logical expressions of LHS and RHS:
6185 Try to merge two comparisons to the same innermost item.
6186 Look for range tests like "ch >= '0' && ch <= '9'".
6187 Look for combinations of simple terms on machines with expensive branches
6188 and evaluate the RHS unconditionally.
6190 For example, if we have p->a == 2 && p->b == 4 and we can make an
6191 object large enough to span both A and B, we can do this with a comparison
6192 against the object ANDed with the a mask.
6194 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6195 operations to do this with one comparison.
6197 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6198 function and the one above.
6200 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6201 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6203 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6204 two operands.
6206 We return the simplified tree or 0 if no optimization is possible. */
6208 static tree
6209 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6210 tree lhs, tree rhs)
6212 /* If this is the "or" of two comparisons, we can do something if
6213 the comparisons are NE_EXPR. If this is the "and", we can do something
6214 if the comparisons are EQ_EXPR. I.e.,
6215 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6217 WANTED_CODE is this operation code. For single bit fields, we can
6218 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6219 comparison for one-bit fields. */
6221 enum tree_code wanted_code;
6222 enum tree_code lcode, rcode;
6223 tree ll_arg, lr_arg, rl_arg, rr_arg;
6224 tree ll_inner, lr_inner, rl_inner, rr_inner;
6225 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6226 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6227 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6228 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6229 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6230 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6231 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6232 scalar_int_mode lnmode, rnmode;
6233 tree ll_mask, lr_mask, rl_mask, rr_mask;
6234 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6235 tree l_const, r_const;
6236 tree lntype, rntype, result;
6237 HOST_WIDE_INT first_bit, end_bit;
6238 int volatilep;
6240 /* Start by getting the comparison codes. Fail if anything is volatile.
6241 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6242 it were surrounded with a NE_EXPR. */
6244 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6245 return 0;
6247 lcode = TREE_CODE (lhs);
6248 rcode = TREE_CODE (rhs);
6250 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6252 lhs = build2 (NE_EXPR, truth_type, lhs,
6253 build_int_cst (TREE_TYPE (lhs), 0));
6254 lcode = NE_EXPR;
6257 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6259 rhs = build2 (NE_EXPR, truth_type, rhs,
6260 build_int_cst (TREE_TYPE (rhs), 0));
6261 rcode = NE_EXPR;
6264 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6265 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6266 return 0;
6268 ll_arg = TREE_OPERAND (lhs, 0);
6269 lr_arg = TREE_OPERAND (lhs, 1);
6270 rl_arg = TREE_OPERAND (rhs, 0);
6271 rr_arg = TREE_OPERAND (rhs, 1);
6273 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6274 if (simple_operand_p (ll_arg)
6275 && simple_operand_p (lr_arg))
6277 if (operand_equal_p (ll_arg, rl_arg, 0)
6278 && operand_equal_p (lr_arg, rr_arg, 0))
6280 result = combine_comparisons (loc, code, lcode, rcode,
6281 truth_type, ll_arg, lr_arg);
6282 if (result)
6283 return result;
6285 else if (operand_equal_p (ll_arg, rr_arg, 0)
6286 && operand_equal_p (lr_arg, rl_arg, 0))
6288 result = combine_comparisons (loc, code, lcode,
6289 swap_tree_comparison (rcode),
6290 truth_type, ll_arg, lr_arg);
6291 if (result)
6292 return result;
6296 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6297 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6299 /* If the RHS can be evaluated unconditionally and its operands are
6300 simple, it wins to evaluate the RHS unconditionally on machines
6301 with expensive branches. In this case, this isn't a comparison
6302 that can be merged. */
6304 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6305 false) >= 2
6306 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6307 && simple_operand_p (rl_arg)
6308 && simple_operand_p (rr_arg))
6310 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6311 if (code == TRUTH_OR_EXPR
6312 && lcode == NE_EXPR && integer_zerop (lr_arg)
6313 && rcode == NE_EXPR && integer_zerop (rr_arg)
6314 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6315 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6316 return build2_loc (loc, NE_EXPR, truth_type,
6317 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6318 ll_arg, rl_arg),
6319 build_int_cst (TREE_TYPE (ll_arg), 0));
6321 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6322 if (code == TRUTH_AND_EXPR
6323 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6324 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6325 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6326 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6327 return build2_loc (loc, EQ_EXPR, truth_type,
6328 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6329 ll_arg, rl_arg),
6330 build_int_cst (TREE_TYPE (ll_arg), 0));
6333 /* See if the comparisons can be merged. Then get all the parameters for
6334 each side. */
6336 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6337 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6338 return 0;
6340 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6341 volatilep = 0;
6342 ll_inner = decode_field_reference (loc, &ll_arg,
6343 &ll_bitsize, &ll_bitpos, &ll_mode,
6344 &ll_unsignedp, &ll_reversep, &volatilep,
6345 &ll_mask, &ll_and_mask);
6346 lr_inner = decode_field_reference (loc, &lr_arg,
6347 &lr_bitsize, &lr_bitpos, &lr_mode,
6348 &lr_unsignedp, &lr_reversep, &volatilep,
6349 &lr_mask, &lr_and_mask);
6350 rl_inner = decode_field_reference (loc, &rl_arg,
6351 &rl_bitsize, &rl_bitpos, &rl_mode,
6352 &rl_unsignedp, &rl_reversep, &volatilep,
6353 &rl_mask, &rl_and_mask);
6354 rr_inner = decode_field_reference (loc, &rr_arg,
6355 &rr_bitsize, &rr_bitpos, &rr_mode,
6356 &rr_unsignedp, &rr_reversep, &volatilep,
6357 &rr_mask, &rr_and_mask);
6359 /* It must be true that the inner operation on the lhs of each
6360 comparison must be the same if we are to be able to do anything.
6361 Then see if we have constants. If not, the same must be true for
6362 the rhs's. */
6363 if (volatilep
6364 || ll_reversep != rl_reversep
6365 || ll_inner == 0 || rl_inner == 0
6366 || ! operand_equal_p (ll_inner, rl_inner, 0))
6367 return 0;
6369 if (TREE_CODE (lr_arg) == INTEGER_CST
6370 && TREE_CODE (rr_arg) == INTEGER_CST)
6372 l_const = lr_arg, r_const = rr_arg;
6373 lr_reversep = ll_reversep;
6375 else if (lr_reversep != rr_reversep
6376 || lr_inner == 0 || rr_inner == 0
6377 || ! operand_equal_p (lr_inner, rr_inner, 0))
6378 return 0;
6379 else
6380 l_const = r_const = 0;
6382 /* If either comparison code is not correct for our logical operation,
6383 fail. However, we can convert a one-bit comparison against zero into
6384 the opposite comparison against that bit being set in the field. */
6386 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6387 if (lcode != wanted_code)
6389 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6391 /* Make the left operand unsigned, since we are only interested
6392 in the value of one bit. Otherwise we are doing the wrong
6393 thing below. */
6394 ll_unsignedp = 1;
6395 l_const = ll_mask;
6397 else
6398 return 0;
6401 /* This is analogous to the code for l_const above. */
6402 if (rcode != wanted_code)
6404 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6406 rl_unsignedp = 1;
6407 r_const = rl_mask;
6409 else
6410 return 0;
6413 /* See if we can find a mode that contains both fields being compared on
6414 the left. If we can't, fail. Otherwise, update all constants and masks
6415 to be relative to a field of that size. */
6416 first_bit = MIN (ll_bitpos, rl_bitpos);
6417 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6418 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6419 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6420 volatilep, &lnmode))
6421 return 0;
6423 lnbitsize = GET_MODE_BITSIZE (lnmode);
6424 lnbitpos = first_bit & ~ (lnbitsize - 1);
6425 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6426 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6428 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6430 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6431 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6434 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6435 size_int (xll_bitpos));
6436 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6437 size_int (xrl_bitpos));
6439 if (l_const)
6441 l_const = fold_convert_loc (loc, lntype, l_const);
6442 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6443 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6444 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6445 fold_build1_loc (loc, BIT_NOT_EXPR,
6446 lntype, ll_mask))))
6448 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6450 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6453 if (r_const)
6455 r_const = fold_convert_loc (loc, lntype, r_const);
6456 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6457 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6458 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6459 fold_build1_loc (loc, BIT_NOT_EXPR,
6460 lntype, rl_mask))))
6462 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6464 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6468 /* If the right sides are not constant, do the same for it. Also,
6469 disallow this optimization if a size, signedness or storage order
6470 mismatch occurs between the left and right sides. */
6471 if (l_const == 0)
6473 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6474 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6475 || ll_reversep != lr_reversep
6476 /* Make sure the two fields on the right
6477 correspond to the left without being swapped. */
6478 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6479 return 0;
6481 first_bit = MIN (lr_bitpos, rr_bitpos);
6482 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6483 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6484 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6485 volatilep, &rnmode))
6486 return 0;
6488 rnbitsize = GET_MODE_BITSIZE (rnmode);
6489 rnbitpos = first_bit & ~ (rnbitsize - 1);
6490 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6491 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6493 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6495 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6496 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6499 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6500 rntype, lr_mask),
6501 size_int (xlr_bitpos));
6502 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6503 rntype, rr_mask),
6504 size_int (xrr_bitpos));
6506 /* Make a mask that corresponds to both fields being compared.
6507 Do this for both items being compared. If the operands are the
6508 same size and the bits being compared are in the same position
6509 then we can do this by masking both and comparing the masked
6510 results. */
6511 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6512 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6513 if (lnbitsize == rnbitsize
6514 && xll_bitpos == xlr_bitpos
6515 && lnbitpos >= 0
6516 && rnbitpos >= 0)
6518 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6519 lntype, lnbitsize, lnbitpos,
6520 ll_unsignedp || rl_unsignedp, ll_reversep);
6521 if (! all_ones_mask_p (ll_mask, lnbitsize))
6522 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6524 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6525 rntype, rnbitsize, rnbitpos,
6526 lr_unsignedp || rr_unsignedp, lr_reversep);
6527 if (! all_ones_mask_p (lr_mask, rnbitsize))
6528 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6530 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6533 /* There is still another way we can do something: If both pairs of
6534 fields being compared are adjacent, we may be able to make a wider
6535 field containing them both.
6537 Note that we still must mask the lhs/rhs expressions. Furthermore,
6538 the mask must be shifted to account for the shift done by
6539 make_bit_field_ref. */
6540 if (((ll_bitsize + ll_bitpos == rl_bitpos
6541 && lr_bitsize + lr_bitpos == rr_bitpos)
6542 || (ll_bitpos == rl_bitpos + rl_bitsize
6543 && lr_bitpos == rr_bitpos + rr_bitsize))
6544 && ll_bitpos >= 0
6545 && rl_bitpos >= 0
6546 && lr_bitpos >= 0
6547 && rr_bitpos >= 0)
6549 tree type;
6551 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6552 ll_bitsize + rl_bitsize,
6553 MIN (ll_bitpos, rl_bitpos),
6554 ll_unsignedp, ll_reversep);
6555 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6556 lr_bitsize + rr_bitsize,
6557 MIN (lr_bitpos, rr_bitpos),
6558 lr_unsignedp, lr_reversep);
6560 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6561 size_int (MIN (xll_bitpos, xrl_bitpos)));
6562 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6563 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6565 /* Convert to the smaller type before masking out unwanted bits. */
6566 type = lntype;
6567 if (lntype != rntype)
6569 if (lnbitsize > rnbitsize)
6571 lhs = fold_convert_loc (loc, rntype, lhs);
6572 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6573 type = rntype;
6575 else if (lnbitsize < rnbitsize)
6577 rhs = fold_convert_loc (loc, lntype, rhs);
6578 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6579 type = lntype;
6583 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6584 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6586 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6587 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6589 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6592 return 0;
6595 /* Handle the case of comparisons with constants. If there is something in
6596 common between the masks, those bits of the constants must be the same.
6597 If not, the condition is always false. Test for this to avoid generating
6598 incorrect code below. */
6599 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6600 if (! integer_zerop (result)
6601 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6602 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6604 if (wanted_code == NE_EXPR)
6606 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6607 return constant_boolean_node (true, truth_type);
6609 else
6611 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6612 return constant_boolean_node (false, truth_type);
6616 if (lnbitpos < 0)
6617 return 0;
6619 /* Construct the expression we will return. First get the component
6620 reference we will make. Unless the mask is all ones the width of
6621 that field, perform the mask operation. Then compare with the
6622 merged constant. */
6623 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6624 lntype, lnbitsize, lnbitpos,
6625 ll_unsignedp || rl_unsignedp, ll_reversep);
6627 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6628 if (! all_ones_mask_p (ll_mask, lnbitsize))
6629 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6631 return build2_loc (loc, wanted_code, truth_type, result,
6632 const_binop (BIT_IOR_EXPR, l_const, r_const));
6635 /* T is an integer expression that is being multiplied, divided, or taken a
6636 modulus (CODE says which and what kind of divide or modulus) by a
6637 constant C. See if we can eliminate that operation by folding it with
6638 other operations already in T. WIDE_TYPE, if non-null, is a type that
6639 should be used for the computation if wider than our type.
6641 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6642 (X * 2) + (Y * 4). We must, however, be assured that either the original
6643 expression would not overflow or that overflow is undefined for the type
6644 in the language in question.
6646 If we return a non-null expression, it is an equivalent form of the
6647 original computation, but need not be in the original type.
6649 We set *STRICT_OVERFLOW_P to true if the return values depends on
6650 signed overflow being undefined. Otherwise we do not change
6651 *STRICT_OVERFLOW_P. */
6653 static tree
6654 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6655 bool *strict_overflow_p)
6657 /* To avoid exponential search depth, refuse to allow recursion past
6658 three levels. Beyond that (1) it's highly unlikely that we'll find
6659 something interesting and (2) we've probably processed it before
6660 when we built the inner expression. */
6662 static int depth;
6663 tree ret;
6665 if (depth > 3)
6666 return NULL;
6668 depth++;
6669 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6670 depth--;
6672 return ret;
6675 static tree
6676 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6677 bool *strict_overflow_p)
6679 tree type = TREE_TYPE (t);
6680 enum tree_code tcode = TREE_CODE (t);
6681 tree ctype = (wide_type != 0
6682 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6683 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6684 ? wide_type : type);
6685 tree t1, t2;
6686 int same_p = tcode == code;
6687 tree op0 = NULL_TREE, op1 = NULL_TREE;
6688 bool sub_strict_overflow_p;
6690 /* Don't deal with constants of zero here; they confuse the code below. */
6691 if (integer_zerop (c))
6692 return NULL_TREE;
6694 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6695 op0 = TREE_OPERAND (t, 0);
6697 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6698 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6700 /* Note that we need not handle conditional operations here since fold
6701 already handles those cases. So just do arithmetic here. */
6702 switch (tcode)
6704 case INTEGER_CST:
6705 /* For a constant, we can always simplify if we are a multiply
6706 or (for divide and modulus) if it is a multiple of our constant. */
6707 if (code == MULT_EXPR
6708 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6709 TYPE_SIGN (type)))
6711 tree tem = const_binop (code, fold_convert (ctype, t),
6712 fold_convert (ctype, c));
6713 /* If the multiplication overflowed, we lost information on it.
6714 See PR68142 and PR69845. */
6715 if (TREE_OVERFLOW (tem))
6716 return NULL_TREE;
6717 return tem;
6719 break;
6721 CASE_CONVERT: case NON_LVALUE_EXPR:
6722 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6723 break;
6724 /* If op0 is an expression ... */
6725 if ((COMPARISON_CLASS_P (op0)
6726 || UNARY_CLASS_P (op0)
6727 || BINARY_CLASS_P (op0)
6728 || VL_EXP_CLASS_P (op0)
6729 || EXPRESSION_CLASS_P (op0))
6730 /* ... and has wrapping overflow, and its type is smaller
6731 than ctype, then we cannot pass through as widening. */
6732 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6733 && (TYPE_PRECISION (ctype)
6734 > TYPE_PRECISION (TREE_TYPE (op0))))
6735 /* ... or this is a truncation (t is narrower than op0),
6736 then we cannot pass through this narrowing. */
6737 || (TYPE_PRECISION (type)
6738 < TYPE_PRECISION (TREE_TYPE (op0)))
6739 /* ... or signedness changes for division or modulus,
6740 then we cannot pass through this conversion. */
6741 || (code != MULT_EXPR
6742 && (TYPE_UNSIGNED (ctype)
6743 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6744 /* ... or has undefined overflow while the converted to
6745 type has not, we cannot do the operation in the inner type
6746 as that would introduce undefined overflow. */
6747 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6748 && !TYPE_OVERFLOW_UNDEFINED (type))))
6749 break;
6751 /* Pass the constant down and see if we can make a simplification. If
6752 we can, replace this expression with the inner simplification for
6753 possible later conversion to our or some other type. */
6754 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6755 && TREE_CODE (t2) == INTEGER_CST
6756 && !TREE_OVERFLOW (t2)
6757 && (t1 = extract_muldiv (op0, t2, code,
6758 code == MULT_EXPR ? ctype : NULL_TREE,
6759 strict_overflow_p)) != 0)
6760 return t1;
6761 break;
6763 case ABS_EXPR:
6764 /* If widening the type changes it from signed to unsigned, then we
6765 must avoid building ABS_EXPR itself as unsigned. */
6766 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6768 tree cstype = (*signed_type_for) (ctype);
6769 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6770 != 0)
6772 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6773 return fold_convert (ctype, t1);
6775 break;
6777 /* If the constant is negative, we cannot simplify this. */
6778 if (tree_int_cst_sgn (c) == -1)
6779 break;
6780 /* FALLTHROUGH */
6781 case NEGATE_EXPR:
6782 /* For division and modulus, type can't be unsigned, as e.g.
6783 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6784 For signed types, even with wrapping overflow, this is fine. */
6785 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6786 break;
6787 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6788 != 0)
6789 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6790 break;
6792 case MIN_EXPR: case MAX_EXPR:
6793 /* If widening the type changes the signedness, then we can't perform
6794 this optimization as that changes the result. */
6795 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6796 break;
6798 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6799 sub_strict_overflow_p = false;
6800 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6801 &sub_strict_overflow_p)) != 0
6802 && (t2 = extract_muldiv (op1, c, code, wide_type,
6803 &sub_strict_overflow_p)) != 0)
6805 if (tree_int_cst_sgn (c) < 0)
6806 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6807 if (sub_strict_overflow_p)
6808 *strict_overflow_p = true;
6809 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6810 fold_convert (ctype, t2));
6812 break;
6814 case LSHIFT_EXPR: case RSHIFT_EXPR:
6815 /* If the second operand is constant, this is a multiplication
6816 or floor division, by a power of two, so we can treat it that
6817 way unless the multiplier or divisor overflows. Signed
6818 left-shift overflow is implementation-defined rather than
6819 undefined in C90, so do not convert signed left shift into
6820 multiplication. */
6821 if (TREE_CODE (op1) == INTEGER_CST
6822 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6823 /* const_binop may not detect overflow correctly,
6824 so check for it explicitly here. */
6825 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6826 wi::to_wide (op1))
6827 && (t1 = fold_convert (ctype,
6828 const_binop (LSHIFT_EXPR, size_one_node,
6829 op1))) != 0
6830 && !TREE_OVERFLOW (t1))
6831 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6832 ? MULT_EXPR : FLOOR_DIV_EXPR,
6833 ctype,
6834 fold_convert (ctype, op0),
6835 t1),
6836 c, code, wide_type, strict_overflow_p);
6837 break;
6839 case PLUS_EXPR: case MINUS_EXPR:
6840 /* See if we can eliminate the operation on both sides. If we can, we
6841 can return a new PLUS or MINUS. If we can't, the only remaining
6842 cases where we can do anything are if the second operand is a
6843 constant. */
6844 sub_strict_overflow_p = false;
6845 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6846 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6847 if (t1 != 0 && t2 != 0
6848 && TYPE_OVERFLOW_WRAPS (ctype)
6849 && (code == MULT_EXPR
6850 /* If not multiplication, we can only do this if both operands
6851 are divisible by c. */
6852 || (multiple_of_p (ctype, op0, c)
6853 && multiple_of_p (ctype, op1, c))))
6855 if (sub_strict_overflow_p)
6856 *strict_overflow_p = true;
6857 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6858 fold_convert (ctype, t2));
6861 /* If this was a subtraction, negate OP1 and set it to be an addition.
6862 This simplifies the logic below. */
6863 if (tcode == MINUS_EXPR)
6865 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6866 /* If OP1 was not easily negatable, the constant may be OP0. */
6867 if (TREE_CODE (op0) == INTEGER_CST)
6869 std::swap (op0, op1);
6870 std::swap (t1, t2);
6874 if (TREE_CODE (op1) != INTEGER_CST)
6875 break;
6877 /* If either OP1 or C are negative, this optimization is not safe for
6878 some of the division and remainder types while for others we need
6879 to change the code. */
6880 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6882 if (code == CEIL_DIV_EXPR)
6883 code = FLOOR_DIV_EXPR;
6884 else if (code == FLOOR_DIV_EXPR)
6885 code = CEIL_DIV_EXPR;
6886 else if (code != MULT_EXPR
6887 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6888 break;
6891 /* If it's a multiply or a division/modulus operation of a multiple
6892 of our constant, do the operation and verify it doesn't overflow. */
6893 if (code == MULT_EXPR
6894 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6895 TYPE_SIGN (type)))
6897 op1 = const_binop (code, fold_convert (ctype, op1),
6898 fold_convert (ctype, c));
6899 /* We allow the constant to overflow with wrapping semantics. */
6900 if (op1 == 0
6901 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6902 break;
6904 else
6905 break;
6907 /* If we have an unsigned type, we cannot widen the operation since it
6908 will change the result if the original computation overflowed. */
6909 if (TYPE_UNSIGNED (ctype) && ctype != type)
6910 break;
6912 /* The last case is if we are a multiply. In that case, we can
6913 apply the distributive law to commute the multiply and addition
6914 if the multiplication of the constants doesn't overflow
6915 and overflow is defined. With undefined overflow
6916 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6917 But fold_plusminus_mult_expr would factor back any power-of-two
6918 value so do not distribute in the first place in this case. */
6919 if (code == MULT_EXPR
6920 && TYPE_OVERFLOW_WRAPS (ctype)
6921 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6922 return fold_build2 (tcode, ctype,
6923 fold_build2 (code, ctype,
6924 fold_convert (ctype, op0),
6925 fold_convert (ctype, c)),
6926 op1);
6928 break;
6930 case MULT_EXPR:
6931 /* We have a special case here if we are doing something like
6932 (C * 8) % 4 since we know that's zero. */
6933 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6934 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6935 /* If the multiplication can overflow we cannot optimize this. */
6936 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6937 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6938 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6939 TYPE_SIGN (type)))
6941 *strict_overflow_p = true;
6942 return omit_one_operand (type, integer_zero_node, op0);
6945 /* ... fall through ... */
6947 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6948 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6949 /* If we can extract our operation from the LHS, do so and return a
6950 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6951 do something only if the second operand is a constant. */
6952 if (same_p
6953 && TYPE_OVERFLOW_WRAPS (ctype)
6954 && (t1 = extract_muldiv (op0, c, code, wide_type,
6955 strict_overflow_p)) != 0)
6956 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6957 fold_convert (ctype, op1));
6958 else if (tcode == MULT_EXPR && code == MULT_EXPR
6959 && TYPE_OVERFLOW_WRAPS (ctype)
6960 && (t1 = extract_muldiv (op1, c, code, wide_type,
6961 strict_overflow_p)) != 0)
6962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6963 fold_convert (ctype, t1));
6964 else if (TREE_CODE (op1) != INTEGER_CST)
6965 return 0;
6967 /* If these are the same operation types, we can associate them
6968 assuming no overflow. */
6969 if (tcode == code)
6971 bool overflow_p = false;
6972 wi::overflow_type overflow_mul;
6973 signop sign = TYPE_SIGN (ctype);
6974 unsigned prec = TYPE_PRECISION (ctype);
6975 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6976 wi::to_wide (c, prec),
6977 sign, &overflow_mul);
6978 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6979 if (overflow_mul
6980 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6981 overflow_p = true;
6982 if (!overflow_p)
6983 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6984 wide_int_to_tree (ctype, mul));
6987 /* If these operations "cancel" each other, we have the main
6988 optimizations of this pass, which occur when either constant is a
6989 multiple of the other, in which case we replace this with either an
6990 operation or CODE or TCODE.
6992 If we have an unsigned type, we cannot do this since it will change
6993 the result if the original computation overflowed. */
6994 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6995 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6996 || (tcode == MULT_EXPR
6997 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6998 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6999 && code != MULT_EXPR)))
7001 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7002 TYPE_SIGN (type)))
7004 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7005 *strict_overflow_p = true;
7006 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7007 fold_convert (ctype,
7008 const_binop (TRUNC_DIV_EXPR,
7009 op1, c)));
7011 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7012 TYPE_SIGN (type)))
7014 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7015 *strict_overflow_p = true;
7016 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7017 fold_convert (ctype,
7018 const_binop (TRUNC_DIV_EXPR,
7019 c, op1)));
7022 break;
7024 default:
7025 break;
7028 return 0;
7031 /* Return a node which has the indicated constant VALUE (either 0 or
7032 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7033 and is of the indicated TYPE. */
7035 tree
7036 constant_boolean_node (bool value, tree type)
7038 if (type == integer_type_node)
7039 return value ? integer_one_node : integer_zero_node;
7040 else if (type == boolean_type_node)
7041 return value ? boolean_true_node : boolean_false_node;
7042 else if (TREE_CODE (type) == VECTOR_TYPE)
7043 return build_vector_from_val (type,
7044 build_int_cst (TREE_TYPE (type),
7045 value ? -1 : 0));
7046 else
7047 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7051 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7052 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7053 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7054 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7055 COND is the first argument to CODE; otherwise (as in the example
7056 given here), it is the second argument. TYPE is the type of the
7057 original expression. Return NULL_TREE if no simplification is
7058 possible. */
7060 static tree
7061 fold_binary_op_with_conditional_arg (location_t loc,
7062 enum tree_code code,
7063 tree type, tree op0, tree op1,
7064 tree cond, tree arg, int cond_first_p)
7066 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7067 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7068 tree test, true_value, false_value;
7069 tree lhs = NULL_TREE;
7070 tree rhs = NULL_TREE;
7071 enum tree_code cond_code = COND_EXPR;
7073 /* Do not move possibly trapping operations into the conditional as this
7074 pessimizes code and causes gimplification issues when applied late. */
7075 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7076 ANY_INTEGRAL_TYPE_P (type)
7077 && TYPE_OVERFLOW_TRAPS (type), op1))
7078 return NULL_TREE;
7080 if (TREE_CODE (cond) == COND_EXPR
7081 || TREE_CODE (cond) == VEC_COND_EXPR)
7083 test = TREE_OPERAND (cond, 0);
7084 true_value = TREE_OPERAND (cond, 1);
7085 false_value = TREE_OPERAND (cond, 2);
7086 /* If this operand throws an expression, then it does not make
7087 sense to try to perform a logical or arithmetic operation
7088 involving it. */
7089 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7090 lhs = true_value;
7091 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7092 rhs = false_value;
7094 else if (!(TREE_CODE (type) != VECTOR_TYPE
7095 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7097 tree testtype = TREE_TYPE (cond);
7098 test = cond;
7099 true_value = constant_boolean_node (true, testtype);
7100 false_value = constant_boolean_node (false, testtype);
7102 else
7103 /* Detect the case of mixing vector and scalar types - bail out. */
7104 return NULL_TREE;
7106 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7107 cond_code = VEC_COND_EXPR;
7109 /* This transformation is only worthwhile if we don't have to wrap ARG
7110 in a SAVE_EXPR and the operation can be simplified without recursing
7111 on at least one of the branches once its pushed inside the COND_EXPR. */
7112 if (!TREE_CONSTANT (arg)
7113 && (TREE_SIDE_EFFECTS (arg)
7114 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7115 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7116 return NULL_TREE;
7118 arg = fold_convert_loc (loc, arg_type, arg);
7119 if (lhs == 0)
7121 true_value = fold_convert_loc (loc, cond_type, true_value);
7122 if (cond_first_p)
7123 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7124 else
7125 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7127 if (rhs == 0)
7129 false_value = fold_convert_loc (loc, cond_type, false_value);
7130 if (cond_first_p)
7131 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7132 else
7133 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7136 /* Check that we have simplified at least one of the branches. */
7137 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7138 return NULL_TREE;
7140 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7144 /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7146 If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7147 type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7148 if ARG - ZERO_ARG is the same as X.
7150 If ARG is NULL, check for any value of type TYPE.
7152 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7153 and finite. The problematic cases are when X is zero, and its mode
7154 has signed zeros. In the case of rounding towards -infinity,
7155 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7156 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7158 bool
7159 fold_real_zero_addition_p (const_tree type, const_tree arg,
7160 const_tree zero_arg, int negate)
7162 if (!real_zerop (zero_arg))
7163 return false;
7165 /* Don't allow the fold with -fsignaling-nans. */
7166 if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7167 return false;
7169 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7170 if (!HONOR_SIGNED_ZEROS (type))
7171 return true;
7173 /* There is no case that is safe for all rounding modes. */
7174 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7175 return false;
7177 /* In a vector or complex, we would need to check the sign of all zeros. */
7178 if (TREE_CODE (zero_arg) == VECTOR_CST)
7179 zero_arg = uniform_vector_p (zero_arg);
7180 if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7181 return false;
7183 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7184 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7185 negate = !negate;
7187 /* The mode has signed zeros, and we have to honor their sign.
7188 In this situation, there are only two cases we can return true for.
7189 (i) X - 0 is the same as X with default rounding.
7190 (ii) X + 0 is X when X can't possibly be -0.0. */
7191 return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7194 /* Subroutine of match.pd that optimizes comparisons of a division by
7195 a nonzero integer constant against an integer constant, i.e.
7196 X/C1 op C2.
7198 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7199 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7201 enum tree_code
7202 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7203 tree *hi, bool *neg_overflow)
7205 tree prod, tmp, type = TREE_TYPE (c1);
7206 signop sign = TYPE_SIGN (type);
7207 wi::overflow_type overflow;
7209 /* We have to do this the hard way to detect unsigned overflow.
7210 prod = int_const_binop (MULT_EXPR, c1, c2); */
7211 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7212 prod = force_fit_type (type, val, -1, overflow);
7213 *neg_overflow = false;
7215 if (sign == UNSIGNED)
7217 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7218 *lo = prod;
7220 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7221 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7222 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7224 else if (tree_int_cst_sgn (c1) >= 0)
7226 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7227 switch (tree_int_cst_sgn (c2))
7229 case -1:
7230 *neg_overflow = true;
7231 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7232 *hi = prod;
7233 break;
7235 case 0:
7236 *lo = fold_negate_const (tmp, type);
7237 *hi = tmp;
7238 break;
7240 case 1:
7241 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7242 *lo = prod;
7243 break;
7245 default:
7246 gcc_unreachable ();
7249 else
7251 /* A negative divisor reverses the relational operators. */
7252 code = swap_tree_comparison (code);
7254 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7255 switch (tree_int_cst_sgn (c2))
7257 case -1:
7258 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7259 *lo = prod;
7260 break;
7262 case 0:
7263 *hi = fold_negate_const (tmp, type);
7264 *lo = tmp;
7265 break;
7267 case 1:
7268 *neg_overflow = true;
7269 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7270 *hi = prod;
7271 break;
7273 default:
7274 gcc_unreachable ();
7278 if (code != EQ_EXPR && code != NE_EXPR)
7279 return code;
7281 if (TREE_OVERFLOW (*lo)
7282 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7283 *lo = NULL_TREE;
7284 if (TREE_OVERFLOW (*hi)
7285 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7286 *hi = NULL_TREE;
7288 return code;
7292 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7293 equality/inequality test, then return a simplified form of the test
7294 using a sign testing. Otherwise return NULL. TYPE is the desired
7295 result type. */
7297 static tree
7298 fold_single_bit_test_into_sign_test (location_t loc,
7299 enum tree_code code, tree arg0, tree arg1,
7300 tree result_type)
7302 /* If this is testing a single bit, we can optimize the test. */
7303 if ((code == NE_EXPR || code == EQ_EXPR)
7304 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7305 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7307 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7308 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7309 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7311 if (arg00 != NULL_TREE
7312 /* This is only a win if casting to a signed type is cheap,
7313 i.e. when arg00's type is not a partial mode. */
7314 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7316 tree stype = signed_type_for (TREE_TYPE (arg00));
7317 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7318 result_type,
7319 fold_convert_loc (loc, stype, arg00),
7320 build_int_cst (stype, 0));
7324 return NULL_TREE;
7327 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7328 equality/inequality test, then return a simplified form of
7329 the test using shifts and logical operations. Otherwise return
7330 NULL. TYPE is the desired result type. */
7332 tree
7333 fold_single_bit_test (location_t loc, enum tree_code code,
7334 tree arg0, tree arg1, tree result_type)
7336 /* If this is testing a single bit, we can optimize the test. */
7337 if ((code == NE_EXPR || code == EQ_EXPR)
7338 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7339 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7341 tree inner = TREE_OPERAND (arg0, 0);
7342 tree type = TREE_TYPE (arg0);
7343 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7344 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7345 int ops_unsigned;
7346 tree signed_type, unsigned_type, intermediate_type;
7347 tree tem, one;
7349 /* First, see if we can fold the single bit test into a sign-bit
7350 test. */
7351 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7352 result_type);
7353 if (tem)
7354 return tem;
7356 /* Otherwise we have (A & C) != 0 where C is a single bit,
7357 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7358 Similarly for (A & C) == 0. */
7360 /* If INNER is a right shift of a constant and it plus BITNUM does
7361 not overflow, adjust BITNUM and INNER. */
7362 if (TREE_CODE (inner) == RSHIFT_EXPR
7363 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7364 && bitnum < TYPE_PRECISION (type)
7365 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7366 TYPE_PRECISION (type) - bitnum))
7368 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7369 inner = TREE_OPERAND (inner, 0);
7372 /* If we are going to be able to omit the AND below, we must do our
7373 operations as unsigned. If we must use the AND, we have a choice.
7374 Normally unsigned is faster, but for some machines signed is. */
7375 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7376 && !flag_syntax_only) ? 0 : 1;
7378 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7379 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7380 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7381 inner = fold_convert_loc (loc, intermediate_type, inner);
7383 if (bitnum != 0)
7384 inner = build2 (RSHIFT_EXPR, intermediate_type,
7385 inner, size_int (bitnum));
7387 one = build_int_cst (intermediate_type, 1);
7389 if (code == EQ_EXPR)
7390 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7392 /* Put the AND last so it can combine with more things. */
7393 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7395 /* Make sure to return the proper type. */
7396 inner = fold_convert_loc (loc, result_type, inner);
7398 return inner;
7400 return NULL_TREE;
7403 /* Test whether it is preferable to swap two operands, ARG0 and
7404 ARG1, for example because ARG0 is an integer constant and ARG1
7405 isn't. */
7407 bool
7408 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7410 if (CONSTANT_CLASS_P (arg1))
7411 return 0;
7412 if (CONSTANT_CLASS_P (arg0))
7413 return 1;
7415 STRIP_NOPS (arg0);
7416 STRIP_NOPS (arg1);
7418 if (TREE_CONSTANT (arg1))
7419 return 0;
7420 if (TREE_CONSTANT (arg0))
7421 return 1;
7423 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7424 for commutative and comparison operators. Ensuring a canonical
7425 form allows the optimizers to find additional redundancies without
7426 having to explicitly check for both orderings. */
7427 if (TREE_CODE (arg0) == SSA_NAME
7428 && TREE_CODE (arg1) == SSA_NAME
7429 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7430 return 1;
7432 /* Put SSA_NAMEs last. */
7433 if (TREE_CODE (arg1) == SSA_NAME)
7434 return 0;
7435 if (TREE_CODE (arg0) == SSA_NAME)
7436 return 1;
7438 /* Put variables last. */
7439 if (DECL_P (arg1))
7440 return 0;
7441 if (DECL_P (arg0))
7442 return 1;
7444 return 0;
7448 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7449 means A >= Y && A != MAX, but in this case we know that
7450 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7452 static tree
7453 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7455 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7457 if (TREE_CODE (bound) == LT_EXPR)
7458 a = TREE_OPERAND (bound, 0);
7459 else if (TREE_CODE (bound) == GT_EXPR)
7460 a = TREE_OPERAND (bound, 1);
7461 else
7462 return NULL_TREE;
7464 typea = TREE_TYPE (a);
7465 if (!INTEGRAL_TYPE_P (typea)
7466 && !POINTER_TYPE_P (typea))
7467 return NULL_TREE;
7469 if (TREE_CODE (ineq) == LT_EXPR)
7471 a1 = TREE_OPERAND (ineq, 1);
7472 y = TREE_OPERAND (ineq, 0);
7474 else if (TREE_CODE (ineq) == GT_EXPR)
7476 a1 = TREE_OPERAND (ineq, 0);
7477 y = TREE_OPERAND (ineq, 1);
7479 else
7480 return NULL_TREE;
7482 if (TREE_TYPE (a1) != typea)
7483 return NULL_TREE;
7485 if (POINTER_TYPE_P (typea))
7487 /* Convert the pointer types into integer before taking the difference. */
7488 tree ta = fold_convert_loc (loc, ssizetype, a);
7489 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7490 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7492 else
7493 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7495 if (!diff || !integer_onep (diff))
7496 return NULL_TREE;
7498 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7501 /* Fold a sum or difference of at least one multiplication.
7502 Returns the folded tree or NULL if no simplification could be made. */
7504 static tree
7505 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7506 tree arg0, tree arg1)
7508 tree arg00, arg01, arg10, arg11;
7509 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7511 /* (A * C) +- (B * C) -> (A+-B) * C.
7512 (A * C) +- A -> A * (C+-1).
7513 We are most concerned about the case where C is a constant,
7514 but other combinations show up during loop reduction. Since
7515 it is not difficult, try all four possibilities. */
7517 if (TREE_CODE (arg0) == MULT_EXPR)
7519 arg00 = TREE_OPERAND (arg0, 0);
7520 arg01 = TREE_OPERAND (arg0, 1);
7522 else if (TREE_CODE (arg0) == INTEGER_CST)
7524 arg00 = build_one_cst (type);
7525 arg01 = arg0;
7527 else
7529 /* We cannot generate constant 1 for fract. */
7530 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7531 return NULL_TREE;
7532 arg00 = arg0;
7533 arg01 = build_one_cst (type);
7535 if (TREE_CODE (arg1) == MULT_EXPR)
7537 arg10 = TREE_OPERAND (arg1, 0);
7538 arg11 = TREE_OPERAND (arg1, 1);
7540 else if (TREE_CODE (arg1) == INTEGER_CST)
7542 arg10 = build_one_cst (type);
7543 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7544 the purpose of this canonicalization. */
7545 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7546 && negate_expr_p (arg1)
7547 && code == PLUS_EXPR)
7549 arg11 = negate_expr (arg1);
7550 code = MINUS_EXPR;
7552 else
7553 arg11 = arg1;
7555 else
7557 /* We cannot generate constant 1 for fract. */
7558 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7559 return NULL_TREE;
7560 arg10 = arg1;
7561 arg11 = build_one_cst (type);
7563 same = NULL_TREE;
7565 /* Prefer factoring a common non-constant. */
7566 if (operand_equal_p (arg00, arg10, 0))
7567 same = arg00, alt0 = arg01, alt1 = arg11;
7568 else if (operand_equal_p (arg01, arg11, 0))
7569 same = arg01, alt0 = arg00, alt1 = arg10;
7570 else if (operand_equal_p (arg00, arg11, 0))
7571 same = arg00, alt0 = arg01, alt1 = arg10;
7572 else if (operand_equal_p (arg01, arg10, 0))
7573 same = arg01, alt0 = arg00, alt1 = arg11;
7575 /* No identical multiplicands; see if we can find a common
7576 power-of-two factor in non-power-of-two multiplies. This
7577 can help in multi-dimensional array access. */
7578 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7580 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7581 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7582 HOST_WIDE_INT tmp;
7583 bool swap = false;
7584 tree maybe_same;
7586 /* Move min of absolute values to int11. */
7587 if (absu_hwi (int01) < absu_hwi (int11))
7589 tmp = int01, int01 = int11, int11 = tmp;
7590 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7591 maybe_same = arg01;
7592 swap = true;
7594 else
7595 maybe_same = arg11;
7597 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7598 if (factor > 1
7599 && pow2p_hwi (factor)
7600 && (int01 & (factor - 1)) == 0
7601 /* The remainder should not be a constant, otherwise we
7602 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7603 increased the number of multiplications necessary. */
7604 && TREE_CODE (arg10) != INTEGER_CST)
7606 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7607 build_int_cst (TREE_TYPE (arg00),
7608 int01 / int11));
7609 alt1 = arg10;
7610 same = maybe_same;
7611 if (swap)
7612 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7616 if (!same)
7617 return NULL_TREE;
7619 if (! ANY_INTEGRAL_TYPE_P (type)
7620 || TYPE_OVERFLOW_WRAPS (type)
7621 /* We are neither factoring zero nor minus one. */
7622 || TREE_CODE (same) == INTEGER_CST)
7623 return fold_build2_loc (loc, MULT_EXPR, type,
7624 fold_build2_loc (loc, code, type,
7625 fold_convert_loc (loc, type, alt0),
7626 fold_convert_loc (loc, type, alt1)),
7627 fold_convert_loc (loc, type, same));
7629 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7630 same may be minus one and thus the multiplication may overflow. Perform
7631 the sum operation in an unsigned type. */
7632 tree utype = unsigned_type_for (type);
7633 tree tem = fold_build2_loc (loc, code, utype,
7634 fold_convert_loc (loc, utype, alt0),
7635 fold_convert_loc (loc, utype, alt1));
7636 /* If the sum evaluated to a constant that is not -INF the multiplication
7637 cannot overflow. */
7638 if (TREE_CODE (tem) == INTEGER_CST
7639 && (wi::to_wide (tem)
7640 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7641 return fold_build2_loc (loc, MULT_EXPR, type,
7642 fold_convert (type, tem), same);
7644 /* Do not resort to unsigned multiplication because
7645 we lose the no-overflow property of the expression. */
7646 return NULL_TREE;
7649 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7650 specified by EXPR into the buffer PTR of length LEN bytes.
7651 Return the number of bytes placed in the buffer, or zero
7652 upon failure. */
7654 static int
7655 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7657 tree type = TREE_TYPE (expr);
7658 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7659 int byte, offset, word, words;
7660 unsigned char value;
7662 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7663 return 0;
7664 if (off == -1)
7665 off = 0;
7667 if (ptr == NULL)
7668 /* Dry run. */
7669 return MIN (len, total_bytes - off);
7671 words = total_bytes / UNITS_PER_WORD;
7673 for (byte = 0; byte < total_bytes; byte++)
7675 int bitpos = byte * BITS_PER_UNIT;
7676 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7677 number of bytes. */
7678 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7680 if (total_bytes > UNITS_PER_WORD)
7682 word = byte / UNITS_PER_WORD;
7683 if (WORDS_BIG_ENDIAN)
7684 word = (words - 1) - word;
7685 offset = word * UNITS_PER_WORD;
7686 if (BYTES_BIG_ENDIAN)
7687 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7688 else
7689 offset += byte % UNITS_PER_WORD;
7691 else
7692 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7693 if (offset >= off && offset - off < len)
7694 ptr[offset - off] = value;
7696 return MIN (len, total_bytes - off);
7700 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7701 specified by EXPR into the buffer PTR of length LEN bytes.
7702 Return the number of bytes placed in the buffer, or zero
7703 upon failure. */
7705 static int
7706 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7708 tree type = TREE_TYPE (expr);
7709 scalar_mode mode = SCALAR_TYPE_MODE (type);
7710 int total_bytes = GET_MODE_SIZE (mode);
7711 FIXED_VALUE_TYPE value;
7712 tree i_value, i_type;
7714 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7715 return 0;
7717 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7719 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7720 return 0;
7722 value = TREE_FIXED_CST (expr);
7723 i_value = double_int_to_tree (i_type, value.data);
7725 return native_encode_int (i_value, ptr, len, off);
7729 /* Subroutine of native_encode_expr. Encode the REAL_CST
7730 specified by EXPR into the buffer PTR of length LEN bytes.
7731 Return the number of bytes placed in the buffer, or zero
7732 upon failure. */
7734 static int
7735 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7737 tree type = TREE_TYPE (expr);
7738 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7739 int byte, offset, word, words, bitpos;
7740 unsigned char value;
7742 /* There are always 32 bits in each long, no matter the size of
7743 the hosts long. We handle floating point representations with
7744 up to 192 bits. */
7745 long tmp[6];
7747 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7748 return 0;
7749 if (off == -1)
7750 off = 0;
7752 if (ptr == NULL)
7753 /* Dry run. */
7754 return MIN (len, total_bytes - off);
7756 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7758 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7760 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7761 bitpos += BITS_PER_UNIT)
7763 byte = (bitpos / BITS_PER_UNIT) & 3;
7764 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7766 if (UNITS_PER_WORD < 4)
7768 word = byte / UNITS_PER_WORD;
7769 if (WORDS_BIG_ENDIAN)
7770 word = (words - 1) - word;
7771 offset = word * UNITS_PER_WORD;
7772 if (BYTES_BIG_ENDIAN)
7773 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7774 else
7775 offset += byte % UNITS_PER_WORD;
7777 else
7779 offset = byte;
7780 if (BYTES_BIG_ENDIAN)
7782 /* Reverse bytes within each long, or within the entire float
7783 if it's smaller than a long (for HFmode). */
7784 offset = MIN (3, total_bytes - 1) - offset;
7785 gcc_assert (offset >= 0);
7788 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7789 if (offset >= off
7790 && offset - off < len)
7791 ptr[offset - off] = value;
7793 return MIN (len, total_bytes - off);
7796 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7797 specified by EXPR into the buffer PTR of length LEN bytes.
7798 Return the number of bytes placed in the buffer, or zero
7799 upon failure. */
7801 static int
7802 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7804 int rsize, isize;
7805 tree part;
7807 part = TREE_REALPART (expr);
7808 rsize = native_encode_expr (part, ptr, len, off);
7809 if (off == -1 && rsize == 0)
7810 return 0;
7811 part = TREE_IMAGPART (expr);
7812 if (off != -1)
7813 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7814 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7815 len - rsize, off);
7816 if (off == -1 && isize != rsize)
7817 return 0;
7818 return rsize + isize;
7821 /* Like native_encode_vector, but only encode the first COUNT elements.
7822 The other arguments are as for native_encode_vector. */
7824 static int
7825 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7826 int off, unsigned HOST_WIDE_INT count)
7828 tree itype = TREE_TYPE (TREE_TYPE (expr));
7829 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7830 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7832 /* This is the only case in which elements can be smaller than a byte.
7833 Element 0 is always in the lsb of the containing byte. */
7834 unsigned int elt_bits = TYPE_PRECISION (itype);
7835 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7836 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7837 return 0;
7839 if (off == -1)
7840 off = 0;
7842 /* Zero the buffer and then set bits later where necessary. */
7843 int extract_bytes = MIN (len, total_bytes - off);
7844 if (ptr)
7845 memset (ptr, 0, extract_bytes);
7847 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7848 unsigned int first_elt = off * elts_per_byte;
7849 unsigned int extract_elts = extract_bytes * elts_per_byte;
7850 for (unsigned int i = 0; i < extract_elts; ++i)
7852 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7853 if (TREE_CODE (elt) != INTEGER_CST)
7854 return 0;
7856 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7858 unsigned int bit = i * elt_bits;
7859 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7862 return extract_bytes;
7865 int offset = 0;
7866 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7867 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7869 if (off >= size)
7871 off -= size;
7872 continue;
7874 tree elem = VECTOR_CST_ELT (expr, i);
7875 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7876 len - offset, off);
7877 if ((off == -1 && res != size) || res == 0)
7878 return 0;
7879 offset += res;
7880 if (offset >= len)
7881 return (off == -1 && i < count - 1) ? 0 : offset;
7882 if (off != -1)
7883 off = 0;
7885 return offset;
7888 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7889 specified by EXPR into the buffer PTR of length LEN bytes.
7890 Return the number of bytes placed in the buffer, or zero
7891 upon failure. */
7893 static int
7894 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7896 unsigned HOST_WIDE_INT count;
7897 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7898 return 0;
7899 return native_encode_vector_part (expr, ptr, len, off, count);
7903 /* Subroutine of native_encode_expr. Encode the STRING_CST
7904 specified by EXPR into the buffer PTR of length LEN bytes.
7905 Return the number of bytes placed in the buffer, or zero
7906 upon failure. */
7908 static int
7909 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7911 tree type = TREE_TYPE (expr);
7913 /* Wide-char strings are encoded in target byte-order so native
7914 encoding them is trivial. */
7915 if (BITS_PER_UNIT != CHAR_BIT
7916 || TREE_CODE (type) != ARRAY_TYPE
7917 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7918 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7919 return 0;
7921 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7922 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7923 return 0;
7924 if (off == -1)
7925 off = 0;
7926 len = MIN (total_bytes - off, len);
7927 if (ptr == NULL)
7928 /* Dry run. */;
7929 else
7931 int written = 0;
7932 if (off < TREE_STRING_LENGTH (expr))
7934 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7935 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7937 memset (ptr + written, 0, len - written);
7939 return len;
7943 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7944 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7945 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7946 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7947 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7948 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7949 Return the number of bytes placed in the buffer, or zero upon failure. */
7952 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7954 /* We don't support starting at negative offset and -1 is special. */
7955 if (off < -1)
7956 return 0;
7958 switch (TREE_CODE (expr))
7960 case INTEGER_CST:
7961 return native_encode_int (expr, ptr, len, off);
7963 case REAL_CST:
7964 return native_encode_real (expr, ptr, len, off);
7966 case FIXED_CST:
7967 return native_encode_fixed (expr, ptr, len, off);
7969 case COMPLEX_CST:
7970 return native_encode_complex (expr, ptr, len, off);
7972 case VECTOR_CST:
7973 return native_encode_vector (expr, ptr, len, off);
7975 case STRING_CST:
7976 return native_encode_string (expr, ptr, len, off);
7978 default:
7979 return 0;
7983 /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
7984 or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
7985 of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
7986 machine modes, we can't just use build_nonstandard_integer_type. */
7988 tree
7989 find_bitfield_repr_type (int fieldsize, int len)
7991 machine_mode mode;
7992 for (int pass = 0; pass < 2; pass++)
7994 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
7995 FOR_EACH_MODE_IN_CLASS (mode, mclass)
7996 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7997 && known_eq (GET_MODE_PRECISION (mode),
7998 GET_MODE_BITSIZE (mode))
7999 && known_le (GET_MODE_SIZE (mode), len))
8001 tree ret = lang_hooks.types.type_for_mode (mode, 1);
8002 if (ret && TYPE_MODE (ret) == mode)
8003 return ret;
8007 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8008 if (int_n_enabled_p[i]
8009 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8010 && int_n_trees[i].unsigned_type)
8012 tree ret = int_n_trees[i].unsigned_type;
8013 mode = TYPE_MODE (ret);
8014 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8015 && known_eq (GET_MODE_PRECISION (mode),
8016 GET_MODE_BITSIZE (mode))
8017 && known_le (GET_MODE_SIZE (mode), len))
8018 return ret;
8021 return NULL_TREE;
8024 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8025 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8026 to be non-NULL and OFF zero), then in addition to filling the
8027 bytes pointed by PTR with the value also clear any bits pointed
8028 by MASK that are known to be initialized, keep them as is for
8029 e.g. uninitialized padding bits or uninitialized fields. */
8032 native_encode_initializer (tree init, unsigned char *ptr, int len,
8033 int off, unsigned char *mask)
8035 int r;
8037 /* We don't support starting at negative offset and -1 is special. */
8038 if (off < -1 || init == NULL_TREE)
8039 return 0;
8041 gcc_assert (mask == NULL || (off == 0 && ptr));
8043 STRIP_NOPS (init);
8044 switch (TREE_CODE (init))
8046 case VIEW_CONVERT_EXPR:
8047 case NON_LVALUE_EXPR:
8048 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8049 mask);
8050 default:
8051 r = native_encode_expr (init, ptr, len, off);
8052 if (mask)
8053 memset (mask, 0, r);
8054 return r;
8055 case CONSTRUCTOR:
8056 tree type = TREE_TYPE (init);
8057 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8058 if (total_bytes < 0)
8059 return 0;
8060 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8061 return 0;
8062 int o = off == -1 ? 0 : off;
8063 if (TREE_CODE (type) == ARRAY_TYPE)
8065 tree min_index;
8066 unsigned HOST_WIDE_INT cnt;
8067 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8068 constructor_elt *ce;
8070 if (!TYPE_DOMAIN (type)
8071 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8072 return 0;
8074 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8075 if (fieldsize <= 0)
8076 return 0;
8078 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8079 if (ptr)
8080 memset (ptr, '\0', MIN (total_bytes - off, len));
8082 for (cnt = 0; ; cnt++)
8084 tree val = NULL_TREE, index = NULL_TREE;
8085 HOST_WIDE_INT pos = curpos, count = 0;
8086 bool full = false;
8087 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8089 val = ce->value;
8090 index = ce->index;
8092 else if (mask == NULL
8093 || CONSTRUCTOR_NO_CLEARING (init)
8094 || curpos >= total_bytes)
8095 break;
8096 else
8097 pos = total_bytes;
8099 if (index && TREE_CODE (index) == RANGE_EXPR)
8101 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8102 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8103 return 0;
8104 offset_int wpos
8105 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8106 - wi::to_offset (min_index),
8107 TYPE_PRECISION (sizetype));
8108 wpos *= fieldsize;
8109 if (!wi::fits_shwi_p (pos))
8110 return 0;
8111 pos = wpos.to_shwi ();
8112 offset_int wcount
8113 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8114 - wi::to_offset (TREE_OPERAND (index, 0)),
8115 TYPE_PRECISION (sizetype));
8116 if (!wi::fits_shwi_p (wcount))
8117 return 0;
8118 count = wcount.to_shwi ();
8120 else if (index)
8122 if (TREE_CODE (index) != INTEGER_CST)
8123 return 0;
8124 offset_int wpos
8125 = wi::sext (wi::to_offset (index)
8126 - wi::to_offset (min_index),
8127 TYPE_PRECISION (sizetype));
8128 wpos *= fieldsize;
8129 if (!wi::fits_shwi_p (wpos))
8130 return 0;
8131 pos = wpos.to_shwi ();
8134 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8136 if (valueinit == -1)
8138 tree zero = build_zero_cst (TREE_TYPE (type));
8139 r = native_encode_initializer (zero, ptr + curpos,
8140 fieldsize, 0,
8141 mask + curpos);
8142 if (TREE_CODE (zero) == CONSTRUCTOR)
8143 ggc_free (zero);
8144 if (!r)
8145 return 0;
8146 valueinit = curpos;
8147 curpos += fieldsize;
8149 while (curpos != pos)
8151 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8152 memcpy (mask + curpos, mask + valueinit, fieldsize);
8153 curpos += fieldsize;
8157 curpos = pos;
8158 if (val)
8161 if (off == -1
8162 || (curpos >= off
8163 && (curpos + fieldsize
8164 <= (HOST_WIDE_INT) off + len)))
8166 if (full)
8168 if (ptr)
8169 memcpy (ptr + (curpos - o), ptr + (pos - o),
8170 fieldsize);
8171 if (mask)
8172 memcpy (mask + curpos, mask + pos, fieldsize);
8174 else if (!native_encode_initializer (val,
8176 ? ptr + curpos - o
8177 : NULL,
8178 fieldsize,
8179 off == -1 ? -1
8180 : 0,
8181 mask
8182 ? mask + curpos
8183 : NULL))
8184 return 0;
8185 else
8187 full = true;
8188 pos = curpos;
8191 else if (curpos + fieldsize > off
8192 && curpos < (HOST_WIDE_INT) off + len)
8194 /* Partial overlap. */
8195 unsigned char *p = NULL;
8196 int no = 0;
8197 int l;
8198 gcc_assert (mask == NULL);
8199 if (curpos >= off)
8201 if (ptr)
8202 p = ptr + curpos - off;
8203 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8204 fieldsize);
8206 else
8208 p = ptr;
8209 no = off - curpos;
8210 l = len;
8212 if (!native_encode_initializer (val, p, l, no, NULL))
8213 return 0;
8215 curpos += fieldsize;
8217 while (count-- != 0);
8219 return MIN (total_bytes - off, len);
8221 else if (TREE_CODE (type) == RECORD_TYPE
8222 || TREE_CODE (type) == UNION_TYPE)
8224 unsigned HOST_WIDE_INT cnt;
8225 constructor_elt *ce;
8226 tree fld_base = TYPE_FIELDS (type);
8227 tree to_free = NULL_TREE;
8229 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8230 if (ptr != NULL)
8231 memset (ptr, '\0', MIN (total_bytes - o, len));
8232 for (cnt = 0; ; cnt++)
8234 tree val = NULL_TREE, field = NULL_TREE;
8235 HOST_WIDE_INT pos = 0, fieldsize;
8236 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8238 if (to_free)
8240 ggc_free (to_free);
8241 to_free = NULL_TREE;
8244 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8246 val = ce->value;
8247 field = ce->index;
8248 if (field == NULL_TREE)
8249 return 0;
8251 pos = int_byte_position (field);
8252 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8253 continue;
8255 else if (mask == NULL
8256 || CONSTRUCTOR_NO_CLEARING (init))
8257 break;
8258 else
8259 pos = total_bytes;
8261 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8263 tree fld;
8264 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8266 if (TREE_CODE (fld) != FIELD_DECL)
8267 continue;
8268 if (fld == field)
8269 break;
8270 if (DECL_PADDING_P (fld))
8271 continue;
8272 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8273 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8274 return 0;
8275 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8276 continue;
8277 break;
8279 if (fld == NULL_TREE)
8281 if (ce == NULL)
8282 break;
8283 return 0;
8285 fld_base = DECL_CHAIN (fld);
8286 if (fld != field)
8288 cnt--;
8289 field = fld;
8290 pos = int_byte_position (field);
8291 val = build_zero_cst (TREE_TYPE (fld));
8292 if (TREE_CODE (val) == CONSTRUCTOR)
8293 to_free = val;
8297 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8298 && TYPE_DOMAIN (TREE_TYPE (field))
8299 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8301 if (mask || off != -1)
8302 return 0;
8303 if (val == NULL_TREE)
8304 continue;
8305 if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8306 return 0;
8307 fieldsize = int_size_in_bytes (TREE_TYPE (val));
8308 if (fieldsize < 0
8309 || (int) fieldsize != fieldsize
8310 || (pos + fieldsize) > INT_MAX)
8311 return 0;
8312 if (pos + fieldsize > total_bytes)
8314 if (ptr != NULL && total_bytes < len)
8315 memset (ptr + total_bytes, '\0',
8316 MIN (pos + fieldsize, len) - total_bytes);
8317 total_bytes = pos + fieldsize;
8320 else
8322 if (DECL_SIZE_UNIT (field) == NULL_TREE
8323 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8324 return 0;
8325 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8327 if (fieldsize == 0)
8328 continue;
8330 if (DECL_BIT_FIELD (field))
8332 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8333 return 0;
8334 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8335 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8336 if (bpos % BITS_PER_UNIT)
8337 bpos %= BITS_PER_UNIT;
8338 else
8339 bpos = 0;
8340 fieldsize += bpos;
8341 epos = fieldsize % BITS_PER_UNIT;
8342 fieldsize += BITS_PER_UNIT - 1;
8343 fieldsize /= BITS_PER_UNIT;
8346 if (off != -1 && pos + fieldsize <= off)
8347 continue;
8349 if (val == NULL_TREE)
8350 continue;
8352 if (DECL_BIT_FIELD (field))
8354 /* FIXME: Handle PDP endian. */
8355 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8356 return 0;
8358 if (TREE_CODE (val) != INTEGER_CST)
8359 return 0;
8361 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8362 tree repr_type = NULL_TREE;
8363 HOST_WIDE_INT rpos = 0;
8364 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8366 rpos = int_byte_position (repr);
8367 repr_type = TREE_TYPE (repr);
8369 else
8371 repr_type = find_bitfield_repr_type (fieldsize, len);
8372 if (repr_type == NULL_TREE)
8373 return 0;
8374 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8375 gcc_assert (repr_size > 0 && repr_size <= len);
8376 if (pos + repr_size <= o + len)
8377 rpos = pos;
8378 else
8380 rpos = o + len - repr_size;
8381 gcc_assert (rpos <= pos);
8385 if (rpos > pos)
8386 return 0;
8387 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8388 int diff = (TYPE_PRECISION (repr_type)
8389 - TYPE_PRECISION (TREE_TYPE (field)));
8390 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8391 if (!BYTES_BIG_ENDIAN)
8392 w = wi::lshift (w, bitoff);
8393 else
8394 w = wi::lshift (w, diff - bitoff);
8395 val = wide_int_to_tree (repr_type, w);
8397 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8398 / BITS_PER_UNIT + 1];
8399 int l = native_encode_int (val, buf, sizeof buf, 0);
8400 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8401 return 0;
8403 if (ptr == NULL)
8404 continue;
8406 /* If the bitfield does not start at byte boundary, handle
8407 the partial byte at the start. */
8408 if (bpos
8409 && (off == -1 || (pos >= off && len >= 1)))
8411 if (!BYTES_BIG_ENDIAN)
8413 int msk = (1 << bpos) - 1;
8414 buf[pos - rpos] &= ~msk;
8415 buf[pos - rpos] |= ptr[pos - o] & msk;
8416 if (mask)
8418 if (fieldsize > 1 || epos == 0)
8419 mask[pos] &= msk;
8420 else
8421 mask[pos] &= (msk | ~((1 << epos) - 1));
8424 else
8426 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8427 buf[pos - rpos] &= msk;
8428 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8429 if (mask)
8431 if (fieldsize > 1 || epos == 0)
8432 mask[pos] &= ~msk;
8433 else
8434 mask[pos] &= (~msk
8435 | ((1 << (BITS_PER_UNIT - epos))
8436 - 1));
8440 /* If the bitfield does not end at byte boundary, handle
8441 the partial byte at the end. */
8442 if (epos
8443 && (off == -1
8444 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8446 if (!BYTES_BIG_ENDIAN)
8448 int msk = (1 << epos) - 1;
8449 buf[pos - rpos + fieldsize - 1] &= msk;
8450 buf[pos - rpos + fieldsize - 1]
8451 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8452 if (mask && (fieldsize > 1 || bpos == 0))
8453 mask[pos + fieldsize - 1] &= ~msk;
8455 else
8457 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8458 buf[pos - rpos + fieldsize - 1] &= ~msk;
8459 buf[pos - rpos + fieldsize - 1]
8460 |= ptr[pos + fieldsize - 1 - o] & msk;
8461 if (mask && (fieldsize > 1 || bpos == 0))
8462 mask[pos + fieldsize - 1] &= msk;
8465 if (off == -1
8466 || (pos >= off
8467 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8469 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8470 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8471 memset (mask + pos + (bpos != 0), 0,
8472 fieldsize - (bpos != 0) - (epos != 0));
8474 else
8476 /* Partial overlap. */
8477 HOST_WIDE_INT fsz = fieldsize;
8478 gcc_assert (mask == NULL);
8479 if (pos < off)
8481 fsz -= (off - pos);
8482 pos = off;
8484 if (pos + fsz > (HOST_WIDE_INT) off + len)
8485 fsz = (HOST_WIDE_INT) off + len - pos;
8486 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8488 continue;
8491 if (off == -1
8492 || (pos >= off
8493 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8495 int fldsize = fieldsize;
8496 if (off == -1)
8498 tree fld = DECL_CHAIN (field);
8499 while (fld)
8501 if (TREE_CODE (fld) == FIELD_DECL)
8502 break;
8503 fld = DECL_CHAIN (fld);
8505 if (fld == NULL_TREE)
8506 fldsize = len - pos;
8508 r = native_encode_initializer (val, ptr ? ptr + pos - o
8509 : NULL,
8510 fldsize,
8511 off == -1 ? -1 : 0,
8512 mask ? mask + pos : NULL);
8513 if (!r)
8514 return 0;
8515 if (off == -1
8516 && fldsize != fieldsize
8517 && r > fieldsize
8518 && pos + r > total_bytes)
8519 total_bytes = pos + r;
8521 else
8523 /* Partial overlap. */
8524 unsigned char *p = NULL;
8525 int no = 0;
8526 int l;
8527 gcc_assert (mask == NULL);
8528 if (pos >= off)
8530 if (ptr)
8531 p = ptr + pos - off;
8532 l = MIN ((HOST_WIDE_INT) off + len - pos,
8533 fieldsize);
8535 else
8537 p = ptr;
8538 no = off - pos;
8539 l = len;
8541 if (!native_encode_initializer (val, p, l, no, NULL))
8542 return 0;
8545 return MIN (total_bytes - off, len);
8547 return 0;
8552 /* Subroutine of native_interpret_expr. Interpret the contents of
8553 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8554 If the buffer cannot be interpreted, return NULL_TREE. */
8556 static tree
8557 native_interpret_int (tree type, const unsigned char *ptr, int len)
8559 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8561 if (total_bytes > len
8562 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8563 return NULL_TREE;
8565 wide_int result = wi::from_buffer (ptr, total_bytes);
8567 return wide_int_to_tree (type, result);
8571 /* Subroutine of native_interpret_expr. Interpret the contents of
8572 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8573 If the buffer cannot be interpreted, return NULL_TREE. */
8575 static tree
8576 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8578 scalar_mode mode = SCALAR_TYPE_MODE (type);
8579 int total_bytes = GET_MODE_SIZE (mode);
8580 double_int result;
8581 FIXED_VALUE_TYPE fixed_value;
8583 if (total_bytes > len
8584 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8585 return NULL_TREE;
8587 result = double_int::from_buffer (ptr, total_bytes);
8588 fixed_value = fixed_from_double_int (result, mode);
8590 return build_fixed (type, fixed_value);
8594 /* Subroutine of native_interpret_expr. Interpret the contents of
8595 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8596 If the buffer cannot be interpreted, return NULL_TREE. */
8598 static tree
8599 native_interpret_real (tree type, const unsigned char *ptr, int len)
8601 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8602 int total_bytes = GET_MODE_SIZE (mode);
8603 unsigned char value;
8604 /* There are always 32 bits in each long, no matter the size of
8605 the hosts long. We handle floating point representations with
8606 up to 192 bits. */
8607 REAL_VALUE_TYPE r;
8608 long tmp[6];
8610 if (total_bytes > len || total_bytes > 24)
8611 return NULL_TREE;
8612 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8614 memset (tmp, 0, sizeof (tmp));
8615 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8616 bitpos += BITS_PER_UNIT)
8618 /* Both OFFSET and BYTE index within a long;
8619 bitpos indexes the whole float. */
8620 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8621 if (UNITS_PER_WORD < 4)
8623 int word = byte / UNITS_PER_WORD;
8624 if (WORDS_BIG_ENDIAN)
8625 word = (words - 1) - word;
8626 offset = word * UNITS_PER_WORD;
8627 if (BYTES_BIG_ENDIAN)
8628 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8629 else
8630 offset += byte % UNITS_PER_WORD;
8632 else
8634 offset = byte;
8635 if (BYTES_BIG_ENDIAN)
8637 /* Reverse bytes within each long, or within the entire float
8638 if it's smaller than a long (for HFmode). */
8639 offset = MIN (3, total_bytes - 1) - offset;
8640 gcc_assert (offset >= 0);
8643 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8645 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8648 real_from_target (&r, tmp, mode);
8649 tree ret = build_real (type, r);
8650 if (MODE_COMPOSITE_P (mode))
8652 /* For floating point values in composite modes, punt if this folding
8653 doesn't preserve bit representation. As the mode doesn't have fixed
8654 precision while GCC pretends it does, there could be valid values that
8655 GCC can't really represent accurately. See PR95450. */
8656 unsigned char buf[24];
8657 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8658 || memcmp (ptr, buf, total_bytes) != 0)
8659 ret = NULL_TREE;
8661 return ret;
8665 /* Subroutine of native_interpret_expr. Interpret the contents of
8666 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8667 If the buffer cannot be interpreted, return NULL_TREE. */
8669 static tree
8670 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8672 tree etype, rpart, ipart;
8673 int size;
8675 etype = TREE_TYPE (type);
8676 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8677 if (size * 2 > len)
8678 return NULL_TREE;
8679 rpart = native_interpret_expr (etype, ptr, size);
8680 if (!rpart)
8681 return NULL_TREE;
8682 ipart = native_interpret_expr (etype, ptr+size, size);
8683 if (!ipart)
8684 return NULL_TREE;
8685 return build_complex (type, rpart, ipart);
8688 /* Read a vector of type TYPE from the target memory image given by BYTES,
8689 which contains LEN bytes. The vector is known to be encodable using
8690 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8692 Return the vector on success, otherwise return null. */
8694 static tree
8695 native_interpret_vector_part (tree type, const unsigned char *bytes,
8696 unsigned int len, unsigned int npatterns,
8697 unsigned int nelts_per_pattern)
8699 tree elt_type = TREE_TYPE (type);
8700 if (VECTOR_BOOLEAN_TYPE_P (type)
8701 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8703 /* This is the only case in which elements can be smaller than a byte.
8704 Element 0 is always in the lsb of the containing byte. */
8705 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8706 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8707 return NULL_TREE;
8709 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8710 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8712 unsigned int bit_index = i * elt_bits;
8713 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8714 unsigned int lsb = bit_index % BITS_PER_UNIT;
8715 builder.quick_push (bytes[byte_index] & (1 << lsb)
8716 ? build_all_ones_cst (elt_type)
8717 : build_zero_cst (elt_type));
8719 return builder.build ();
8722 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8723 if (elt_bytes * npatterns * nelts_per_pattern > len)
8724 return NULL_TREE;
8726 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8727 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8729 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8730 if (!elt)
8731 return NULL_TREE;
8732 builder.quick_push (elt);
8733 bytes += elt_bytes;
8735 return builder.build ();
8738 /* Subroutine of native_interpret_expr. Interpret the contents of
8739 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8740 If the buffer cannot be interpreted, return NULL_TREE. */
8742 static tree
8743 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8745 tree etype;
8746 unsigned int size;
8747 unsigned HOST_WIDE_INT count;
8749 etype = TREE_TYPE (type);
8750 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8751 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8752 || size * count > len)
8753 return NULL_TREE;
8755 return native_interpret_vector_part (type, ptr, len, count, 1);
8759 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8760 the buffer PTR of length LEN as a constant of type TYPE. For
8761 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8762 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8763 return NULL_TREE. */
8765 tree
8766 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8768 switch (TREE_CODE (type))
8770 case INTEGER_TYPE:
8771 case ENUMERAL_TYPE:
8772 case BOOLEAN_TYPE:
8773 case POINTER_TYPE:
8774 case REFERENCE_TYPE:
8775 return native_interpret_int (type, ptr, len);
8777 case REAL_TYPE:
8778 return native_interpret_real (type, ptr, len);
8780 case FIXED_POINT_TYPE:
8781 return native_interpret_fixed (type, ptr, len);
8783 case COMPLEX_TYPE:
8784 return native_interpret_complex (type, ptr, len);
8786 case VECTOR_TYPE:
8787 return native_interpret_vector (type, ptr, len);
8789 default:
8790 return NULL_TREE;
8794 /* Returns true if we can interpret the contents of a native encoding
8795 as TYPE. */
8797 bool
8798 can_native_interpret_type_p (tree type)
8800 switch (TREE_CODE (type))
8802 case INTEGER_TYPE:
8803 case ENUMERAL_TYPE:
8804 case BOOLEAN_TYPE:
8805 case POINTER_TYPE:
8806 case REFERENCE_TYPE:
8807 case FIXED_POINT_TYPE:
8808 case REAL_TYPE:
8809 case COMPLEX_TYPE:
8810 case VECTOR_TYPE:
8811 return true;
8812 default:
8813 return false;
8817 /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8818 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8820 tree
8821 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8822 int len)
8824 vec<constructor_elt, va_gc> *elts = NULL;
8825 if (TREE_CODE (type) == ARRAY_TYPE)
8827 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8828 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8829 return NULL_TREE;
8831 HOST_WIDE_INT cnt = 0;
8832 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8834 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8835 return NULL_TREE;
8836 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8838 if (eltsz == 0)
8839 cnt = 0;
8840 HOST_WIDE_INT pos = 0;
8841 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8843 tree v = NULL_TREE;
8844 if (pos >= len || pos + eltsz > len)
8845 return NULL_TREE;
8846 if (can_native_interpret_type_p (TREE_TYPE (type)))
8848 v = native_interpret_expr (TREE_TYPE (type),
8849 ptr + off + pos, eltsz);
8850 if (v == NULL_TREE)
8851 return NULL_TREE;
8853 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8854 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8855 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8856 eltsz);
8857 if (v == NULL_TREE)
8858 return NULL_TREE;
8859 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8861 return build_constructor (type, elts);
8863 if (TREE_CODE (type) != RECORD_TYPE)
8864 return NULL_TREE;
8865 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8867 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8868 continue;
8869 tree fld = field;
8870 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8871 int diff = 0;
8872 tree v = NULL_TREE;
8873 if (DECL_BIT_FIELD (field))
8875 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8876 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8878 poly_int64 bitoffset;
8879 poly_uint64 field_offset, fld_offset;
8880 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8881 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8882 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8883 else
8884 bitoffset = 0;
8885 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8886 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8887 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8888 - TYPE_PRECISION (TREE_TYPE (field)));
8889 if (!bitoffset.is_constant (&bitoff)
8890 || bitoff < 0
8891 || bitoff > diff)
8892 return NULL_TREE;
8894 else
8896 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8897 return NULL_TREE;
8898 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8899 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8900 bpos %= BITS_PER_UNIT;
8901 fieldsize += bpos;
8902 fieldsize += BITS_PER_UNIT - 1;
8903 fieldsize /= BITS_PER_UNIT;
8904 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8905 if (repr_type == NULL_TREE)
8906 return NULL_TREE;
8907 sz = int_size_in_bytes (repr_type);
8908 if (sz < 0 || sz > len)
8909 return NULL_TREE;
8910 pos = int_byte_position (field);
8911 if (pos < 0 || pos > len || pos + fieldsize > len)
8912 return NULL_TREE;
8913 HOST_WIDE_INT rpos;
8914 if (pos + sz <= len)
8915 rpos = pos;
8916 else
8918 rpos = len - sz;
8919 gcc_assert (rpos <= pos);
8921 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8922 pos = rpos;
8923 diff = (TYPE_PRECISION (repr_type)
8924 - TYPE_PRECISION (TREE_TYPE (field)));
8925 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8926 if (v == NULL_TREE)
8927 return NULL_TREE;
8928 fld = NULL_TREE;
8932 if (fld)
8934 sz = int_size_in_bytes (TREE_TYPE (fld));
8935 if (sz < 0 || sz > len)
8936 return NULL_TREE;
8937 tree byte_pos = byte_position (fld);
8938 if (!tree_fits_shwi_p (byte_pos))
8939 return NULL_TREE;
8940 pos = tree_to_shwi (byte_pos);
8941 if (pos < 0 || pos > len || pos + sz > len)
8942 return NULL_TREE;
8944 if (fld == NULL_TREE)
8945 /* Already handled above. */;
8946 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8948 v = native_interpret_expr (TREE_TYPE (fld),
8949 ptr + off + pos, sz);
8950 if (v == NULL_TREE)
8951 return NULL_TREE;
8953 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8954 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8955 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8956 if (v == NULL_TREE)
8957 return NULL_TREE;
8958 if (fld != field)
8960 if (TREE_CODE (v) != INTEGER_CST)
8961 return NULL_TREE;
8963 /* FIXME: Figure out how to handle PDP endian bitfields. */
8964 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8965 return NULL_TREE;
8966 if (!BYTES_BIG_ENDIAN)
8967 v = wide_int_to_tree (TREE_TYPE (field),
8968 wi::lrshift (wi::to_wide (v), bitoff));
8969 else
8970 v = wide_int_to_tree (TREE_TYPE (field),
8971 wi::lrshift (wi::to_wide (v),
8972 diff - bitoff));
8974 CONSTRUCTOR_APPEND_ELT (elts, field, v);
8976 return build_constructor (type, elts);
8979 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8980 or extracted constant positions and/or sizes aren't byte aligned. */
8982 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8983 bits between adjacent elements. AMNT should be within
8984 [0, BITS_PER_UNIT).
8985 Example, AMNT = 2:
8986 00011111|11100000 << 2 = 01111111|10000000
8987 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8989 void
8990 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8991 unsigned int amnt)
8993 if (amnt == 0)
8994 return;
8996 unsigned char carry_over = 0U;
8997 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8998 unsigned char clear_mask = (~0U) << amnt;
9000 for (unsigned int i = 0; i < sz; i++)
9002 unsigned prev_carry_over = carry_over;
9003 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9005 ptr[i] <<= amnt;
9006 if (i != 0)
9008 ptr[i] &= clear_mask;
9009 ptr[i] |= prev_carry_over;
9014 /* Like shift_bytes_in_array_left but for big-endian.
9015 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9016 bits between adjacent elements. AMNT should be within
9017 [0, BITS_PER_UNIT).
9018 Example, AMNT = 2:
9019 00011111|11100000 >> 2 = 00000111|11111000
9020 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9022 void
9023 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9024 unsigned int amnt)
9026 if (amnt == 0)
9027 return;
9029 unsigned char carry_over = 0U;
9030 unsigned char carry_mask = ~(~0U << amnt);
9032 for (unsigned int i = 0; i < sz; i++)
9034 unsigned prev_carry_over = carry_over;
9035 carry_over = ptr[i] & carry_mask;
9037 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9038 ptr[i] >>= amnt;
9039 ptr[i] |= prev_carry_over;
9043 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9044 directly on the VECTOR_CST encoding, in a way that works for variable-
9045 length vectors. Return the resulting VECTOR_CST on success or null
9046 on failure. */
9048 static tree
9049 fold_view_convert_vector_encoding (tree type, tree expr)
9051 tree expr_type = TREE_TYPE (expr);
9052 poly_uint64 type_bits, expr_bits;
9053 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9054 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9055 return NULL_TREE;
9057 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9058 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9059 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9060 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9062 /* We can only preserve the semantics of a stepped pattern if the new
9063 vector element is an integer of the same size. */
9064 if (VECTOR_CST_STEPPED_P (expr)
9065 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9066 return NULL_TREE;
9068 /* The number of bits needed to encode one element from every pattern
9069 of the original vector. */
9070 unsigned int expr_sequence_bits
9071 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9073 /* The number of bits needed to encode one element from every pattern
9074 of the result. */
9075 unsigned int type_sequence_bits
9076 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9078 /* Don't try to read more bytes than are available, which can happen
9079 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9080 The general VIEW_CONVERT handling can cope with that case, so there's
9081 no point complicating things here. */
9082 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9083 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9084 BITS_PER_UNIT);
9085 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9086 if (known_gt (buffer_bits, expr_bits))
9087 return NULL_TREE;
9089 /* Get enough bytes of EXPR to form the new encoding. */
9090 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9091 buffer.quick_grow (buffer_bytes);
9092 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9093 buffer_bits / expr_elt_bits)
9094 != (int) buffer_bytes)
9095 return NULL_TREE;
9097 /* Reencode the bytes as TYPE. */
9098 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9099 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9100 type_npatterns, nelts_per_pattern);
9103 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9104 TYPE at compile-time. If we're unable to perform the conversion
9105 return NULL_TREE. */
9107 static tree
9108 fold_view_convert_expr (tree type, tree expr)
9110 /* We support up to 512-bit values (for V8DFmode). */
9111 unsigned char buffer[64];
9112 int len;
9114 /* Check that the host and target are sane. */
9115 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9116 return NULL_TREE;
9118 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9119 if (tree res = fold_view_convert_vector_encoding (type, expr))
9120 return res;
9122 len = native_encode_expr (expr, buffer, sizeof (buffer));
9123 if (len == 0)
9124 return NULL_TREE;
9126 return native_interpret_expr (type, buffer, len);
9129 /* Build an expression for the address of T. Folds away INDIRECT_REF
9130 to avoid confusing the gimplify process. */
9132 tree
9133 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9135 /* The size of the object is not relevant when talking about its address. */
9136 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9137 t = TREE_OPERAND (t, 0);
9139 if (TREE_CODE (t) == INDIRECT_REF)
9141 t = TREE_OPERAND (t, 0);
9143 if (TREE_TYPE (t) != ptrtype)
9144 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9146 else if (TREE_CODE (t) == MEM_REF
9147 && integer_zerop (TREE_OPERAND (t, 1)))
9149 t = TREE_OPERAND (t, 0);
9151 if (TREE_TYPE (t) != ptrtype)
9152 t = fold_convert_loc (loc, ptrtype, t);
9154 else if (TREE_CODE (t) == MEM_REF
9155 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9156 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9157 TREE_OPERAND (t, 0),
9158 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9159 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9161 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9163 if (TREE_TYPE (t) != ptrtype)
9164 t = fold_convert_loc (loc, ptrtype, t);
9166 else
9167 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9169 return t;
9172 /* Build an expression for the address of T. */
9174 tree
9175 build_fold_addr_expr_loc (location_t loc, tree t)
9177 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9179 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9182 /* Fold a unary expression of code CODE and type TYPE with operand
9183 OP0. Return the folded expression if folding is successful.
9184 Otherwise, return NULL_TREE. */
9186 tree
9187 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9189 tree tem;
9190 tree arg0;
9191 enum tree_code_class kind = TREE_CODE_CLASS (code);
9193 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9194 && TREE_CODE_LENGTH (code) == 1);
9196 arg0 = op0;
9197 if (arg0)
9199 if (CONVERT_EXPR_CODE_P (code)
9200 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9202 /* Don't use STRIP_NOPS, because signedness of argument type
9203 matters. */
9204 STRIP_SIGN_NOPS (arg0);
9206 else
9208 /* Strip any conversions that don't change the mode. This
9209 is safe for every expression, except for a comparison
9210 expression because its signedness is derived from its
9211 operands.
9213 Note that this is done as an internal manipulation within
9214 the constant folder, in order to find the simplest
9215 representation of the arguments so that their form can be
9216 studied. In any cases, the appropriate type conversions
9217 should be put back in the tree that will get out of the
9218 constant folder. */
9219 STRIP_NOPS (arg0);
9222 if (CONSTANT_CLASS_P (arg0))
9224 tree tem = const_unop (code, type, arg0);
9225 if (tem)
9227 if (TREE_TYPE (tem) != type)
9228 tem = fold_convert_loc (loc, type, tem);
9229 return tem;
9234 tem = generic_simplify (loc, code, type, op0);
9235 if (tem)
9236 return tem;
9238 if (TREE_CODE_CLASS (code) == tcc_unary)
9240 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9241 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9242 fold_build1_loc (loc, code, type,
9243 fold_convert_loc (loc, TREE_TYPE (op0),
9244 TREE_OPERAND (arg0, 1))));
9245 else if (TREE_CODE (arg0) == COND_EXPR)
9247 tree arg01 = TREE_OPERAND (arg0, 1);
9248 tree arg02 = TREE_OPERAND (arg0, 2);
9249 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9250 arg01 = fold_build1_loc (loc, code, type,
9251 fold_convert_loc (loc,
9252 TREE_TYPE (op0), arg01));
9253 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9254 arg02 = fold_build1_loc (loc, code, type,
9255 fold_convert_loc (loc,
9256 TREE_TYPE (op0), arg02));
9257 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9258 arg01, arg02);
9260 /* If this was a conversion, and all we did was to move into
9261 inside the COND_EXPR, bring it back out. But leave it if
9262 it is a conversion from integer to integer and the
9263 result precision is no wider than a word since such a
9264 conversion is cheap and may be optimized away by combine,
9265 while it couldn't if it were outside the COND_EXPR. Then return
9266 so we don't get into an infinite recursion loop taking the
9267 conversion out and then back in. */
9269 if ((CONVERT_EXPR_CODE_P (code)
9270 || code == NON_LVALUE_EXPR)
9271 && TREE_CODE (tem) == COND_EXPR
9272 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9273 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9274 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9275 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9276 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9277 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9278 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9279 && (INTEGRAL_TYPE_P
9280 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9281 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9282 || flag_syntax_only))
9283 tem = build1_loc (loc, code, type,
9284 build3 (COND_EXPR,
9285 TREE_TYPE (TREE_OPERAND
9286 (TREE_OPERAND (tem, 1), 0)),
9287 TREE_OPERAND (tem, 0),
9288 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9289 TREE_OPERAND (TREE_OPERAND (tem, 2),
9290 0)));
9291 return tem;
9295 switch (code)
9297 case NON_LVALUE_EXPR:
9298 if (!maybe_lvalue_p (op0))
9299 return fold_convert_loc (loc, type, op0);
9300 return NULL_TREE;
9302 CASE_CONVERT:
9303 case FLOAT_EXPR:
9304 case FIX_TRUNC_EXPR:
9305 if (COMPARISON_CLASS_P (op0))
9307 /* If we have (type) (a CMP b) and type is an integral type, return
9308 new expression involving the new type. Canonicalize
9309 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9310 non-integral type.
9311 Do not fold the result as that would not simplify further, also
9312 folding again results in recursions. */
9313 if (TREE_CODE (type) == BOOLEAN_TYPE)
9314 return build2_loc (loc, TREE_CODE (op0), type,
9315 TREE_OPERAND (op0, 0),
9316 TREE_OPERAND (op0, 1));
9317 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9318 && TREE_CODE (type) != VECTOR_TYPE)
9319 return build3_loc (loc, COND_EXPR, type, op0,
9320 constant_boolean_node (true, type),
9321 constant_boolean_node (false, type));
9324 /* Handle (T *)&A.B.C for A being of type T and B and C
9325 living at offset zero. This occurs frequently in
9326 C++ upcasting and then accessing the base. */
9327 if (TREE_CODE (op0) == ADDR_EXPR
9328 && POINTER_TYPE_P (type)
9329 && handled_component_p (TREE_OPERAND (op0, 0)))
9331 poly_int64 bitsize, bitpos;
9332 tree offset;
9333 machine_mode mode;
9334 int unsignedp, reversep, volatilep;
9335 tree base
9336 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9337 &offset, &mode, &unsignedp, &reversep,
9338 &volatilep);
9339 /* If the reference was to a (constant) zero offset, we can use
9340 the address of the base if it has the same base type
9341 as the result type and the pointer type is unqualified. */
9342 if (!offset
9343 && known_eq (bitpos, 0)
9344 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9345 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9346 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9347 return fold_convert_loc (loc, type,
9348 build_fold_addr_expr_loc (loc, base));
9351 if (TREE_CODE (op0) == MODIFY_EXPR
9352 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9353 /* Detect assigning a bitfield. */
9354 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9355 && DECL_BIT_FIELD
9356 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9358 /* Don't leave an assignment inside a conversion
9359 unless assigning a bitfield. */
9360 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9361 /* First do the assignment, then return converted constant. */
9362 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9363 suppress_warning (tem /* What warning? */);
9364 TREE_USED (tem) = 1;
9365 return tem;
9368 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9369 constants (if x has signed type, the sign bit cannot be set
9370 in c). This folds extension into the BIT_AND_EXPR.
9371 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9372 very likely don't have maximal range for their precision and this
9373 transformation effectively doesn't preserve non-maximal ranges. */
9374 if (TREE_CODE (type) == INTEGER_TYPE
9375 && TREE_CODE (op0) == BIT_AND_EXPR
9376 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9378 tree and_expr = op0;
9379 tree and0 = TREE_OPERAND (and_expr, 0);
9380 tree and1 = TREE_OPERAND (and_expr, 1);
9381 int change = 0;
9383 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9384 || (TYPE_PRECISION (type)
9385 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9386 change = 1;
9387 else if (TYPE_PRECISION (TREE_TYPE (and1))
9388 <= HOST_BITS_PER_WIDE_INT
9389 && tree_fits_uhwi_p (and1))
9391 unsigned HOST_WIDE_INT cst;
9393 cst = tree_to_uhwi (and1);
9394 cst &= HOST_WIDE_INT_M1U
9395 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9396 change = (cst == 0);
9397 if (change
9398 && !flag_syntax_only
9399 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9400 == ZERO_EXTEND))
9402 tree uns = unsigned_type_for (TREE_TYPE (and0));
9403 and0 = fold_convert_loc (loc, uns, and0);
9404 and1 = fold_convert_loc (loc, uns, and1);
9407 if (change)
9409 tem = force_fit_type (type, wi::to_widest (and1), 0,
9410 TREE_OVERFLOW (and1));
9411 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9412 fold_convert_loc (loc, type, and0), tem);
9416 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9417 cast (T1)X will fold away. We assume that this happens when X itself
9418 is a cast. */
9419 if (POINTER_TYPE_P (type)
9420 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9421 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9423 tree arg00 = TREE_OPERAND (arg0, 0);
9424 tree arg01 = TREE_OPERAND (arg0, 1);
9426 /* If -fsanitize=alignment, avoid this optimization in GENERIC
9427 when the pointed type needs higher alignment than
9428 the p+ first operand's pointed type. */
9429 if (!in_gimple_form
9430 && sanitize_flags_p (SANITIZE_ALIGNMENT)
9431 && (min_align_of_type (TREE_TYPE (type))
9432 > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9433 return NULL_TREE;
9435 arg00 = fold_convert_loc (loc, type, arg00);
9436 return fold_build_pointer_plus_loc (loc, arg00, arg01);
9439 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9440 of the same precision, and X is an integer type not narrower than
9441 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9442 if (INTEGRAL_TYPE_P (type)
9443 && TREE_CODE (op0) == BIT_NOT_EXPR
9444 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9445 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9446 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9448 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9449 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9450 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9451 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9452 fold_convert_loc (loc, type, tem));
9455 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9456 type of X and Y (integer types only). */
9457 if (INTEGRAL_TYPE_P (type)
9458 && TREE_CODE (op0) == MULT_EXPR
9459 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9460 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9462 /* Be careful not to introduce new overflows. */
9463 tree mult_type;
9464 if (TYPE_OVERFLOW_WRAPS (type))
9465 mult_type = type;
9466 else
9467 mult_type = unsigned_type_for (type);
9469 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9471 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9472 fold_convert_loc (loc, mult_type,
9473 TREE_OPERAND (op0, 0)),
9474 fold_convert_loc (loc, mult_type,
9475 TREE_OPERAND (op0, 1)));
9476 return fold_convert_loc (loc, type, tem);
9480 return NULL_TREE;
9482 case VIEW_CONVERT_EXPR:
9483 if (TREE_CODE (op0) == MEM_REF)
9485 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9486 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9487 tem = fold_build2_loc (loc, MEM_REF, type,
9488 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9489 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9490 return tem;
9493 return NULL_TREE;
9495 case NEGATE_EXPR:
9496 tem = fold_negate_expr (loc, arg0);
9497 if (tem)
9498 return fold_convert_loc (loc, type, tem);
9499 return NULL_TREE;
9501 case ABS_EXPR:
9502 /* Convert fabs((double)float) into (double)fabsf(float). */
9503 if (TREE_CODE (arg0) == NOP_EXPR
9504 && TREE_CODE (type) == REAL_TYPE)
9506 tree targ0 = strip_float_extensions (arg0);
9507 if (targ0 != arg0)
9508 return fold_convert_loc (loc, type,
9509 fold_build1_loc (loc, ABS_EXPR,
9510 TREE_TYPE (targ0),
9511 targ0));
9513 return NULL_TREE;
9515 case BIT_NOT_EXPR:
9516 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9517 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9518 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9519 fold_convert_loc (loc, type,
9520 TREE_OPERAND (arg0, 0)))))
9521 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9522 fold_convert_loc (loc, type,
9523 TREE_OPERAND (arg0, 1)));
9524 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9525 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9526 fold_convert_loc (loc, type,
9527 TREE_OPERAND (arg0, 1)))))
9528 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9529 fold_convert_loc (loc, type,
9530 TREE_OPERAND (arg0, 0)), tem);
9532 return NULL_TREE;
9534 case TRUTH_NOT_EXPR:
9535 /* Note that the operand of this must be an int
9536 and its values must be 0 or 1.
9537 ("true" is a fixed value perhaps depending on the language,
9538 but we don't handle values other than 1 correctly yet.) */
9539 tem = fold_truth_not_expr (loc, arg0);
9540 if (!tem)
9541 return NULL_TREE;
9542 return fold_convert_loc (loc, type, tem);
9544 case INDIRECT_REF:
9545 /* Fold *&X to X if X is an lvalue. */
9546 if (TREE_CODE (op0) == ADDR_EXPR)
9548 tree op00 = TREE_OPERAND (op0, 0);
9549 if ((VAR_P (op00)
9550 || TREE_CODE (op00) == PARM_DECL
9551 || TREE_CODE (op00) == RESULT_DECL)
9552 && !TREE_READONLY (op00))
9553 return op00;
9555 return NULL_TREE;
9557 default:
9558 return NULL_TREE;
9559 } /* switch (code) */
9563 /* If the operation was a conversion do _not_ mark a resulting constant
9564 with TREE_OVERFLOW if the original constant was not. These conversions
9565 have implementation defined behavior and retaining the TREE_OVERFLOW
9566 flag here would confuse later passes such as VRP. */
9567 tree
9568 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9569 tree type, tree op0)
9571 tree res = fold_unary_loc (loc, code, type, op0);
9572 if (res
9573 && TREE_CODE (res) == INTEGER_CST
9574 && TREE_CODE (op0) == INTEGER_CST
9575 && CONVERT_EXPR_CODE_P (code))
9576 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9578 return res;
9581 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9582 operands OP0 and OP1. LOC is the location of the resulting expression.
9583 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9584 Return the folded expression if folding is successful. Otherwise,
9585 return NULL_TREE. */
9586 static tree
9587 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9588 tree arg0, tree arg1, tree op0, tree op1)
9590 tree tem;
9592 /* We only do these simplifications if we are optimizing. */
9593 if (!optimize)
9594 return NULL_TREE;
9596 /* Check for things like (A || B) && (A || C). We can convert this
9597 to A || (B && C). Note that either operator can be any of the four
9598 truth and/or operations and the transformation will still be
9599 valid. Also note that we only care about order for the
9600 ANDIF and ORIF operators. If B contains side effects, this
9601 might change the truth-value of A. */
9602 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9603 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9604 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9605 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9606 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9607 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9609 tree a00 = TREE_OPERAND (arg0, 0);
9610 tree a01 = TREE_OPERAND (arg0, 1);
9611 tree a10 = TREE_OPERAND (arg1, 0);
9612 tree a11 = TREE_OPERAND (arg1, 1);
9613 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9614 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9615 && (code == TRUTH_AND_EXPR
9616 || code == TRUTH_OR_EXPR));
9618 if (operand_equal_p (a00, a10, 0))
9619 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9620 fold_build2_loc (loc, code, type, a01, a11));
9621 else if (commutative && operand_equal_p (a00, a11, 0))
9622 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9623 fold_build2_loc (loc, code, type, a01, a10));
9624 else if (commutative && operand_equal_p (a01, a10, 0))
9625 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9626 fold_build2_loc (loc, code, type, a00, a11));
9628 /* This case if tricky because we must either have commutative
9629 operators or else A10 must not have side-effects. */
9631 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9632 && operand_equal_p (a01, a11, 0))
9633 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9634 fold_build2_loc (loc, code, type, a00, a10),
9635 a01);
9638 /* See if we can build a range comparison. */
9639 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9640 return tem;
9642 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9643 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9645 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9646 if (tem)
9647 return fold_build2_loc (loc, code, type, tem, arg1);
9650 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9651 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9653 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9654 if (tem)
9655 return fold_build2_loc (loc, code, type, arg0, tem);
9658 /* Check for the possibility of merging component references. If our
9659 lhs is another similar operation, try to merge its rhs with our
9660 rhs. Then try to merge our lhs and rhs. */
9661 if (TREE_CODE (arg0) == code
9662 && (tem = fold_truth_andor_1 (loc, code, type,
9663 TREE_OPERAND (arg0, 1), arg1)) != 0)
9664 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9666 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9667 return tem;
9669 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9670 if (param_logical_op_non_short_circuit != -1)
9671 logical_op_non_short_circuit
9672 = param_logical_op_non_short_circuit;
9673 if (logical_op_non_short_circuit
9674 && !sanitize_coverage_p ()
9675 && (code == TRUTH_AND_EXPR
9676 || code == TRUTH_ANDIF_EXPR
9677 || code == TRUTH_OR_EXPR
9678 || code == TRUTH_ORIF_EXPR))
9680 enum tree_code ncode, icode;
9682 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9683 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9684 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9686 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9687 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9688 We don't want to pack more than two leafs to a non-IF AND/OR
9689 expression.
9690 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9691 equal to IF-CODE, then we don't want to add right-hand operand.
9692 If the inner right-hand side of left-hand operand has
9693 side-effects, or isn't simple, then we can't add to it,
9694 as otherwise we might destroy if-sequence. */
9695 if (TREE_CODE (arg0) == icode
9696 && simple_operand_p_2 (arg1)
9697 /* Needed for sequence points to handle trappings, and
9698 side-effects. */
9699 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9701 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9702 arg1);
9703 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9704 tem);
9706 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9707 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9708 else if (TREE_CODE (arg1) == icode
9709 && simple_operand_p_2 (arg0)
9710 /* Needed for sequence points to handle trappings, and
9711 side-effects. */
9712 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9714 tem = fold_build2_loc (loc, ncode, type,
9715 arg0, TREE_OPERAND (arg1, 0));
9716 return fold_build2_loc (loc, icode, type, tem,
9717 TREE_OPERAND (arg1, 1));
9719 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9720 into (A OR B).
9721 For sequence point consistancy, we need to check for trapping,
9722 and side-effects. */
9723 else if (code == icode && simple_operand_p_2 (arg0)
9724 && simple_operand_p_2 (arg1))
9725 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9728 return NULL_TREE;
9731 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9732 by changing CODE to reduce the magnitude of constants involved in
9733 ARG0 of the comparison.
9734 Returns a canonicalized comparison tree if a simplification was
9735 possible, otherwise returns NULL_TREE.
9736 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9737 valid if signed overflow is undefined. */
9739 static tree
9740 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9741 tree arg0, tree arg1,
9742 bool *strict_overflow_p)
9744 enum tree_code code0 = TREE_CODE (arg0);
9745 tree t, cst0 = NULL_TREE;
9746 int sgn0;
9748 /* Match A +- CST code arg1. We can change this only if overflow
9749 is undefined. */
9750 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9751 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9752 /* In principle pointers also have undefined overflow behavior,
9753 but that causes problems elsewhere. */
9754 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9755 && (code0 == MINUS_EXPR
9756 || code0 == PLUS_EXPR)
9757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9758 return NULL_TREE;
9760 /* Identify the constant in arg0 and its sign. */
9761 cst0 = TREE_OPERAND (arg0, 1);
9762 sgn0 = tree_int_cst_sgn (cst0);
9764 /* Overflowed constants and zero will cause problems. */
9765 if (integer_zerop (cst0)
9766 || TREE_OVERFLOW (cst0))
9767 return NULL_TREE;
9769 /* See if we can reduce the magnitude of the constant in
9770 arg0 by changing the comparison code. */
9771 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9772 if (code == LT_EXPR
9773 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9774 code = LE_EXPR;
9775 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9776 else if (code == GT_EXPR
9777 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9778 code = GE_EXPR;
9779 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9780 else if (code == LE_EXPR
9781 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9782 code = LT_EXPR;
9783 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9784 else if (code == GE_EXPR
9785 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9786 code = GT_EXPR;
9787 else
9788 return NULL_TREE;
9789 *strict_overflow_p = true;
9791 /* Now build the constant reduced in magnitude. But not if that
9792 would produce one outside of its types range. */
9793 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9794 && ((sgn0 == 1
9795 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9796 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9797 || (sgn0 == -1
9798 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9799 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9800 return NULL_TREE;
9802 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9803 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9804 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9805 t = fold_convert (TREE_TYPE (arg1), t);
9807 return fold_build2_loc (loc, code, type, t, arg1);
9810 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9811 overflow further. Try to decrease the magnitude of constants involved
9812 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9813 and put sole constants at the second argument position.
9814 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9816 static tree
9817 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9818 tree arg0, tree arg1)
9820 tree t;
9821 bool strict_overflow_p;
9822 const char * const warnmsg = G_("assuming signed overflow does not occur "
9823 "when reducing constant in comparison");
9825 /* Try canonicalization by simplifying arg0. */
9826 strict_overflow_p = false;
9827 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9828 &strict_overflow_p);
9829 if (t)
9831 if (strict_overflow_p)
9832 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9833 return t;
9836 /* Try canonicalization by simplifying arg1 using the swapped
9837 comparison. */
9838 code = swap_tree_comparison (code);
9839 strict_overflow_p = false;
9840 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9841 &strict_overflow_p);
9842 if (t && strict_overflow_p)
9843 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9844 return t;
9847 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9848 space. This is used to avoid issuing overflow warnings for
9849 expressions like &p->x which cannot wrap. */
9851 static bool
9852 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9854 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9855 return true;
9857 if (maybe_lt (bitpos, 0))
9858 return true;
9860 poly_wide_int wi_offset;
9861 int precision = TYPE_PRECISION (TREE_TYPE (base));
9862 if (offset == NULL_TREE)
9863 wi_offset = wi::zero (precision);
9864 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9865 return true;
9866 else
9867 wi_offset = wi::to_poly_wide (offset);
9869 wi::overflow_type overflow;
9870 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9871 precision);
9872 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9873 if (overflow)
9874 return true;
9876 poly_uint64 total_hwi, size;
9877 if (!total.to_uhwi (&total_hwi)
9878 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9879 &size)
9880 || known_eq (size, 0U))
9881 return true;
9883 if (known_le (total_hwi, size))
9884 return false;
9886 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9887 array. */
9888 if (TREE_CODE (base) == ADDR_EXPR
9889 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9890 &size)
9891 && maybe_ne (size, 0U)
9892 && known_le (total_hwi, size))
9893 return false;
9895 return true;
9898 /* Return a positive integer when the symbol DECL is known to have
9899 a nonzero address, zero when it's known not to (e.g., it's a weak
9900 symbol), and a negative integer when the symbol is not yet in the
9901 symbol table and so whether or not its address is zero is unknown.
9902 For function local objects always return positive integer. */
9903 static int
9904 maybe_nonzero_address (tree decl)
9906 if (DECL_P (decl) && decl_in_symtab_p (decl))
9907 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9908 return symbol->nonzero_address ();
9910 /* Function local objects are never NULL. */
9911 if (DECL_P (decl)
9912 && (DECL_CONTEXT (decl)
9913 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9914 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9915 return 1;
9917 return -1;
9920 /* Subroutine of fold_binary. This routine performs all of the
9921 transformations that are common to the equality/inequality
9922 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9923 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9924 fold_binary should call fold_binary. Fold a comparison with
9925 tree code CODE and type TYPE with operands OP0 and OP1. Return
9926 the folded comparison or NULL_TREE. */
9928 static tree
9929 fold_comparison (location_t loc, enum tree_code code, tree type,
9930 tree op0, tree op1)
9932 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9933 tree arg0, arg1, tem;
9935 arg0 = op0;
9936 arg1 = op1;
9938 STRIP_SIGN_NOPS (arg0);
9939 STRIP_SIGN_NOPS (arg1);
9941 /* For comparisons of pointers we can decompose it to a compile time
9942 comparison of the base objects and the offsets into the object.
9943 This requires at least one operand being an ADDR_EXPR or a
9944 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9945 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9946 && (TREE_CODE (arg0) == ADDR_EXPR
9947 || TREE_CODE (arg1) == ADDR_EXPR
9948 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9949 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9951 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9952 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9953 machine_mode mode;
9954 int volatilep, reversep, unsignedp;
9955 bool indirect_base0 = false, indirect_base1 = false;
9957 /* Get base and offset for the access. Strip ADDR_EXPR for
9958 get_inner_reference, but put it back by stripping INDIRECT_REF
9959 off the base object if possible. indirect_baseN will be true
9960 if baseN is not an address but refers to the object itself. */
9961 base0 = arg0;
9962 if (TREE_CODE (arg0) == ADDR_EXPR)
9964 base0
9965 = get_inner_reference (TREE_OPERAND (arg0, 0),
9966 &bitsize, &bitpos0, &offset0, &mode,
9967 &unsignedp, &reversep, &volatilep);
9968 if (TREE_CODE (base0) == INDIRECT_REF)
9969 base0 = TREE_OPERAND (base0, 0);
9970 else
9971 indirect_base0 = true;
9973 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9975 base0 = TREE_OPERAND (arg0, 0);
9976 STRIP_SIGN_NOPS (base0);
9977 if (TREE_CODE (base0) == ADDR_EXPR)
9979 base0
9980 = get_inner_reference (TREE_OPERAND (base0, 0),
9981 &bitsize, &bitpos0, &offset0, &mode,
9982 &unsignedp, &reversep, &volatilep);
9983 if (TREE_CODE (base0) == INDIRECT_REF)
9984 base0 = TREE_OPERAND (base0, 0);
9985 else
9986 indirect_base0 = true;
9988 if (offset0 == NULL_TREE || integer_zerop (offset0))
9989 offset0 = TREE_OPERAND (arg0, 1);
9990 else
9991 offset0 = size_binop (PLUS_EXPR, offset0,
9992 TREE_OPERAND (arg0, 1));
9993 if (poly_int_tree_p (offset0))
9995 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9996 TYPE_PRECISION (sizetype));
9997 tem <<= LOG2_BITS_PER_UNIT;
9998 tem += bitpos0;
9999 if (tem.to_shwi (&bitpos0))
10000 offset0 = NULL_TREE;
10004 base1 = arg1;
10005 if (TREE_CODE (arg1) == ADDR_EXPR)
10007 base1
10008 = get_inner_reference (TREE_OPERAND (arg1, 0),
10009 &bitsize, &bitpos1, &offset1, &mode,
10010 &unsignedp, &reversep, &volatilep);
10011 if (TREE_CODE (base1) == INDIRECT_REF)
10012 base1 = TREE_OPERAND (base1, 0);
10013 else
10014 indirect_base1 = true;
10016 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10018 base1 = TREE_OPERAND (arg1, 0);
10019 STRIP_SIGN_NOPS (base1);
10020 if (TREE_CODE (base1) == ADDR_EXPR)
10022 base1
10023 = get_inner_reference (TREE_OPERAND (base1, 0),
10024 &bitsize, &bitpos1, &offset1, &mode,
10025 &unsignedp, &reversep, &volatilep);
10026 if (TREE_CODE (base1) == INDIRECT_REF)
10027 base1 = TREE_OPERAND (base1, 0);
10028 else
10029 indirect_base1 = true;
10031 if (offset1 == NULL_TREE || integer_zerop (offset1))
10032 offset1 = TREE_OPERAND (arg1, 1);
10033 else
10034 offset1 = size_binop (PLUS_EXPR, offset1,
10035 TREE_OPERAND (arg1, 1));
10036 if (poly_int_tree_p (offset1))
10038 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10039 TYPE_PRECISION (sizetype));
10040 tem <<= LOG2_BITS_PER_UNIT;
10041 tem += bitpos1;
10042 if (tem.to_shwi (&bitpos1))
10043 offset1 = NULL_TREE;
10047 /* If we have equivalent bases we might be able to simplify. */
10048 if (indirect_base0 == indirect_base1
10049 && operand_equal_p (base0, base1,
10050 indirect_base0 ? OEP_ADDRESS_OF : 0))
10052 /* We can fold this expression to a constant if the non-constant
10053 offset parts are equal. */
10054 if ((offset0 == offset1
10055 || (offset0 && offset1
10056 && operand_equal_p (offset0, offset1, 0)))
10057 && (equality_code
10058 || (indirect_base0
10059 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10060 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10062 if (!equality_code
10063 && maybe_ne (bitpos0, bitpos1)
10064 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10065 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10066 fold_overflow_warning (("assuming pointer wraparound does not "
10067 "occur when comparing P +- C1 with "
10068 "P +- C2"),
10069 WARN_STRICT_OVERFLOW_CONDITIONAL);
10071 switch (code)
10073 case EQ_EXPR:
10074 if (known_eq (bitpos0, bitpos1))
10075 return constant_boolean_node (true, type);
10076 if (known_ne (bitpos0, bitpos1))
10077 return constant_boolean_node (false, type);
10078 break;
10079 case NE_EXPR:
10080 if (known_ne (bitpos0, bitpos1))
10081 return constant_boolean_node (true, type);
10082 if (known_eq (bitpos0, bitpos1))
10083 return constant_boolean_node (false, type);
10084 break;
10085 case LT_EXPR:
10086 if (known_lt (bitpos0, bitpos1))
10087 return constant_boolean_node (true, type);
10088 if (known_ge (bitpos0, bitpos1))
10089 return constant_boolean_node (false, type);
10090 break;
10091 case LE_EXPR:
10092 if (known_le (bitpos0, bitpos1))
10093 return constant_boolean_node (true, type);
10094 if (known_gt (bitpos0, bitpos1))
10095 return constant_boolean_node (false, type);
10096 break;
10097 case GE_EXPR:
10098 if (known_ge (bitpos0, bitpos1))
10099 return constant_boolean_node (true, type);
10100 if (known_lt (bitpos0, bitpos1))
10101 return constant_boolean_node (false, type);
10102 break;
10103 case GT_EXPR:
10104 if (known_gt (bitpos0, bitpos1))
10105 return constant_boolean_node (true, type);
10106 if (known_le (bitpos0, bitpos1))
10107 return constant_boolean_node (false, type);
10108 break;
10109 default:;
10112 /* We can simplify the comparison to a comparison of the variable
10113 offset parts if the constant offset parts are equal.
10114 Be careful to use signed sizetype here because otherwise we
10115 mess with array offsets in the wrong way. This is possible
10116 because pointer arithmetic is restricted to retain within an
10117 object and overflow on pointer differences is undefined as of
10118 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10119 else if (known_eq (bitpos0, bitpos1)
10120 && (equality_code
10121 || (indirect_base0
10122 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10123 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10125 /* By converting to signed sizetype we cover middle-end pointer
10126 arithmetic which operates on unsigned pointer types of size
10127 type size and ARRAY_REF offsets which are properly sign or
10128 zero extended from their type in case it is narrower than
10129 sizetype. */
10130 if (offset0 == NULL_TREE)
10131 offset0 = build_int_cst (ssizetype, 0);
10132 else
10133 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10134 if (offset1 == NULL_TREE)
10135 offset1 = build_int_cst (ssizetype, 0);
10136 else
10137 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10139 if (!equality_code
10140 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10141 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10142 fold_overflow_warning (("assuming pointer wraparound does not "
10143 "occur when comparing P +- C1 with "
10144 "P +- C2"),
10145 WARN_STRICT_OVERFLOW_COMPARISON);
10147 return fold_build2_loc (loc, code, type, offset0, offset1);
10150 /* For equal offsets we can simplify to a comparison of the
10151 base addresses. */
10152 else if (known_eq (bitpos0, bitpos1)
10153 && (indirect_base0
10154 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10155 && (indirect_base1
10156 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10157 && ((offset0 == offset1)
10158 || (offset0 && offset1
10159 && operand_equal_p (offset0, offset1, 0))))
10161 if (indirect_base0)
10162 base0 = build_fold_addr_expr_loc (loc, base0);
10163 if (indirect_base1)
10164 base1 = build_fold_addr_expr_loc (loc, base1);
10165 return fold_build2_loc (loc, code, type, base0, base1);
10167 /* Comparison between an ordinary (non-weak) symbol and a null
10168 pointer can be eliminated since such symbols must have a non
10169 null address. In C, relational expressions between pointers
10170 to objects and null pointers are undefined. The results
10171 below follow the C++ rules with the additional property that
10172 every object pointer compares greater than a null pointer.
10174 else if (((DECL_P (base0)
10175 && maybe_nonzero_address (base0) > 0
10176 /* Avoid folding references to struct members at offset 0 to
10177 prevent tests like '&ptr->firstmember == 0' from getting
10178 eliminated. When ptr is null, although the -> expression
10179 is strictly speaking invalid, GCC retains it as a matter
10180 of QoI. See PR c/44555. */
10181 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10182 || CONSTANT_CLASS_P (base0))
10183 && indirect_base0
10184 /* The caller guarantees that when one of the arguments is
10185 constant (i.e., null in this case) it is second. */
10186 && integer_zerop (arg1))
10188 switch (code)
10190 case EQ_EXPR:
10191 case LE_EXPR:
10192 case LT_EXPR:
10193 return constant_boolean_node (false, type);
10194 case GE_EXPR:
10195 case GT_EXPR:
10196 case NE_EXPR:
10197 return constant_boolean_node (true, type);
10198 default:
10199 gcc_unreachable ();
10204 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10205 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10206 the resulting offset is smaller in absolute value than the
10207 original one and has the same sign. */
10208 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10209 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10210 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10211 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10212 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10213 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10214 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10215 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10217 tree const1 = TREE_OPERAND (arg0, 1);
10218 tree const2 = TREE_OPERAND (arg1, 1);
10219 tree variable1 = TREE_OPERAND (arg0, 0);
10220 tree variable2 = TREE_OPERAND (arg1, 0);
10221 tree cst;
10222 const char * const warnmsg = G_("assuming signed overflow does not "
10223 "occur when combining constants around "
10224 "a comparison");
10226 /* Put the constant on the side where it doesn't overflow and is
10227 of lower absolute value and of same sign than before. */
10228 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10229 ? MINUS_EXPR : PLUS_EXPR,
10230 const2, const1);
10231 if (!TREE_OVERFLOW (cst)
10232 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10233 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10235 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10236 return fold_build2_loc (loc, code, type,
10237 variable1,
10238 fold_build2_loc (loc, TREE_CODE (arg1),
10239 TREE_TYPE (arg1),
10240 variable2, cst));
10243 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10244 ? MINUS_EXPR : PLUS_EXPR,
10245 const1, const2);
10246 if (!TREE_OVERFLOW (cst)
10247 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10248 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10250 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10251 return fold_build2_loc (loc, code, type,
10252 fold_build2_loc (loc, TREE_CODE (arg0),
10253 TREE_TYPE (arg0),
10254 variable1, cst),
10255 variable2);
10259 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10260 if (tem)
10261 return tem;
10263 /* If we are comparing an expression that just has comparisons
10264 of two integer values, arithmetic expressions of those comparisons,
10265 and constants, we can simplify it. There are only three cases
10266 to check: the two values can either be equal, the first can be
10267 greater, or the second can be greater. Fold the expression for
10268 those three values. Since each value must be 0 or 1, we have
10269 eight possibilities, each of which corresponds to the constant 0
10270 or 1 or one of the six possible comparisons.
10272 This handles common cases like (a > b) == 0 but also handles
10273 expressions like ((x > y) - (y > x)) > 0, which supposedly
10274 occur in macroized code. */
10276 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10278 tree cval1 = 0, cval2 = 0;
10280 if (twoval_comparison_p (arg0, &cval1, &cval2)
10281 /* Don't handle degenerate cases here; they should already
10282 have been handled anyway. */
10283 && cval1 != 0 && cval2 != 0
10284 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10285 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10286 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10287 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10288 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10289 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10290 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10292 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10293 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10295 /* We can't just pass T to eval_subst in case cval1 or cval2
10296 was the same as ARG1. */
10298 tree high_result
10299 = fold_build2_loc (loc, code, type,
10300 eval_subst (loc, arg0, cval1, maxval,
10301 cval2, minval),
10302 arg1);
10303 tree equal_result
10304 = fold_build2_loc (loc, code, type,
10305 eval_subst (loc, arg0, cval1, maxval,
10306 cval2, maxval),
10307 arg1);
10308 tree low_result
10309 = fold_build2_loc (loc, code, type,
10310 eval_subst (loc, arg0, cval1, minval,
10311 cval2, maxval),
10312 arg1);
10314 /* All three of these results should be 0 or 1. Confirm they are.
10315 Then use those values to select the proper code to use. */
10317 if (TREE_CODE (high_result) == INTEGER_CST
10318 && TREE_CODE (equal_result) == INTEGER_CST
10319 && TREE_CODE (low_result) == INTEGER_CST)
10321 /* Make a 3-bit mask with the high-order bit being the
10322 value for `>', the next for '=', and the low for '<'. */
10323 switch ((integer_onep (high_result) * 4)
10324 + (integer_onep (equal_result) * 2)
10325 + integer_onep (low_result))
10327 case 0:
10328 /* Always false. */
10329 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10330 case 1:
10331 code = LT_EXPR;
10332 break;
10333 case 2:
10334 code = EQ_EXPR;
10335 break;
10336 case 3:
10337 code = LE_EXPR;
10338 break;
10339 case 4:
10340 code = GT_EXPR;
10341 break;
10342 case 5:
10343 code = NE_EXPR;
10344 break;
10345 case 6:
10346 code = GE_EXPR;
10347 break;
10348 case 7:
10349 /* Always true. */
10350 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10353 return fold_build2_loc (loc, code, type, cval1, cval2);
10358 return NULL_TREE;
10362 /* Subroutine of fold_binary. Optimize complex multiplications of the
10363 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10364 argument EXPR represents the expression "z" of type TYPE. */
10366 static tree
10367 fold_mult_zconjz (location_t loc, tree type, tree expr)
10369 tree itype = TREE_TYPE (type);
10370 tree rpart, ipart, tem;
10372 if (TREE_CODE (expr) == COMPLEX_EXPR)
10374 rpart = TREE_OPERAND (expr, 0);
10375 ipart = TREE_OPERAND (expr, 1);
10377 else if (TREE_CODE (expr) == COMPLEX_CST)
10379 rpart = TREE_REALPART (expr);
10380 ipart = TREE_IMAGPART (expr);
10382 else
10384 expr = save_expr (expr);
10385 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10386 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10389 rpart = save_expr (rpart);
10390 ipart = save_expr (ipart);
10391 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10392 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10393 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10394 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10395 build_zero_cst (itype));
10399 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10400 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10401 true if successful. */
10403 static bool
10404 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10406 unsigned HOST_WIDE_INT i, nunits;
10408 if (TREE_CODE (arg) == VECTOR_CST
10409 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10411 for (i = 0; i < nunits; ++i)
10412 elts[i] = VECTOR_CST_ELT (arg, i);
10414 else if (TREE_CODE (arg) == CONSTRUCTOR)
10416 constructor_elt *elt;
10418 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10419 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10420 return false;
10421 else
10422 elts[i] = elt->value;
10424 else
10425 return false;
10426 for (; i < nelts; i++)
10427 elts[i]
10428 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10429 return true;
10432 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10433 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10434 NULL_TREE otherwise. */
10436 tree
10437 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10439 unsigned int i;
10440 unsigned HOST_WIDE_INT nelts;
10441 bool need_ctor = false;
10443 if (!sel.length ().is_constant (&nelts))
10444 return NULL_TREE;
10445 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10446 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10447 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10448 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10449 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10450 return NULL_TREE;
10452 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10453 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10454 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10455 return NULL_TREE;
10457 tree_vector_builder out_elts (type, nelts, 1);
10458 for (i = 0; i < nelts; i++)
10460 HOST_WIDE_INT index;
10461 if (!sel[i].is_constant (&index))
10462 return NULL_TREE;
10463 if (!CONSTANT_CLASS_P (in_elts[index]))
10464 need_ctor = true;
10465 out_elts.quick_push (unshare_expr (in_elts[index]));
10468 if (need_ctor)
10470 vec<constructor_elt, va_gc> *v;
10471 vec_alloc (v, nelts);
10472 for (i = 0; i < nelts; i++)
10473 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10474 return build_constructor (type, v);
10476 else
10477 return out_elts.build ();
10480 /* Try to fold a pointer difference of type TYPE two address expressions of
10481 array references AREF0 and AREF1 using location LOC. Return a
10482 simplified expression for the difference or NULL_TREE. */
10484 static tree
10485 fold_addr_of_array_ref_difference (location_t loc, tree type,
10486 tree aref0, tree aref1,
10487 bool use_pointer_diff)
10489 tree base0 = TREE_OPERAND (aref0, 0);
10490 tree base1 = TREE_OPERAND (aref1, 0);
10491 tree base_offset = build_int_cst (type, 0);
10493 /* If the bases are array references as well, recurse. If the bases
10494 are pointer indirections compute the difference of the pointers.
10495 If the bases are equal, we are set. */
10496 if ((TREE_CODE (base0) == ARRAY_REF
10497 && TREE_CODE (base1) == ARRAY_REF
10498 && (base_offset
10499 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10500 use_pointer_diff)))
10501 || (INDIRECT_REF_P (base0)
10502 && INDIRECT_REF_P (base1)
10503 && (base_offset
10504 = use_pointer_diff
10505 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10506 TREE_OPERAND (base0, 0),
10507 TREE_OPERAND (base1, 0))
10508 : fold_binary_loc (loc, MINUS_EXPR, type,
10509 fold_convert (type,
10510 TREE_OPERAND (base0, 0)),
10511 fold_convert (type,
10512 TREE_OPERAND (base1, 0)))))
10513 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10515 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10516 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10517 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10518 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10519 return fold_build2_loc (loc, PLUS_EXPR, type,
10520 base_offset,
10521 fold_build2_loc (loc, MULT_EXPR, type,
10522 diff, esz));
10524 return NULL_TREE;
10527 /* If the real or vector real constant CST of type TYPE has an exact
10528 inverse, return it, else return NULL. */
10530 tree
10531 exact_inverse (tree type, tree cst)
10533 REAL_VALUE_TYPE r;
10534 tree unit_type;
10535 machine_mode mode;
10537 switch (TREE_CODE (cst))
10539 case REAL_CST:
10540 r = TREE_REAL_CST (cst);
10542 if (exact_real_inverse (TYPE_MODE (type), &r))
10543 return build_real (type, r);
10545 return NULL_TREE;
10547 case VECTOR_CST:
10549 unit_type = TREE_TYPE (type);
10550 mode = TYPE_MODE (unit_type);
10552 tree_vector_builder elts;
10553 if (!elts.new_unary_operation (type, cst, false))
10554 return NULL_TREE;
10555 unsigned int count = elts.encoded_nelts ();
10556 for (unsigned int i = 0; i < count; ++i)
10558 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10559 if (!exact_real_inverse (mode, &r))
10560 return NULL_TREE;
10561 elts.quick_push (build_real (unit_type, r));
10564 return elts.build ();
10567 default:
10568 return NULL_TREE;
10572 /* Mask out the tz least significant bits of X of type TYPE where
10573 tz is the number of trailing zeroes in Y. */
10574 static wide_int
10575 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10577 int tz = wi::ctz (y);
10578 if (tz > 0)
10579 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10580 return x;
10583 /* Return true when T is an address and is known to be nonzero.
10584 For floating point we further ensure that T is not denormal.
10585 Similar logic is present in nonzero_address in rtlanal.h.
10587 If the return value is based on the assumption that signed overflow
10588 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10589 change *STRICT_OVERFLOW_P. */
10591 static bool
10592 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10594 tree type = TREE_TYPE (t);
10595 enum tree_code code;
10597 /* Doing something useful for floating point would need more work. */
10598 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10599 return false;
10601 code = TREE_CODE (t);
10602 switch (TREE_CODE_CLASS (code))
10604 case tcc_unary:
10605 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10606 strict_overflow_p);
10607 case tcc_binary:
10608 case tcc_comparison:
10609 return tree_binary_nonzero_warnv_p (code, type,
10610 TREE_OPERAND (t, 0),
10611 TREE_OPERAND (t, 1),
10612 strict_overflow_p);
10613 case tcc_constant:
10614 case tcc_declaration:
10615 case tcc_reference:
10616 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10618 default:
10619 break;
10622 switch (code)
10624 case TRUTH_NOT_EXPR:
10625 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10626 strict_overflow_p);
10628 case TRUTH_AND_EXPR:
10629 case TRUTH_OR_EXPR:
10630 case TRUTH_XOR_EXPR:
10631 return tree_binary_nonzero_warnv_p (code, type,
10632 TREE_OPERAND (t, 0),
10633 TREE_OPERAND (t, 1),
10634 strict_overflow_p);
10636 case COND_EXPR:
10637 case CONSTRUCTOR:
10638 case OBJ_TYPE_REF:
10639 case ASSERT_EXPR:
10640 case ADDR_EXPR:
10641 case WITH_SIZE_EXPR:
10642 case SSA_NAME:
10643 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10645 case COMPOUND_EXPR:
10646 case MODIFY_EXPR:
10647 case BIND_EXPR:
10648 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10649 strict_overflow_p);
10651 case SAVE_EXPR:
10652 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10653 strict_overflow_p);
10655 case CALL_EXPR:
10657 tree fndecl = get_callee_fndecl (t);
10658 if (!fndecl) return false;
10659 if (flag_delete_null_pointer_checks && !flag_check_new
10660 && DECL_IS_OPERATOR_NEW_P (fndecl)
10661 && !TREE_NOTHROW (fndecl))
10662 return true;
10663 if (flag_delete_null_pointer_checks
10664 && lookup_attribute ("returns_nonnull",
10665 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10666 return true;
10667 return alloca_call_p (t);
10670 default:
10671 break;
10673 return false;
10676 /* Return true when T is an address and is known to be nonzero.
10677 Handle warnings about undefined signed overflow. */
10679 bool
10680 tree_expr_nonzero_p (tree t)
10682 bool ret, strict_overflow_p;
10684 strict_overflow_p = false;
10685 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10686 if (strict_overflow_p)
10687 fold_overflow_warning (("assuming signed overflow does not occur when "
10688 "determining that expression is always "
10689 "non-zero"),
10690 WARN_STRICT_OVERFLOW_MISC);
10691 return ret;
10694 /* Return true if T is known not to be equal to an integer W. */
10696 bool
10697 expr_not_equal_to (tree t, const wide_int &w)
10699 value_range vr;
10700 switch (TREE_CODE (t))
10702 case INTEGER_CST:
10703 return wi::to_wide (t) != w;
10705 case SSA_NAME:
10706 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10707 return false;
10709 if (cfun)
10710 get_range_query (cfun)->range_of_expr (vr, t);
10711 else
10712 get_global_range_query ()->range_of_expr (vr, t);
10714 if (!vr.undefined_p ()
10715 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10716 return true;
10717 /* If T has some known zero bits and W has any of those bits set,
10718 then T is known not to be equal to W. */
10719 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10720 TYPE_PRECISION (TREE_TYPE (t))), 0))
10721 return true;
10722 return false;
10724 default:
10725 return false;
10729 /* Fold a binary expression of code CODE and type TYPE with operands
10730 OP0 and OP1. LOC is the location of the resulting expression.
10731 Return the folded expression if folding is successful. Otherwise,
10732 return NULL_TREE. */
10734 tree
10735 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10736 tree op0, tree op1)
10738 enum tree_code_class kind = TREE_CODE_CLASS (code);
10739 tree arg0, arg1, tem;
10740 tree t1 = NULL_TREE;
10741 bool strict_overflow_p;
10742 unsigned int prec;
10744 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10745 && TREE_CODE_LENGTH (code) == 2
10746 && op0 != NULL_TREE
10747 && op1 != NULL_TREE);
10749 arg0 = op0;
10750 arg1 = op1;
10752 /* Strip any conversions that don't change the mode. This is
10753 safe for every expression, except for a comparison expression
10754 because its signedness is derived from its operands. So, in
10755 the latter case, only strip conversions that don't change the
10756 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10757 preserved.
10759 Note that this is done as an internal manipulation within the
10760 constant folder, in order to find the simplest representation
10761 of the arguments so that their form can be studied. In any
10762 cases, the appropriate type conversions should be put back in
10763 the tree that will get out of the constant folder. */
10765 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10767 STRIP_SIGN_NOPS (arg0);
10768 STRIP_SIGN_NOPS (arg1);
10770 else
10772 STRIP_NOPS (arg0);
10773 STRIP_NOPS (arg1);
10776 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10777 constant but we can't do arithmetic on them. */
10778 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10780 tem = const_binop (code, type, arg0, arg1);
10781 if (tem != NULL_TREE)
10783 if (TREE_TYPE (tem) != type)
10784 tem = fold_convert_loc (loc, type, tem);
10785 return tem;
10789 /* If this is a commutative operation, and ARG0 is a constant, move it
10790 to ARG1 to reduce the number of tests below. */
10791 if (commutative_tree_code (code)
10792 && tree_swap_operands_p (arg0, arg1))
10793 return fold_build2_loc (loc, code, type, op1, op0);
10795 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10796 to ARG1 to reduce the number of tests below. */
10797 if (kind == tcc_comparison
10798 && tree_swap_operands_p (arg0, arg1))
10799 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10801 tem = generic_simplify (loc, code, type, op0, op1);
10802 if (tem)
10803 return tem;
10805 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10807 First check for cases where an arithmetic operation is applied to a
10808 compound, conditional, or comparison operation. Push the arithmetic
10809 operation inside the compound or conditional to see if any folding
10810 can then be done. Convert comparison to conditional for this purpose.
10811 The also optimizes non-constant cases that used to be done in
10812 expand_expr.
10814 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10815 one of the operands is a comparison and the other is a comparison, a
10816 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10817 code below would make the expression more complex. Change it to a
10818 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10819 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10821 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10822 || code == EQ_EXPR || code == NE_EXPR)
10823 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10824 && ((truth_value_p (TREE_CODE (arg0))
10825 && (truth_value_p (TREE_CODE (arg1))
10826 || (TREE_CODE (arg1) == BIT_AND_EXPR
10827 && integer_onep (TREE_OPERAND (arg1, 1)))))
10828 || (truth_value_p (TREE_CODE (arg1))
10829 && (truth_value_p (TREE_CODE (arg0))
10830 || (TREE_CODE (arg0) == BIT_AND_EXPR
10831 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10833 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10834 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10835 : TRUTH_XOR_EXPR,
10836 boolean_type_node,
10837 fold_convert_loc (loc, boolean_type_node, arg0),
10838 fold_convert_loc (loc, boolean_type_node, arg1));
10840 if (code == EQ_EXPR)
10841 tem = invert_truthvalue_loc (loc, tem);
10843 return fold_convert_loc (loc, type, tem);
10846 if (TREE_CODE_CLASS (code) == tcc_binary
10847 || TREE_CODE_CLASS (code) == tcc_comparison)
10849 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10851 tem = fold_build2_loc (loc, code, type,
10852 fold_convert_loc (loc, TREE_TYPE (op0),
10853 TREE_OPERAND (arg0, 1)), op1);
10854 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10855 tem);
10857 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10859 tem = fold_build2_loc (loc, code, type, op0,
10860 fold_convert_loc (loc, TREE_TYPE (op1),
10861 TREE_OPERAND (arg1, 1)));
10862 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10863 tem);
10866 if (TREE_CODE (arg0) == COND_EXPR
10867 || TREE_CODE (arg0) == VEC_COND_EXPR
10868 || COMPARISON_CLASS_P (arg0))
10870 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10871 arg0, arg1,
10872 /*cond_first_p=*/1);
10873 if (tem != NULL_TREE)
10874 return tem;
10877 if (TREE_CODE (arg1) == COND_EXPR
10878 || TREE_CODE (arg1) == VEC_COND_EXPR
10879 || COMPARISON_CLASS_P (arg1))
10881 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10882 arg1, arg0,
10883 /*cond_first_p=*/0);
10884 if (tem != NULL_TREE)
10885 return tem;
10889 switch (code)
10891 case MEM_REF:
10892 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10893 if (TREE_CODE (arg0) == ADDR_EXPR
10894 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10896 tree iref = TREE_OPERAND (arg0, 0);
10897 return fold_build2 (MEM_REF, type,
10898 TREE_OPERAND (iref, 0),
10899 int_const_binop (PLUS_EXPR, arg1,
10900 TREE_OPERAND (iref, 1)));
10903 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10904 if (TREE_CODE (arg0) == ADDR_EXPR
10905 && handled_component_p (TREE_OPERAND (arg0, 0)))
10907 tree base;
10908 poly_int64 coffset;
10909 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10910 &coffset);
10911 if (!base)
10912 return NULL_TREE;
10913 return fold_build2 (MEM_REF, type,
10914 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10915 int_const_binop (PLUS_EXPR, arg1,
10916 size_int (coffset)));
10919 return NULL_TREE;
10921 case POINTER_PLUS_EXPR:
10922 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10923 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10924 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10925 return fold_convert_loc (loc, type,
10926 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10927 fold_convert_loc (loc, sizetype,
10928 arg1),
10929 fold_convert_loc (loc, sizetype,
10930 arg0)));
10932 return NULL_TREE;
10934 case PLUS_EXPR:
10935 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10937 /* X + (X / CST) * -CST is X % CST. */
10938 if (TREE_CODE (arg1) == MULT_EXPR
10939 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10940 && operand_equal_p (arg0,
10941 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10943 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10944 tree cst1 = TREE_OPERAND (arg1, 1);
10945 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10946 cst1, cst0);
10947 if (sum && integer_zerop (sum))
10948 return fold_convert_loc (loc, type,
10949 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10950 TREE_TYPE (arg0), arg0,
10951 cst0));
10955 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10956 one. Make sure the type is not saturating and has the signedness of
10957 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10958 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10959 if ((TREE_CODE (arg0) == MULT_EXPR
10960 || TREE_CODE (arg1) == MULT_EXPR)
10961 && !TYPE_SATURATING (type)
10962 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10963 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10964 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10966 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10967 if (tem)
10968 return tem;
10971 if (! FLOAT_TYPE_P (type))
10973 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10974 (plus (plus (mult) (mult)) (foo)) so that we can
10975 take advantage of the factoring cases below. */
10976 if (ANY_INTEGRAL_TYPE_P (type)
10977 && TYPE_OVERFLOW_WRAPS (type)
10978 && (((TREE_CODE (arg0) == PLUS_EXPR
10979 || TREE_CODE (arg0) == MINUS_EXPR)
10980 && TREE_CODE (arg1) == MULT_EXPR)
10981 || ((TREE_CODE (arg1) == PLUS_EXPR
10982 || TREE_CODE (arg1) == MINUS_EXPR)
10983 && TREE_CODE (arg0) == MULT_EXPR)))
10985 tree parg0, parg1, parg, marg;
10986 enum tree_code pcode;
10988 if (TREE_CODE (arg1) == MULT_EXPR)
10989 parg = arg0, marg = arg1;
10990 else
10991 parg = arg1, marg = arg0;
10992 pcode = TREE_CODE (parg);
10993 parg0 = TREE_OPERAND (parg, 0);
10994 parg1 = TREE_OPERAND (parg, 1);
10995 STRIP_NOPS (parg0);
10996 STRIP_NOPS (parg1);
10998 if (TREE_CODE (parg0) == MULT_EXPR
10999 && TREE_CODE (parg1) != MULT_EXPR)
11000 return fold_build2_loc (loc, pcode, type,
11001 fold_build2_loc (loc, PLUS_EXPR, type,
11002 fold_convert_loc (loc, type,
11003 parg0),
11004 fold_convert_loc (loc, type,
11005 marg)),
11006 fold_convert_loc (loc, type, parg1));
11007 if (TREE_CODE (parg0) != MULT_EXPR
11008 && TREE_CODE (parg1) == MULT_EXPR)
11009 return
11010 fold_build2_loc (loc, PLUS_EXPR, type,
11011 fold_convert_loc (loc, type, parg0),
11012 fold_build2_loc (loc, pcode, type,
11013 fold_convert_loc (loc, type, marg),
11014 fold_convert_loc (loc, type,
11015 parg1)));
11018 else
11020 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11021 to __complex__ ( x, y ). This is not the same for SNaNs or
11022 if signed zeros are involved. */
11023 if (!HONOR_SNANS (arg0)
11024 && !HONOR_SIGNED_ZEROS (arg0)
11025 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11027 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11028 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11029 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11030 bool arg0rz = false, arg0iz = false;
11031 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11032 || (arg0i && (arg0iz = real_zerop (arg0i))))
11034 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11035 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11036 if (arg0rz && arg1i && real_zerop (arg1i))
11038 tree rp = arg1r ? arg1r
11039 : build1 (REALPART_EXPR, rtype, arg1);
11040 tree ip = arg0i ? arg0i
11041 : build1 (IMAGPART_EXPR, rtype, arg0);
11042 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11044 else if (arg0iz && arg1r && real_zerop (arg1r))
11046 tree rp = arg0r ? arg0r
11047 : build1 (REALPART_EXPR, rtype, arg0);
11048 tree ip = arg1i ? arg1i
11049 : build1 (IMAGPART_EXPR, rtype, arg1);
11050 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11055 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11056 We associate floats only if the user has specified
11057 -fassociative-math. */
11058 if (flag_associative_math
11059 && TREE_CODE (arg1) == PLUS_EXPR
11060 && TREE_CODE (arg0) != MULT_EXPR)
11062 tree tree10 = TREE_OPERAND (arg1, 0);
11063 tree tree11 = TREE_OPERAND (arg1, 1);
11064 if (TREE_CODE (tree11) == MULT_EXPR
11065 && TREE_CODE (tree10) == MULT_EXPR)
11067 tree tree0;
11068 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11069 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11072 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11073 We associate floats only if the user has specified
11074 -fassociative-math. */
11075 if (flag_associative_math
11076 && TREE_CODE (arg0) == PLUS_EXPR
11077 && TREE_CODE (arg1) != MULT_EXPR)
11079 tree tree00 = TREE_OPERAND (arg0, 0);
11080 tree tree01 = TREE_OPERAND (arg0, 1);
11081 if (TREE_CODE (tree01) == MULT_EXPR
11082 && TREE_CODE (tree00) == MULT_EXPR)
11084 tree tree0;
11085 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11086 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11091 bit_rotate:
11092 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11093 is a rotate of A by C1 bits. */
11094 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11095 is a rotate of A by B bits.
11096 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11097 though in this case CODE must be | and not + or ^, otherwise
11098 it doesn't return A when B is 0. */
11100 enum tree_code code0, code1;
11101 tree rtype;
11102 code0 = TREE_CODE (arg0);
11103 code1 = TREE_CODE (arg1);
11104 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11105 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11106 && operand_equal_p (TREE_OPERAND (arg0, 0),
11107 TREE_OPERAND (arg1, 0), 0)
11108 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11109 TYPE_UNSIGNED (rtype))
11110 /* Only create rotates in complete modes. Other cases are not
11111 expanded properly. */
11112 && (element_precision (rtype)
11113 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11115 tree tree01, tree11;
11116 tree orig_tree01, orig_tree11;
11117 enum tree_code code01, code11;
11119 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11120 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11121 STRIP_NOPS (tree01);
11122 STRIP_NOPS (tree11);
11123 code01 = TREE_CODE (tree01);
11124 code11 = TREE_CODE (tree11);
11125 if (code11 != MINUS_EXPR
11126 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11128 std::swap (code0, code1);
11129 std::swap (code01, code11);
11130 std::swap (tree01, tree11);
11131 std::swap (orig_tree01, orig_tree11);
11133 if (code01 == INTEGER_CST
11134 && code11 == INTEGER_CST
11135 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11136 == element_precision (rtype)))
11138 tem = build2_loc (loc, LROTATE_EXPR,
11139 rtype, TREE_OPERAND (arg0, 0),
11140 code0 == LSHIFT_EXPR
11141 ? orig_tree01 : orig_tree11);
11142 return fold_convert_loc (loc, type, tem);
11144 else if (code11 == MINUS_EXPR)
11146 tree tree110, tree111;
11147 tree110 = TREE_OPERAND (tree11, 0);
11148 tree111 = TREE_OPERAND (tree11, 1);
11149 STRIP_NOPS (tree110);
11150 STRIP_NOPS (tree111);
11151 if (TREE_CODE (tree110) == INTEGER_CST
11152 && compare_tree_int (tree110,
11153 element_precision (rtype)) == 0
11154 && operand_equal_p (tree01, tree111, 0))
11156 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11157 ? LROTATE_EXPR : RROTATE_EXPR),
11158 rtype, TREE_OPERAND (arg0, 0),
11159 orig_tree01);
11160 return fold_convert_loc (loc, type, tem);
11163 else if (code == BIT_IOR_EXPR
11164 && code11 == BIT_AND_EXPR
11165 && pow2p_hwi (element_precision (rtype)))
11167 tree tree110, tree111;
11168 tree110 = TREE_OPERAND (tree11, 0);
11169 tree111 = TREE_OPERAND (tree11, 1);
11170 STRIP_NOPS (tree110);
11171 STRIP_NOPS (tree111);
11172 if (TREE_CODE (tree110) == NEGATE_EXPR
11173 && TREE_CODE (tree111) == INTEGER_CST
11174 && compare_tree_int (tree111,
11175 element_precision (rtype) - 1) == 0
11176 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11178 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11179 ? LROTATE_EXPR : RROTATE_EXPR),
11180 rtype, TREE_OPERAND (arg0, 0),
11181 orig_tree01);
11182 return fold_convert_loc (loc, type, tem);
11188 associate:
11189 /* In most languages, can't associate operations on floats through
11190 parentheses. Rather than remember where the parentheses were, we
11191 don't associate floats at all, unless the user has specified
11192 -fassociative-math.
11193 And, we need to make sure type is not saturating. */
11195 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11196 && !TYPE_SATURATING (type))
11198 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11199 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11200 tree atype = type;
11201 bool ok = true;
11203 /* Split both trees into variables, constants, and literals. Then
11204 associate each group together, the constants with literals,
11205 then the result with variables. This increases the chances of
11206 literals being recombined later and of generating relocatable
11207 expressions for the sum of a constant and literal. */
11208 var0 = split_tree (arg0, type, code,
11209 &minus_var0, &con0, &minus_con0,
11210 &lit0, &minus_lit0, 0);
11211 var1 = split_tree (arg1, type, code,
11212 &minus_var1, &con1, &minus_con1,
11213 &lit1, &minus_lit1, code == MINUS_EXPR);
11215 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11216 if (code == MINUS_EXPR)
11217 code = PLUS_EXPR;
11219 /* With undefined overflow prefer doing association in a type
11220 which wraps on overflow, if that is one of the operand types. */
11221 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11222 && !TYPE_OVERFLOW_WRAPS (type))
11224 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11225 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11226 atype = TREE_TYPE (arg0);
11227 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11228 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11229 atype = TREE_TYPE (arg1);
11230 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11233 /* With undefined overflow we can only associate constants with one
11234 variable, and constants whose association doesn't overflow. */
11235 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11236 && !TYPE_OVERFLOW_WRAPS (atype))
11238 if ((var0 && var1) || (minus_var0 && minus_var1))
11240 /* ??? If split_tree would handle NEGATE_EXPR we could
11241 simply reject these cases and the allowed cases would
11242 be the var0/minus_var1 ones. */
11243 tree tmp0 = var0 ? var0 : minus_var0;
11244 tree tmp1 = var1 ? var1 : minus_var1;
11245 bool one_neg = false;
11247 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11249 tmp0 = TREE_OPERAND (tmp0, 0);
11250 one_neg = !one_neg;
11252 if (CONVERT_EXPR_P (tmp0)
11253 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11254 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11255 <= TYPE_PRECISION (atype)))
11256 tmp0 = TREE_OPERAND (tmp0, 0);
11257 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11259 tmp1 = TREE_OPERAND (tmp1, 0);
11260 one_neg = !one_neg;
11262 if (CONVERT_EXPR_P (tmp1)
11263 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11264 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11265 <= TYPE_PRECISION (atype)))
11266 tmp1 = TREE_OPERAND (tmp1, 0);
11267 /* The only case we can still associate with two variables
11268 is if they cancel out. */
11269 if (!one_neg
11270 || !operand_equal_p (tmp0, tmp1, 0))
11271 ok = false;
11273 else if ((var0 && minus_var1
11274 && ! operand_equal_p (var0, minus_var1, 0))
11275 || (minus_var0 && var1
11276 && ! operand_equal_p (minus_var0, var1, 0)))
11277 ok = false;
11280 /* Only do something if we found more than two objects. Otherwise,
11281 nothing has changed and we risk infinite recursion. */
11282 if (ok
11283 && ((var0 != 0) + (var1 != 0)
11284 + (minus_var0 != 0) + (minus_var1 != 0)
11285 + (con0 != 0) + (con1 != 0)
11286 + (minus_con0 != 0) + (minus_con1 != 0)
11287 + (lit0 != 0) + (lit1 != 0)
11288 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11290 var0 = associate_trees (loc, var0, var1, code, atype);
11291 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11292 code, atype);
11293 con0 = associate_trees (loc, con0, con1, code, atype);
11294 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11295 code, atype);
11296 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11297 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11298 code, atype);
11300 if (minus_var0 && var0)
11302 var0 = associate_trees (loc, var0, minus_var0,
11303 MINUS_EXPR, atype);
11304 minus_var0 = 0;
11306 if (minus_con0 && con0)
11308 con0 = associate_trees (loc, con0, minus_con0,
11309 MINUS_EXPR, atype);
11310 minus_con0 = 0;
11313 /* Preserve the MINUS_EXPR if the negative part of the literal is
11314 greater than the positive part. Otherwise, the multiplicative
11315 folding code (i.e extract_muldiv) may be fooled in case
11316 unsigned constants are subtracted, like in the following
11317 example: ((X*2 + 4) - 8U)/2. */
11318 if (minus_lit0 && lit0)
11320 if (TREE_CODE (lit0) == INTEGER_CST
11321 && TREE_CODE (minus_lit0) == INTEGER_CST
11322 && tree_int_cst_lt (lit0, minus_lit0)
11323 /* But avoid ending up with only negated parts. */
11324 && (var0 || con0))
11326 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11327 MINUS_EXPR, atype);
11328 lit0 = 0;
11330 else
11332 lit0 = associate_trees (loc, lit0, minus_lit0,
11333 MINUS_EXPR, atype);
11334 minus_lit0 = 0;
11338 /* Don't introduce overflows through reassociation. */
11339 if ((lit0 && TREE_OVERFLOW_P (lit0))
11340 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11341 return NULL_TREE;
11343 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11344 con0 = associate_trees (loc, con0, lit0, code, atype);
11345 lit0 = 0;
11346 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11347 code, atype);
11348 minus_lit0 = 0;
11350 /* Eliminate minus_con0. */
11351 if (minus_con0)
11353 if (con0)
11354 con0 = associate_trees (loc, con0, minus_con0,
11355 MINUS_EXPR, atype);
11356 else if (var0)
11357 var0 = associate_trees (loc, var0, minus_con0,
11358 MINUS_EXPR, atype);
11359 else
11360 gcc_unreachable ();
11361 minus_con0 = 0;
11364 /* Eliminate minus_var0. */
11365 if (minus_var0)
11367 if (con0)
11368 con0 = associate_trees (loc, con0, minus_var0,
11369 MINUS_EXPR, atype);
11370 else
11371 gcc_unreachable ();
11372 minus_var0 = 0;
11375 return
11376 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11377 code, atype));
11381 return NULL_TREE;
11383 case POINTER_DIFF_EXPR:
11384 case MINUS_EXPR:
11385 /* Fold &a[i] - &a[j] to i-j. */
11386 if (TREE_CODE (arg0) == ADDR_EXPR
11387 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11388 && TREE_CODE (arg1) == ADDR_EXPR
11389 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11391 tree tem = fold_addr_of_array_ref_difference (loc, type,
11392 TREE_OPERAND (arg0, 0),
11393 TREE_OPERAND (arg1, 0),
11394 code
11395 == POINTER_DIFF_EXPR);
11396 if (tem)
11397 return tem;
11400 /* Further transformations are not for pointers. */
11401 if (code == POINTER_DIFF_EXPR)
11402 return NULL_TREE;
11404 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11405 if (TREE_CODE (arg0) == NEGATE_EXPR
11406 && negate_expr_p (op1)
11407 /* If arg0 is e.g. unsigned int and type is int, then this could
11408 introduce UB, because if A is INT_MIN at runtime, the original
11409 expression can be well defined while the latter is not.
11410 See PR83269. */
11411 && !(ANY_INTEGRAL_TYPE_P (type)
11412 && TYPE_OVERFLOW_UNDEFINED (type)
11413 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11414 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11415 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11416 fold_convert_loc (loc, type,
11417 TREE_OPERAND (arg0, 0)));
11419 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11420 __complex__ ( x, -y ). This is not the same for SNaNs or if
11421 signed zeros are involved. */
11422 if (!HONOR_SNANS (arg0)
11423 && !HONOR_SIGNED_ZEROS (arg0)
11424 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11426 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11427 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11428 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11429 bool arg0rz = false, arg0iz = false;
11430 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11431 || (arg0i && (arg0iz = real_zerop (arg0i))))
11433 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11434 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11435 if (arg0rz && arg1i && real_zerop (arg1i))
11437 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11438 arg1r ? arg1r
11439 : build1 (REALPART_EXPR, rtype, arg1));
11440 tree ip = arg0i ? arg0i
11441 : build1 (IMAGPART_EXPR, rtype, arg0);
11442 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11444 else if (arg0iz && arg1r && real_zerop (arg1r))
11446 tree rp = arg0r ? arg0r
11447 : build1 (REALPART_EXPR, rtype, arg0);
11448 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11449 arg1i ? arg1i
11450 : build1 (IMAGPART_EXPR, rtype, arg1));
11451 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11456 /* A - B -> A + (-B) if B is easily negatable. */
11457 if (negate_expr_p (op1)
11458 && ! TYPE_OVERFLOW_SANITIZED (type)
11459 && ((FLOAT_TYPE_P (type)
11460 /* Avoid this transformation if B is a positive REAL_CST. */
11461 && (TREE_CODE (op1) != REAL_CST
11462 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11463 || INTEGRAL_TYPE_P (type)))
11464 return fold_build2_loc (loc, PLUS_EXPR, type,
11465 fold_convert_loc (loc, type, arg0),
11466 negate_expr (op1));
11468 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11469 one. Make sure the type is not saturating and has the signedness of
11470 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11471 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11472 if ((TREE_CODE (arg0) == MULT_EXPR
11473 || TREE_CODE (arg1) == MULT_EXPR)
11474 && !TYPE_SATURATING (type)
11475 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11476 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11477 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11479 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11480 if (tem)
11481 return tem;
11484 goto associate;
11486 case MULT_EXPR:
11487 if (! FLOAT_TYPE_P (type))
11489 /* Transform x * -C into -x * C if x is easily negatable. */
11490 if (TREE_CODE (op1) == INTEGER_CST
11491 && tree_int_cst_sgn (op1) == -1
11492 && negate_expr_p (op0)
11493 && negate_expr_p (op1)
11494 && (tem = negate_expr (op1)) != op1
11495 && ! TREE_OVERFLOW (tem))
11496 return fold_build2_loc (loc, MULT_EXPR, type,
11497 fold_convert_loc (loc, type,
11498 negate_expr (op0)), tem);
11500 strict_overflow_p = false;
11501 if (TREE_CODE (arg1) == INTEGER_CST
11502 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11503 &strict_overflow_p)) != 0)
11505 if (strict_overflow_p)
11506 fold_overflow_warning (("assuming signed overflow does not "
11507 "occur when simplifying "
11508 "multiplication"),
11509 WARN_STRICT_OVERFLOW_MISC);
11510 return fold_convert_loc (loc, type, tem);
11513 /* Optimize z * conj(z) for integer complex numbers. */
11514 if (TREE_CODE (arg0) == CONJ_EXPR
11515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11516 return fold_mult_zconjz (loc, type, arg1);
11517 if (TREE_CODE (arg1) == CONJ_EXPR
11518 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11519 return fold_mult_zconjz (loc, type, arg0);
11521 else
11523 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11524 This is not the same for NaNs or if signed zeros are
11525 involved. */
11526 if (!HONOR_NANS (arg0)
11527 && !HONOR_SIGNED_ZEROS (arg0)
11528 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11529 && TREE_CODE (arg1) == COMPLEX_CST
11530 && real_zerop (TREE_REALPART (arg1)))
11532 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11533 if (real_onep (TREE_IMAGPART (arg1)))
11534 return
11535 fold_build2_loc (loc, COMPLEX_EXPR, type,
11536 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11537 rtype, arg0)),
11538 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11539 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11540 return
11541 fold_build2_loc (loc, COMPLEX_EXPR, type,
11542 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11543 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11544 rtype, arg0)));
11547 /* Optimize z * conj(z) for floating point complex numbers.
11548 Guarded by flag_unsafe_math_optimizations as non-finite
11549 imaginary components don't produce scalar results. */
11550 if (flag_unsafe_math_optimizations
11551 && TREE_CODE (arg0) == CONJ_EXPR
11552 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11553 return fold_mult_zconjz (loc, type, arg1);
11554 if (flag_unsafe_math_optimizations
11555 && TREE_CODE (arg1) == CONJ_EXPR
11556 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11557 return fold_mult_zconjz (loc, type, arg0);
11559 goto associate;
11561 case BIT_IOR_EXPR:
11562 /* Canonicalize (X & C1) | C2. */
11563 if (TREE_CODE (arg0) == BIT_AND_EXPR
11564 && TREE_CODE (arg1) == INTEGER_CST
11565 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11567 int width = TYPE_PRECISION (type), w;
11568 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11569 wide_int c2 = wi::to_wide (arg1);
11571 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11572 if ((c1 & c2) == c1)
11573 return omit_one_operand_loc (loc, type, arg1,
11574 TREE_OPERAND (arg0, 0));
11576 wide_int msk = wi::mask (width, false,
11577 TYPE_PRECISION (TREE_TYPE (arg1)));
11579 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11580 if (wi::bit_and_not (msk, c1 | c2) == 0)
11582 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11583 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11586 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11587 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11588 mode which allows further optimizations. */
11589 c1 &= msk;
11590 c2 &= msk;
11591 wide_int c3 = wi::bit_and_not (c1, c2);
11592 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11594 wide_int mask = wi::mask (w, false,
11595 TYPE_PRECISION (type));
11596 if (((c1 | c2) & mask) == mask
11597 && wi::bit_and_not (c1, mask) == 0)
11599 c3 = mask;
11600 break;
11604 if (c3 != c1)
11606 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11607 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11608 wide_int_to_tree (type, c3));
11609 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11613 /* See if this can be simplified into a rotate first. If that
11614 is unsuccessful continue in the association code. */
11615 goto bit_rotate;
11617 case BIT_XOR_EXPR:
11618 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11619 if (TREE_CODE (arg0) == BIT_AND_EXPR
11620 && INTEGRAL_TYPE_P (type)
11621 && integer_onep (TREE_OPERAND (arg0, 1))
11622 && integer_onep (arg1))
11623 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11624 build_zero_cst (TREE_TYPE (arg0)));
11626 /* See if this can be simplified into a rotate first. If that
11627 is unsuccessful continue in the association code. */
11628 goto bit_rotate;
11630 case BIT_AND_EXPR:
11631 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11632 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11633 && INTEGRAL_TYPE_P (type)
11634 && integer_onep (TREE_OPERAND (arg0, 1))
11635 && integer_onep (arg1))
11637 tree tem2;
11638 tem = TREE_OPERAND (arg0, 0);
11639 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11640 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11641 tem, tem2);
11642 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11643 build_zero_cst (TREE_TYPE (tem)));
11645 /* Fold ~X & 1 as (X & 1) == 0. */
11646 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11647 && INTEGRAL_TYPE_P (type)
11648 && integer_onep (arg1))
11650 tree tem2;
11651 tem = TREE_OPERAND (arg0, 0);
11652 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11653 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11654 tem, tem2);
11655 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11656 build_zero_cst (TREE_TYPE (tem)));
11658 /* Fold !X & 1 as X == 0. */
11659 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11660 && integer_onep (arg1))
11662 tem = TREE_OPERAND (arg0, 0);
11663 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11664 build_zero_cst (TREE_TYPE (tem)));
11667 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11668 multiple of 1 << CST. */
11669 if (TREE_CODE (arg1) == INTEGER_CST)
11671 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11672 wide_int ncst1 = -cst1;
11673 if ((cst1 & ncst1) == ncst1
11674 && multiple_of_p (type, arg0,
11675 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11676 return fold_convert_loc (loc, type, arg0);
11679 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11680 bits from CST2. */
11681 if (TREE_CODE (arg1) == INTEGER_CST
11682 && TREE_CODE (arg0) == MULT_EXPR
11683 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11685 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11686 wide_int masked
11687 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11689 if (masked == 0)
11690 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11691 arg0, arg1);
11692 else if (masked != warg1)
11694 /* Avoid the transform if arg1 is a mask of some
11695 mode which allows further optimizations. */
11696 int pop = wi::popcount (warg1);
11697 if (!(pop >= BITS_PER_UNIT
11698 && pow2p_hwi (pop)
11699 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11700 return fold_build2_loc (loc, code, type, op0,
11701 wide_int_to_tree (type, masked));
11705 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11706 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11707 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11709 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11711 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11712 if (mask == -1)
11713 return
11714 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11717 goto associate;
11719 case RDIV_EXPR:
11720 /* Don't touch a floating-point divide by zero unless the mode
11721 of the constant can represent infinity. */
11722 if (TREE_CODE (arg1) == REAL_CST
11723 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11724 && real_zerop (arg1))
11725 return NULL_TREE;
11727 /* (-A) / (-B) -> A / B */
11728 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11729 return fold_build2_loc (loc, RDIV_EXPR, type,
11730 TREE_OPERAND (arg0, 0),
11731 negate_expr (arg1));
11732 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11733 return fold_build2_loc (loc, RDIV_EXPR, type,
11734 negate_expr (arg0),
11735 TREE_OPERAND (arg1, 0));
11736 return NULL_TREE;
11738 case TRUNC_DIV_EXPR:
11739 /* Fall through */
11741 case FLOOR_DIV_EXPR:
11742 /* Simplify A / (B << N) where A and B are positive and B is
11743 a power of 2, to A >> (N + log2(B)). */
11744 strict_overflow_p = false;
11745 if (TREE_CODE (arg1) == LSHIFT_EXPR
11746 && (TYPE_UNSIGNED (type)
11747 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11749 tree sval = TREE_OPERAND (arg1, 0);
11750 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11752 tree sh_cnt = TREE_OPERAND (arg1, 1);
11753 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11754 wi::exact_log2 (wi::to_wide (sval)));
11756 if (strict_overflow_p)
11757 fold_overflow_warning (("assuming signed overflow does not "
11758 "occur when simplifying A / (B << N)"),
11759 WARN_STRICT_OVERFLOW_MISC);
11761 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11762 sh_cnt, pow2);
11763 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11764 fold_convert_loc (loc, type, arg0), sh_cnt);
11768 /* Fall through */
11770 case ROUND_DIV_EXPR:
11771 case CEIL_DIV_EXPR:
11772 case EXACT_DIV_EXPR:
11773 if (integer_zerop (arg1))
11774 return NULL_TREE;
11776 /* Convert -A / -B to A / B when the type is signed and overflow is
11777 undefined. */
11778 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11779 && TREE_CODE (op0) == NEGATE_EXPR
11780 && negate_expr_p (op1))
11782 if (ANY_INTEGRAL_TYPE_P (type))
11783 fold_overflow_warning (("assuming signed overflow does not occur "
11784 "when distributing negation across "
11785 "division"),
11786 WARN_STRICT_OVERFLOW_MISC);
11787 return fold_build2_loc (loc, code, type,
11788 fold_convert_loc (loc, type,
11789 TREE_OPERAND (arg0, 0)),
11790 negate_expr (op1));
11792 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11793 && TREE_CODE (arg1) == NEGATE_EXPR
11794 && negate_expr_p (op0))
11796 if (ANY_INTEGRAL_TYPE_P (type))
11797 fold_overflow_warning (("assuming signed overflow does not occur "
11798 "when distributing negation across "
11799 "division"),
11800 WARN_STRICT_OVERFLOW_MISC);
11801 return fold_build2_loc (loc, code, type,
11802 negate_expr (op0),
11803 fold_convert_loc (loc, type,
11804 TREE_OPERAND (arg1, 0)));
11807 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11808 operation, EXACT_DIV_EXPR.
11810 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11811 At one time others generated faster code, it's not clear if they do
11812 after the last round to changes to the DIV code in expmed.c. */
11813 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11814 && multiple_of_p (type, arg0, arg1))
11815 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11816 fold_convert (type, arg0),
11817 fold_convert (type, arg1));
11819 strict_overflow_p = false;
11820 if (TREE_CODE (arg1) == INTEGER_CST
11821 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11822 &strict_overflow_p)) != 0)
11824 if (strict_overflow_p)
11825 fold_overflow_warning (("assuming signed overflow does not occur "
11826 "when simplifying division"),
11827 WARN_STRICT_OVERFLOW_MISC);
11828 return fold_convert_loc (loc, type, tem);
11831 return NULL_TREE;
11833 case CEIL_MOD_EXPR:
11834 case FLOOR_MOD_EXPR:
11835 case ROUND_MOD_EXPR:
11836 case TRUNC_MOD_EXPR:
11837 strict_overflow_p = false;
11838 if (TREE_CODE (arg1) == INTEGER_CST
11839 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11840 &strict_overflow_p)) != 0)
11842 if (strict_overflow_p)
11843 fold_overflow_warning (("assuming signed overflow does not occur "
11844 "when simplifying modulus"),
11845 WARN_STRICT_OVERFLOW_MISC);
11846 return fold_convert_loc (loc, type, tem);
11849 return NULL_TREE;
11851 case LROTATE_EXPR:
11852 case RROTATE_EXPR:
11853 case RSHIFT_EXPR:
11854 case LSHIFT_EXPR:
11855 /* Since negative shift count is not well-defined,
11856 don't try to compute it in the compiler. */
11857 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11858 return NULL_TREE;
11860 prec = element_precision (type);
11862 /* If we have a rotate of a bit operation with the rotate count and
11863 the second operand of the bit operation both constant,
11864 permute the two operations. */
11865 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11866 && (TREE_CODE (arg0) == BIT_AND_EXPR
11867 || TREE_CODE (arg0) == BIT_IOR_EXPR
11868 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11869 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11871 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11872 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11873 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11874 fold_build2_loc (loc, code, type,
11875 arg00, arg1),
11876 fold_build2_loc (loc, code, type,
11877 arg01, arg1));
11880 /* Two consecutive rotates adding up to the some integer
11881 multiple of the precision of the type can be ignored. */
11882 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11883 && TREE_CODE (arg0) == RROTATE_EXPR
11884 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11885 && wi::umod_trunc (wi::to_wide (arg1)
11886 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11887 prec) == 0)
11888 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11890 return NULL_TREE;
11892 case MIN_EXPR:
11893 case MAX_EXPR:
11894 goto associate;
11896 case TRUTH_ANDIF_EXPR:
11897 /* Note that the operands of this must be ints
11898 and their values must be 0 or 1.
11899 ("true" is a fixed value perhaps depending on the language.) */
11900 /* If first arg is constant zero, return it. */
11901 if (integer_zerop (arg0))
11902 return fold_convert_loc (loc, type, arg0);
11903 /* FALLTHRU */
11904 case TRUTH_AND_EXPR:
11905 /* If either arg is constant true, drop it. */
11906 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11907 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11908 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11909 /* Preserve sequence points. */
11910 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11911 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11912 /* If second arg is constant zero, result is zero, but first arg
11913 must be evaluated. */
11914 if (integer_zerop (arg1))
11915 return omit_one_operand_loc (loc, type, arg1, arg0);
11916 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11917 case will be handled here. */
11918 if (integer_zerop (arg0))
11919 return omit_one_operand_loc (loc, type, arg0, arg1);
11921 /* !X && X is always false. */
11922 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11923 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11924 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11925 /* X && !X is always false. */
11926 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11927 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11928 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11930 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11931 means A >= Y && A != MAX, but in this case we know that
11932 A < X <= MAX. */
11934 if (!TREE_SIDE_EFFECTS (arg0)
11935 && !TREE_SIDE_EFFECTS (arg1))
11937 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11938 if (tem && !operand_equal_p (tem, arg0, 0))
11939 return fold_build2_loc (loc, code, type, tem, arg1);
11941 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11942 if (tem && !operand_equal_p (tem, arg1, 0))
11943 return fold_build2_loc (loc, code, type, arg0, tem);
11946 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11947 != NULL_TREE)
11948 return tem;
11950 return NULL_TREE;
11952 case TRUTH_ORIF_EXPR:
11953 /* Note that the operands of this must be ints
11954 and their values must be 0 or true.
11955 ("true" is a fixed value perhaps depending on the language.) */
11956 /* If first arg is constant true, return it. */
11957 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11958 return fold_convert_loc (loc, type, arg0);
11959 /* FALLTHRU */
11960 case TRUTH_OR_EXPR:
11961 /* If either arg is constant zero, drop it. */
11962 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11963 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11964 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11965 /* Preserve sequence points. */
11966 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11967 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11968 /* If second arg is constant true, result is true, but we must
11969 evaluate first arg. */
11970 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11971 return omit_one_operand_loc (loc, type, arg1, arg0);
11972 /* Likewise for first arg, but note this only occurs here for
11973 TRUTH_OR_EXPR. */
11974 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11975 return omit_one_operand_loc (loc, type, arg0, arg1);
11977 /* !X || X is always true. */
11978 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11979 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11980 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11981 /* X || !X is always true. */
11982 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11983 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11984 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11986 /* (X && !Y) || (!X && Y) is X ^ Y */
11987 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11988 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11990 tree a0, a1, l0, l1, n0, n1;
11992 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11993 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11995 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11996 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11998 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11999 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12001 if ((operand_equal_p (n0, a0, 0)
12002 && operand_equal_p (n1, a1, 0))
12003 || (operand_equal_p (n0, a1, 0)
12004 && operand_equal_p (n1, a0, 0)))
12005 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12008 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12009 != NULL_TREE)
12010 return tem;
12012 return NULL_TREE;
12014 case TRUTH_XOR_EXPR:
12015 /* If the second arg is constant zero, drop it. */
12016 if (integer_zerop (arg1))
12017 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12018 /* If the second arg is constant true, this is a logical inversion. */
12019 if (integer_onep (arg1))
12021 tem = invert_truthvalue_loc (loc, arg0);
12022 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12024 /* Identical arguments cancel to zero. */
12025 if (operand_equal_p (arg0, arg1, 0))
12026 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12028 /* !X ^ X is always true. */
12029 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12030 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12031 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12033 /* X ^ !X is always true. */
12034 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12036 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12038 return NULL_TREE;
12040 case EQ_EXPR:
12041 case NE_EXPR:
12042 STRIP_NOPS (arg0);
12043 STRIP_NOPS (arg1);
12045 tem = fold_comparison (loc, code, type, op0, op1);
12046 if (tem != NULL_TREE)
12047 return tem;
12049 /* bool_var != 1 becomes !bool_var. */
12050 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12051 && code == NE_EXPR)
12052 return fold_convert_loc (loc, type,
12053 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12054 TREE_TYPE (arg0), arg0));
12056 /* bool_var == 0 becomes !bool_var. */
12057 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12058 && code == EQ_EXPR)
12059 return fold_convert_loc (loc, type,
12060 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12061 TREE_TYPE (arg0), arg0));
12063 /* !exp != 0 becomes !exp */
12064 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12065 && code == NE_EXPR)
12066 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12068 /* If this is an EQ or NE comparison with zero and ARG0 is
12069 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12070 two operations, but the latter can be done in one less insn
12071 on machines that have only two-operand insns or on which a
12072 constant cannot be the first operand. */
12073 if (TREE_CODE (arg0) == BIT_AND_EXPR
12074 && integer_zerop (arg1))
12076 tree arg00 = TREE_OPERAND (arg0, 0);
12077 tree arg01 = TREE_OPERAND (arg0, 1);
12078 if (TREE_CODE (arg00) == LSHIFT_EXPR
12079 && integer_onep (TREE_OPERAND (arg00, 0)))
12081 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12082 arg01, TREE_OPERAND (arg00, 1));
12083 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12084 build_one_cst (TREE_TYPE (arg0)));
12085 return fold_build2_loc (loc, code, type,
12086 fold_convert_loc (loc, TREE_TYPE (arg1),
12087 tem), arg1);
12089 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12090 && integer_onep (TREE_OPERAND (arg01, 0)))
12092 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12093 arg00, TREE_OPERAND (arg01, 1));
12094 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12095 build_one_cst (TREE_TYPE (arg0)));
12096 return fold_build2_loc (loc, code, type,
12097 fold_convert_loc (loc, TREE_TYPE (arg1),
12098 tem), arg1);
12102 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12103 C1 is a valid shift constant, and C2 is a power of two, i.e.
12104 a single bit. */
12105 if (TREE_CODE (arg0) == BIT_AND_EXPR
12106 && integer_pow2p (TREE_OPERAND (arg0, 1))
12107 && integer_zerop (arg1))
12109 tree arg00 = TREE_OPERAND (arg0, 0);
12110 STRIP_NOPS (arg00);
12111 if (TREE_CODE (arg00) == RSHIFT_EXPR
12112 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12114 tree itype = TREE_TYPE (arg00);
12115 tree arg001 = TREE_OPERAND (arg00, 1);
12116 prec = TYPE_PRECISION (itype);
12118 /* Check for a valid shift count. */
12119 if (wi::ltu_p (wi::to_wide (arg001), prec))
12121 tree arg01 = TREE_OPERAND (arg0, 1);
12122 tree arg000 = TREE_OPERAND (arg00, 0);
12123 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12124 /* If (C2 << C1) doesn't overflow, then
12125 ((X >> C1) & C2) != 0 can be rewritten as
12126 (X & (C2 << C1)) != 0. */
12127 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12129 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12130 arg01, arg001);
12131 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12132 arg000, tem);
12133 return fold_build2_loc (loc, code, type, tem,
12134 fold_convert_loc (loc, itype, arg1));
12136 /* Otherwise, for signed (arithmetic) shifts,
12137 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12138 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12139 else if (!TYPE_UNSIGNED (itype))
12140 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12141 : LT_EXPR,
12142 type, arg000,
12143 build_int_cst (itype, 0));
12144 /* Otherwise, of unsigned (logical) shifts,
12145 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12146 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12147 else
12148 return omit_one_operand_loc (loc, type,
12149 code == EQ_EXPR ? integer_one_node
12150 : integer_zero_node,
12151 arg000);
12156 /* If this is a comparison of a field, we may be able to simplify it. */
12157 if ((TREE_CODE (arg0) == COMPONENT_REF
12158 || TREE_CODE (arg0) == BIT_FIELD_REF)
12159 /* Handle the constant case even without -O
12160 to make sure the warnings are given. */
12161 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12163 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12164 if (t1)
12165 return t1;
12168 /* Optimize comparisons of strlen vs zero to a compare of the
12169 first character of the string vs zero. To wit,
12170 strlen(ptr) == 0 => *ptr == 0
12171 strlen(ptr) != 0 => *ptr != 0
12172 Other cases should reduce to one of these two (or a constant)
12173 due to the return value of strlen being unsigned. */
12174 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12176 tree fndecl = get_callee_fndecl (arg0);
12178 if (fndecl
12179 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12180 && call_expr_nargs (arg0) == 1
12181 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12182 == POINTER_TYPE))
12184 tree ptrtype
12185 = build_pointer_type (build_qualified_type (char_type_node,
12186 TYPE_QUAL_CONST));
12187 tree ptr = fold_convert_loc (loc, ptrtype,
12188 CALL_EXPR_ARG (arg0, 0));
12189 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12190 return fold_build2_loc (loc, code, type, iref,
12191 build_int_cst (TREE_TYPE (iref), 0));
12195 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12196 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12197 if (TREE_CODE (arg0) == RSHIFT_EXPR
12198 && integer_zerop (arg1)
12199 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12201 tree arg00 = TREE_OPERAND (arg0, 0);
12202 tree arg01 = TREE_OPERAND (arg0, 1);
12203 tree itype = TREE_TYPE (arg00);
12204 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12206 if (TYPE_UNSIGNED (itype))
12208 itype = signed_type_for (itype);
12209 arg00 = fold_convert_loc (loc, itype, arg00);
12211 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12212 type, arg00, build_zero_cst (itype));
12216 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12217 (X & C) == 0 when C is a single bit. */
12218 if (TREE_CODE (arg0) == BIT_AND_EXPR
12219 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12220 && integer_zerop (arg1)
12221 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12223 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12224 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12225 TREE_OPERAND (arg0, 1));
12226 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12227 type, tem,
12228 fold_convert_loc (loc, TREE_TYPE (arg0),
12229 arg1));
12232 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12233 constant C is a power of two, i.e. a single bit. */
12234 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12235 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12236 && integer_zerop (arg1)
12237 && integer_pow2p (TREE_OPERAND (arg0, 1))
12238 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12239 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12241 tree arg00 = TREE_OPERAND (arg0, 0);
12242 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12243 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12246 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12247 when is C is a power of two, i.e. a single bit. */
12248 if (TREE_CODE (arg0) == BIT_AND_EXPR
12249 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12250 && integer_zerop (arg1)
12251 && integer_pow2p (TREE_OPERAND (arg0, 1))
12252 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12253 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12255 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12256 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12257 arg000, TREE_OPERAND (arg0, 1));
12258 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12259 tem, build_int_cst (TREE_TYPE (tem), 0));
12262 if (integer_zerop (arg1)
12263 && tree_expr_nonzero_p (arg0))
12265 tree res = constant_boolean_node (code==NE_EXPR, type);
12266 return omit_one_operand_loc (loc, type, res, arg0);
12269 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12270 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12272 tree arg00 = TREE_OPERAND (arg0, 0);
12273 tree arg01 = TREE_OPERAND (arg0, 1);
12274 tree arg10 = TREE_OPERAND (arg1, 0);
12275 tree arg11 = TREE_OPERAND (arg1, 1);
12276 tree itype = TREE_TYPE (arg0);
12278 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12279 operand_equal_p guarantees no side-effects so we don't need
12280 to use omit_one_operand on Z. */
12281 if (operand_equal_p (arg01, arg11, 0))
12282 return fold_build2_loc (loc, code, type, arg00,
12283 fold_convert_loc (loc, TREE_TYPE (arg00),
12284 arg10));
12285 if (operand_equal_p (arg01, arg10, 0))
12286 return fold_build2_loc (loc, code, type, arg00,
12287 fold_convert_loc (loc, TREE_TYPE (arg00),
12288 arg11));
12289 if (operand_equal_p (arg00, arg11, 0))
12290 return fold_build2_loc (loc, code, type, arg01,
12291 fold_convert_loc (loc, TREE_TYPE (arg01),
12292 arg10));
12293 if (operand_equal_p (arg00, arg10, 0))
12294 return fold_build2_loc (loc, code, type, arg01,
12295 fold_convert_loc (loc, TREE_TYPE (arg01),
12296 arg11));
12298 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12299 if (TREE_CODE (arg01) == INTEGER_CST
12300 && TREE_CODE (arg11) == INTEGER_CST)
12302 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12303 fold_convert_loc (loc, itype, arg11));
12304 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12305 return fold_build2_loc (loc, code, type, tem,
12306 fold_convert_loc (loc, itype, arg10));
12310 /* Attempt to simplify equality/inequality comparisons of complex
12311 values. Only lower the comparison if the result is known or
12312 can be simplified to a single scalar comparison. */
12313 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12314 || TREE_CODE (arg0) == COMPLEX_CST)
12315 && (TREE_CODE (arg1) == COMPLEX_EXPR
12316 || TREE_CODE (arg1) == COMPLEX_CST))
12318 tree real0, imag0, real1, imag1;
12319 tree rcond, icond;
12321 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12323 real0 = TREE_OPERAND (arg0, 0);
12324 imag0 = TREE_OPERAND (arg0, 1);
12326 else
12328 real0 = TREE_REALPART (arg0);
12329 imag0 = TREE_IMAGPART (arg0);
12332 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12334 real1 = TREE_OPERAND (arg1, 0);
12335 imag1 = TREE_OPERAND (arg1, 1);
12337 else
12339 real1 = TREE_REALPART (arg1);
12340 imag1 = TREE_IMAGPART (arg1);
12343 rcond = fold_binary_loc (loc, code, type, real0, real1);
12344 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12346 if (integer_zerop (rcond))
12348 if (code == EQ_EXPR)
12349 return omit_two_operands_loc (loc, type, boolean_false_node,
12350 imag0, imag1);
12351 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12353 else
12355 if (code == NE_EXPR)
12356 return omit_two_operands_loc (loc, type, boolean_true_node,
12357 imag0, imag1);
12358 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12362 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12363 if (icond && TREE_CODE (icond) == INTEGER_CST)
12365 if (integer_zerop (icond))
12367 if (code == EQ_EXPR)
12368 return omit_two_operands_loc (loc, type, boolean_false_node,
12369 real0, real1);
12370 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12372 else
12374 if (code == NE_EXPR)
12375 return omit_two_operands_loc (loc, type, boolean_true_node,
12376 real0, real1);
12377 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12382 return NULL_TREE;
12384 case LT_EXPR:
12385 case GT_EXPR:
12386 case LE_EXPR:
12387 case GE_EXPR:
12388 tem = fold_comparison (loc, code, type, op0, op1);
12389 if (tem != NULL_TREE)
12390 return tem;
12392 /* Transform comparisons of the form X +- C CMP X. */
12393 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12394 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12395 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12396 && !HONOR_SNANS (arg0))
12398 tree arg01 = TREE_OPERAND (arg0, 1);
12399 enum tree_code code0 = TREE_CODE (arg0);
12400 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12402 /* (X - c) > X becomes false. */
12403 if (code == GT_EXPR
12404 && ((code0 == MINUS_EXPR && is_positive >= 0)
12405 || (code0 == PLUS_EXPR && is_positive <= 0)))
12406 return constant_boolean_node (0, type);
12408 /* Likewise (X + c) < X becomes false. */
12409 if (code == LT_EXPR
12410 && ((code0 == PLUS_EXPR && is_positive >= 0)
12411 || (code0 == MINUS_EXPR && is_positive <= 0)))
12412 return constant_boolean_node (0, type);
12414 /* Convert (X - c) <= X to true. */
12415 if (!HONOR_NANS (arg1)
12416 && code == LE_EXPR
12417 && ((code0 == MINUS_EXPR && is_positive >= 0)
12418 || (code0 == PLUS_EXPR && is_positive <= 0)))
12419 return constant_boolean_node (1, type);
12421 /* Convert (X + c) >= X to true. */
12422 if (!HONOR_NANS (arg1)
12423 && code == GE_EXPR
12424 && ((code0 == PLUS_EXPR && is_positive >= 0)
12425 || (code0 == MINUS_EXPR && is_positive <= 0)))
12426 return constant_boolean_node (1, type);
12429 /* If we are comparing an ABS_EXPR with a constant, we can
12430 convert all the cases into explicit comparisons, but they may
12431 well not be faster than doing the ABS and one comparison.
12432 But ABS (X) <= C is a range comparison, which becomes a subtraction
12433 and a comparison, and is probably faster. */
12434 if (code == LE_EXPR
12435 && TREE_CODE (arg1) == INTEGER_CST
12436 && TREE_CODE (arg0) == ABS_EXPR
12437 && ! TREE_SIDE_EFFECTS (arg0)
12438 && (tem = negate_expr (arg1)) != 0
12439 && TREE_CODE (tem) == INTEGER_CST
12440 && !TREE_OVERFLOW (tem))
12441 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12442 build2 (GE_EXPR, type,
12443 TREE_OPERAND (arg0, 0), tem),
12444 build2 (LE_EXPR, type,
12445 TREE_OPERAND (arg0, 0), arg1));
12447 /* Convert ABS_EXPR<x> >= 0 to true. */
12448 strict_overflow_p = false;
12449 if (code == GE_EXPR
12450 && (integer_zerop (arg1)
12451 || (! HONOR_NANS (arg0)
12452 && real_zerop (arg1)))
12453 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12455 if (strict_overflow_p)
12456 fold_overflow_warning (("assuming signed overflow does not occur "
12457 "when simplifying comparison of "
12458 "absolute value and zero"),
12459 WARN_STRICT_OVERFLOW_CONDITIONAL);
12460 return omit_one_operand_loc (loc, type,
12461 constant_boolean_node (true, type),
12462 arg0);
12465 /* Convert ABS_EXPR<x> < 0 to false. */
12466 strict_overflow_p = false;
12467 if (code == LT_EXPR
12468 && (integer_zerop (arg1) || real_zerop (arg1))
12469 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12471 if (strict_overflow_p)
12472 fold_overflow_warning (("assuming signed overflow does not occur "
12473 "when simplifying comparison of "
12474 "absolute value and zero"),
12475 WARN_STRICT_OVERFLOW_CONDITIONAL);
12476 return omit_one_operand_loc (loc, type,
12477 constant_boolean_node (false, type),
12478 arg0);
12481 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12482 and similarly for >= into !=. */
12483 if ((code == LT_EXPR || code == GE_EXPR)
12484 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12485 && TREE_CODE (arg1) == LSHIFT_EXPR
12486 && integer_onep (TREE_OPERAND (arg1, 0)))
12487 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12488 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12489 TREE_OPERAND (arg1, 1)),
12490 build_zero_cst (TREE_TYPE (arg0)));
12492 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12493 otherwise Y might be >= # of bits in X's type and thus e.g.
12494 (unsigned char) (1 << Y) for Y 15 might be 0.
12495 If the cast is widening, then 1 << Y should have unsigned type,
12496 otherwise if Y is number of bits in the signed shift type minus 1,
12497 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12498 31 might be 0xffffffff80000000. */
12499 if ((code == LT_EXPR || code == GE_EXPR)
12500 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12501 || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12502 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12503 && CONVERT_EXPR_P (arg1)
12504 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12505 && (element_precision (TREE_TYPE (arg1))
12506 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12507 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12508 || (element_precision (TREE_TYPE (arg1))
12509 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12510 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12512 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12513 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12514 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12515 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12516 build_zero_cst (TREE_TYPE (arg0)));
12519 return NULL_TREE;
12521 case UNORDERED_EXPR:
12522 case ORDERED_EXPR:
12523 case UNLT_EXPR:
12524 case UNLE_EXPR:
12525 case UNGT_EXPR:
12526 case UNGE_EXPR:
12527 case UNEQ_EXPR:
12528 case LTGT_EXPR:
12529 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12531 tree targ0 = strip_float_extensions (arg0);
12532 tree targ1 = strip_float_extensions (arg1);
12533 tree newtype = TREE_TYPE (targ0);
12535 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12536 newtype = TREE_TYPE (targ1);
12538 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12539 return fold_build2_loc (loc, code, type,
12540 fold_convert_loc (loc, newtype, targ0),
12541 fold_convert_loc (loc, newtype, targ1));
12544 return NULL_TREE;
12546 case COMPOUND_EXPR:
12547 /* When pedantic, a compound expression can be neither an lvalue
12548 nor an integer constant expression. */
12549 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12550 return NULL_TREE;
12551 /* Don't let (0, 0) be null pointer constant. */
12552 tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12553 : fold_convert_loc (loc, type, arg1);
12554 return tem;
12556 case ASSERT_EXPR:
12557 /* An ASSERT_EXPR should never be passed to fold_binary. */
12558 gcc_unreachable ();
12560 default:
12561 return NULL_TREE;
12562 } /* switch (code) */
12565 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12566 ((A & N) + B) & M -> (A + B) & M
12567 Similarly if (N & M) == 0,
12568 ((A | N) + B) & M -> (A + B) & M
12569 and for - instead of + (or unary - instead of +)
12570 and/or ^ instead of |.
12571 If B is constant and (B & M) == 0, fold into A & M.
12573 This function is a helper for match.pd patterns. Return non-NULL
12574 type in which the simplified operation should be performed only
12575 if any optimization is possible.
12577 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12578 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12579 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12580 +/-. */
12581 tree
12582 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12583 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12584 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12585 tree *pmop)
12587 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12588 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12589 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12590 if (~cst1 == 0
12591 || (cst1 & (cst1 + 1)) != 0
12592 || !INTEGRAL_TYPE_P (type)
12593 || (!TYPE_OVERFLOW_WRAPS (type)
12594 && TREE_CODE (type) != INTEGER_TYPE)
12595 || (wi::max_value (type) & cst1) != cst1)
12596 return NULL_TREE;
12598 enum tree_code codes[2] = { code00, code01 };
12599 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12600 int which = 0;
12601 wide_int cst0;
12603 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12604 arg1 (M) is == (1LL << cst) - 1.
12605 Store C into PMOP[0] and D into PMOP[1]. */
12606 pmop[0] = arg00;
12607 pmop[1] = arg01;
12608 which = code != NEGATE_EXPR;
12610 for (; which >= 0; which--)
12611 switch (codes[which])
12613 case BIT_AND_EXPR:
12614 case BIT_IOR_EXPR:
12615 case BIT_XOR_EXPR:
12616 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12617 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12618 if (codes[which] == BIT_AND_EXPR)
12620 if (cst0 != cst1)
12621 break;
12623 else if (cst0 != 0)
12624 break;
12625 /* If C or D is of the form (A & N) where
12626 (N & M) == M, or of the form (A | N) or
12627 (A ^ N) where (N & M) == 0, replace it with A. */
12628 pmop[which] = arg0xx[2 * which];
12629 break;
12630 case ERROR_MARK:
12631 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12632 break;
12633 /* If C or D is a N where (N & M) == 0, it can be
12634 omitted (replaced with 0). */
12635 if ((code == PLUS_EXPR
12636 || (code == MINUS_EXPR && which == 0))
12637 && (cst1 & wi::to_wide (pmop[which])) == 0)
12638 pmop[which] = build_int_cst (type, 0);
12639 /* Similarly, with C - N where (-N & M) == 0. */
12640 if (code == MINUS_EXPR
12641 && which == 1
12642 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12643 pmop[which] = build_int_cst (type, 0);
12644 break;
12645 default:
12646 gcc_unreachable ();
12649 /* Only build anything new if we optimized one or both arguments above. */
12650 if (pmop[0] == arg00 && pmop[1] == arg01)
12651 return NULL_TREE;
12653 if (TYPE_OVERFLOW_WRAPS (type))
12654 return type;
12655 else
12656 return unsigned_type_for (type);
12659 /* Used by contains_label_[p1]. */
12661 struct contains_label_data
12663 hash_set<tree> *pset;
12664 bool inside_switch_p;
12667 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12668 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12669 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12671 static tree
12672 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12674 contains_label_data *d = (contains_label_data *) data;
12675 switch (TREE_CODE (*tp))
12677 case LABEL_EXPR:
12678 return *tp;
12680 case CASE_LABEL_EXPR:
12681 if (!d->inside_switch_p)
12682 return *tp;
12683 return NULL_TREE;
12685 case SWITCH_EXPR:
12686 if (!d->inside_switch_p)
12688 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12689 return *tp;
12690 d->inside_switch_p = true;
12691 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12692 return *tp;
12693 d->inside_switch_p = false;
12694 *walk_subtrees = 0;
12696 return NULL_TREE;
12698 case GOTO_EXPR:
12699 *walk_subtrees = 0;
12700 return NULL_TREE;
12702 default:
12703 return NULL_TREE;
12707 /* Return whether the sub-tree ST contains a label which is accessible from
12708 outside the sub-tree. */
12710 static bool
12711 contains_label_p (tree st)
12713 hash_set<tree> pset;
12714 contains_label_data data = { &pset, false };
12715 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12718 /* Fold a ternary expression of code CODE and type TYPE with operands
12719 OP0, OP1, and OP2. Return the folded expression if folding is
12720 successful. Otherwise, return NULL_TREE. */
12722 tree
12723 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12724 tree op0, tree op1, tree op2)
12726 tree tem;
12727 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12728 enum tree_code_class kind = TREE_CODE_CLASS (code);
12730 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12731 && TREE_CODE_LENGTH (code) == 3);
12733 /* If this is a commutative operation, and OP0 is a constant, move it
12734 to OP1 to reduce the number of tests below. */
12735 if (commutative_ternary_tree_code (code)
12736 && tree_swap_operands_p (op0, op1))
12737 return fold_build3_loc (loc, code, type, op1, op0, op2);
12739 tem = generic_simplify (loc, code, type, op0, op1, op2);
12740 if (tem)
12741 return tem;
12743 /* Strip any conversions that don't change the mode. This is safe
12744 for every expression, except for a comparison expression because
12745 its signedness is derived from its operands. So, in the latter
12746 case, only strip conversions that don't change the signedness.
12748 Note that this is done as an internal manipulation within the
12749 constant folder, in order to find the simplest representation of
12750 the arguments so that their form can be studied. In any cases,
12751 the appropriate type conversions should be put back in the tree
12752 that will get out of the constant folder. */
12753 if (op0)
12755 arg0 = op0;
12756 STRIP_NOPS (arg0);
12759 if (op1)
12761 arg1 = op1;
12762 STRIP_NOPS (arg1);
12765 if (op2)
12767 arg2 = op2;
12768 STRIP_NOPS (arg2);
12771 switch (code)
12773 case COMPONENT_REF:
12774 if (TREE_CODE (arg0) == CONSTRUCTOR
12775 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12777 unsigned HOST_WIDE_INT idx;
12778 tree field, value;
12779 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12780 if (field == arg1)
12781 return value;
12783 return NULL_TREE;
12785 case COND_EXPR:
12786 case VEC_COND_EXPR:
12787 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12788 so all simple results must be passed through pedantic_non_lvalue. */
12789 if (TREE_CODE (arg0) == INTEGER_CST)
12791 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12792 tem = integer_zerop (arg0) ? op2 : op1;
12793 /* Only optimize constant conditions when the selected branch
12794 has the same type as the COND_EXPR. This avoids optimizing
12795 away "c ? x : throw", where the throw has a void type.
12796 Avoid throwing away that operand which contains label. */
12797 if ((!TREE_SIDE_EFFECTS (unused_op)
12798 || !contains_label_p (unused_op))
12799 && (! VOID_TYPE_P (TREE_TYPE (tem))
12800 || VOID_TYPE_P (type)))
12801 return protected_set_expr_location_unshare (tem, loc);
12802 return NULL_TREE;
12804 else if (TREE_CODE (arg0) == VECTOR_CST)
12806 unsigned HOST_WIDE_INT nelts;
12807 if ((TREE_CODE (arg1) == VECTOR_CST
12808 || TREE_CODE (arg1) == CONSTRUCTOR)
12809 && (TREE_CODE (arg2) == VECTOR_CST
12810 || TREE_CODE (arg2) == CONSTRUCTOR)
12811 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12813 vec_perm_builder sel (nelts, nelts, 1);
12814 for (unsigned int i = 0; i < nelts; i++)
12816 tree val = VECTOR_CST_ELT (arg0, i);
12817 if (integer_all_onesp (val))
12818 sel.quick_push (i);
12819 else if (integer_zerop (val))
12820 sel.quick_push (nelts + i);
12821 else /* Currently unreachable. */
12822 return NULL_TREE;
12824 vec_perm_indices indices (sel, 2, nelts);
12825 tree t = fold_vec_perm (type, arg1, arg2, indices);
12826 if (t != NULL_TREE)
12827 return t;
12831 /* If we have A op B ? A : C, we may be able to convert this to a
12832 simpler expression, depending on the operation and the values
12833 of B and C. Signed zeros prevent all of these transformations,
12834 for reasons given above each one.
12836 Also try swapping the arguments and inverting the conditional. */
12837 if (COMPARISON_CLASS_P (arg0)
12838 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12839 && !HONOR_SIGNED_ZEROS (op1))
12841 tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12842 TREE_OPERAND (arg0, 0),
12843 TREE_OPERAND (arg0, 1),
12844 op1, op2);
12845 if (tem)
12846 return tem;
12849 if (COMPARISON_CLASS_P (arg0)
12850 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12851 && !HONOR_SIGNED_ZEROS (op2))
12853 enum tree_code comp_code = TREE_CODE (arg0);
12854 tree arg00 = TREE_OPERAND (arg0, 0);
12855 tree arg01 = TREE_OPERAND (arg0, 1);
12856 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
12857 if (comp_code != ERROR_MARK)
12858 tem = fold_cond_expr_with_comparison (loc, type, comp_code,
12859 arg00,
12860 arg01,
12861 op2, op1);
12862 if (tem)
12863 return tem;
12866 /* If the second operand is simpler than the third, swap them
12867 since that produces better jump optimization results. */
12868 if (truth_value_p (TREE_CODE (arg0))
12869 && tree_swap_operands_p (op1, op2))
12871 location_t loc0 = expr_location_or (arg0, loc);
12872 /* See if this can be inverted. If it can't, possibly because
12873 it was a floating-point inequality comparison, don't do
12874 anything. */
12875 tem = fold_invert_truthvalue (loc0, arg0);
12876 if (tem)
12877 return fold_build3_loc (loc, code, type, tem, op2, op1);
12880 /* Convert A ? 1 : 0 to simply A. */
12881 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12882 : (integer_onep (op1)
12883 && !VECTOR_TYPE_P (type)))
12884 && integer_zerop (op2)
12885 /* If we try to convert OP0 to our type, the
12886 call to fold will try to move the conversion inside
12887 a COND, which will recurse. In that case, the COND_EXPR
12888 is probably the best choice, so leave it alone. */
12889 && type == TREE_TYPE (arg0))
12890 return protected_set_expr_location_unshare (arg0, loc);
12892 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12893 over COND_EXPR in cases such as floating point comparisons. */
12894 if (integer_zerop (op1)
12895 && code == COND_EXPR
12896 && integer_onep (op2)
12897 && !VECTOR_TYPE_P (type)
12898 && truth_value_p (TREE_CODE (arg0)))
12899 return fold_convert_loc (loc, type,
12900 invert_truthvalue_loc (loc, arg0));
12902 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12903 if (TREE_CODE (arg0) == LT_EXPR
12904 && integer_zerop (TREE_OPERAND (arg0, 1))
12905 && integer_zerop (op2)
12906 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12908 /* sign_bit_p looks through both zero and sign extensions,
12909 but for this optimization only sign extensions are
12910 usable. */
12911 tree tem2 = TREE_OPERAND (arg0, 0);
12912 while (tem != tem2)
12914 if (TREE_CODE (tem2) != NOP_EXPR
12915 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12917 tem = NULL_TREE;
12918 break;
12920 tem2 = TREE_OPERAND (tem2, 0);
12922 /* sign_bit_p only checks ARG1 bits within A's precision.
12923 If <sign bit of A> has wider type than A, bits outside
12924 of A's precision in <sign bit of A> need to be checked.
12925 If they are all 0, this optimization needs to be done
12926 in unsigned A's type, if they are all 1 in signed A's type,
12927 otherwise this can't be done. */
12928 if (tem
12929 && TYPE_PRECISION (TREE_TYPE (tem))
12930 < TYPE_PRECISION (TREE_TYPE (arg1))
12931 && TYPE_PRECISION (TREE_TYPE (tem))
12932 < TYPE_PRECISION (type))
12934 int inner_width, outer_width;
12935 tree tem_type;
12937 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12938 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12939 if (outer_width > TYPE_PRECISION (type))
12940 outer_width = TYPE_PRECISION (type);
12942 wide_int mask = wi::shifted_mask
12943 (inner_width, outer_width - inner_width, false,
12944 TYPE_PRECISION (TREE_TYPE (arg1)));
12946 wide_int common = mask & wi::to_wide (arg1);
12947 if (common == mask)
12949 tem_type = signed_type_for (TREE_TYPE (tem));
12950 tem = fold_convert_loc (loc, tem_type, tem);
12952 else if (common == 0)
12954 tem_type = unsigned_type_for (TREE_TYPE (tem));
12955 tem = fold_convert_loc (loc, tem_type, tem);
12957 else
12958 tem = NULL;
12961 if (tem)
12962 return
12963 fold_convert_loc (loc, type,
12964 fold_build2_loc (loc, BIT_AND_EXPR,
12965 TREE_TYPE (tem), tem,
12966 fold_convert_loc (loc,
12967 TREE_TYPE (tem),
12968 arg1)));
12971 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12972 already handled above. */
12973 if (TREE_CODE (arg0) == BIT_AND_EXPR
12974 && integer_onep (TREE_OPERAND (arg0, 1))
12975 && integer_zerop (op2)
12976 && integer_pow2p (arg1))
12978 tree tem = TREE_OPERAND (arg0, 0);
12979 STRIP_NOPS (tem);
12980 if (TREE_CODE (tem) == RSHIFT_EXPR
12981 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12982 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12983 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12984 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12985 fold_convert_loc (loc, type,
12986 TREE_OPERAND (tem, 0)),
12987 op1);
12990 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12991 is probably obsolete because the first operand should be a
12992 truth value (that's why we have the two cases above), but let's
12993 leave it in until we can confirm this for all front-ends. */
12994 if (integer_zerop (op2)
12995 && TREE_CODE (arg0) == NE_EXPR
12996 && integer_zerop (TREE_OPERAND (arg0, 1))
12997 && integer_pow2p (arg1)
12998 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12999 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13000 arg1, OEP_ONLY_CONST)
13001 /* operand_equal_p compares just value, not precision, so e.g.
13002 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13003 second operand 32-bit -128, which is not a power of two (or vice
13004 versa. */
13005 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13006 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13008 /* Disable the transformations below for vectors, since
13009 fold_binary_op_with_conditional_arg may undo them immediately,
13010 yielding an infinite loop. */
13011 if (code == VEC_COND_EXPR)
13012 return NULL_TREE;
13014 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13015 if (integer_zerop (op2)
13016 && truth_value_p (TREE_CODE (arg0))
13017 && truth_value_p (TREE_CODE (arg1))
13018 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13019 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13020 : TRUTH_ANDIF_EXPR,
13021 type, fold_convert_loc (loc, type, arg0), op1);
13023 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13024 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13025 && truth_value_p (TREE_CODE (arg0))
13026 && truth_value_p (TREE_CODE (arg1))
13027 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13029 location_t loc0 = expr_location_or (arg0, loc);
13030 /* Only perform transformation if ARG0 is easily inverted. */
13031 tem = fold_invert_truthvalue (loc0, arg0);
13032 if (tem)
13033 return fold_build2_loc (loc, code == VEC_COND_EXPR
13034 ? BIT_IOR_EXPR
13035 : TRUTH_ORIF_EXPR,
13036 type, fold_convert_loc (loc, type, tem),
13037 op1);
13040 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13041 if (integer_zerop (arg1)
13042 && truth_value_p (TREE_CODE (arg0))
13043 && truth_value_p (TREE_CODE (op2))
13044 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13046 location_t loc0 = expr_location_or (arg0, loc);
13047 /* Only perform transformation if ARG0 is easily inverted. */
13048 tem = fold_invert_truthvalue (loc0, arg0);
13049 if (tem)
13050 return fold_build2_loc (loc, code == VEC_COND_EXPR
13051 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13052 type, fold_convert_loc (loc, type, tem),
13053 op2);
13056 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13057 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13058 && truth_value_p (TREE_CODE (arg0))
13059 && truth_value_p (TREE_CODE (op2))
13060 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13061 return fold_build2_loc (loc, code == VEC_COND_EXPR
13062 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13063 type, fold_convert_loc (loc, type, arg0), op2);
13065 return NULL_TREE;
13067 case CALL_EXPR:
13068 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13069 of fold_ternary on them. */
13070 gcc_unreachable ();
13072 case BIT_FIELD_REF:
13073 if (TREE_CODE (arg0) == VECTOR_CST
13074 && (type == TREE_TYPE (TREE_TYPE (arg0))
13075 || (VECTOR_TYPE_P (type)
13076 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13077 && tree_fits_uhwi_p (op1)
13078 && tree_fits_uhwi_p (op2))
13080 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13081 unsigned HOST_WIDE_INT width
13082 = (TREE_CODE (eltype) == BOOLEAN_TYPE
13083 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13084 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13085 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13087 if (n != 0
13088 && (idx % width) == 0
13089 && (n % width) == 0
13090 && known_le ((idx + n) / width,
13091 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13093 idx = idx / width;
13094 n = n / width;
13096 if (TREE_CODE (arg0) == VECTOR_CST)
13098 if (n == 1)
13100 tem = VECTOR_CST_ELT (arg0, idx);
13101 if (VECTOR_TYPE_P (type))
13102 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13103 return tem;
13106 tree_vector_builder vals (type, n, 1);
13107 for (unsigned i = 0; i < n; ++i)
13108 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13109 return vals.build ();
13114 /* On constants we can use native encode/interpret to constant
13115 fold (nearly) all BIT_FIELD_REFs. */
13116 if (CONSTANT_CLASS_P (arg0)
13117 && can_native_interpret_type_p (type)
13118 && BITS_PER_UNIT == 8
13119 && tree_fits_uhwi_p (op1)
13120 && tree_fits_uhwi_p (op2))
13122 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13123 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13124 /* Limit us to a reasonable amount of work. To relax the
13125 other limitations we need bit-shifting of the buffer
13126 and rounding up the size. */
13127 if (bitpos % BITS_PER_UNIT == 0
13128 && bitsize % BITS_PER_UNIT == 0
13129 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13131 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13132 unsigned HOST_WIDE_INT len
13133 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13134 bitpos / BITS_PER_UNIT);
13135 if (len > 0
13136 && len * BITS_PER_UNIT >= bitsize)
13138 tree v = native_interpret_expr (type, b,
13139 bitsize / BITS_PER_UNIT);
13140 if (v)
13141 return v;
13146 return NULL_TREE;
13148 case VEC_PERM_EXPR:
13149 /* Perform constant folding of BIT_INSERT_EXPR. */
13150 if (TREE_CODE (arg2) == VECTOR_CST
13151 && TREE_CODE (op0) == VECTOR_CST
13152 && TREE_CODE (op1) == VECTOR_CST)
13154 /* Build a vector of integers from the tree mask. */
13155 vec_perm_builder builder;
13156 if (!tree_to_vec_perm_builder (&builder, arg2))
13157 return NULL_TREE;
13159 /* Create a vec_perm_indices for the integer vector. */
13160 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13161 bool single_arg = (op0 == op1);
13162 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13163 return fold_vec_perm (type, op0, op1, sel);
13165 return NULL_TREE;
13167 case BIT_INSERT_EXPR:
13168 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13169 if (TREE_CODE (arg0) == INTEGER_CST
13170 && TREE_CODE (arg1) == INTEGER_CST)
13172 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13173 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13174 wide_int tem = (wi::to_wide (arg0)
13175 & wi::shifted_mask (bitpos, bitsize, true,
13176 TYPE_PRECISION (type)));
13177 wide_int tem2
13178 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13179 bitsize), bitpos);
13180 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13182 else if (TREE_CODE (arg0) == VECTOR_CST
13183 && CONSTANT_CLASS_P (arg1)
13184 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13185 TREE_TYPE (arg1)))
13187 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13188 unsigned HOST_WIDE_INT elsize
13189 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13190 if (bitpos % elsize == 0)
13192 unsigned k = bitpos / elsize;
13193 unsigned HOST_WIDE_INT nelts;
13194 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13195 return arg0;
13196 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13198 tree_vector_builder elts (type, nelts, 1);
13199 elts.quick_grow (nelts);
13200 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13201 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13202 return elts.build ();
13206 return NULL_TREE;
13208 default:
13209 return NULL_TREE;
13210 } /* switch (code) */
13213 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13214 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13215 constructor element index of the value returned. If the element is
13216 not found NULL_TREE is returned and *CTOR_IDX is updated to
13217 the index of the element after the ACCESS_INDEX position (which
13218 may be outside of the CTOR array). */
13220 tree
13221 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13222 unsigned *ctor_idx)
13224 tree index_type = NULL_TREE;
13225 signop index_sgn = UNSIGNED;
13226 offset_int low_bound = 0;
13228 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13230 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13231 if (domain_type && TYPE_MIN_VALUE (domain_type))
13233 /* Static constructors for variably sized objects makes no sense. */
13234 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13235 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13236 /* ??? When it is obvious that the range is signed, treat it so. */
13237 if (TYPE_UNSIGNED (index_type)
13238 && TYPE_MAX_VALUE (domain_type)
13239 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13240 TYPE_MIN_VALUE (domain_type)))
13242 index_sgn = SIGNED;
13243 low_bound
13244 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13245 SIGNED);
13247 else
13249 index_sgn = TYPE_SIGN (index_type);
13250 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13255 if (index_type)
13256 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13257 index_sgn);
13259 offset_int index = low_bound;
13260 if (index_type)
13261 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13263 offset_int max_index = index;
13264 unsigned cnt;
13265 tree cfield, cval;
13266 bool first_p = true;
13268 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13270 /* Array constructor might explicitly set index, or specify a range,
13271 or leave index NULL meaning that it is next index after previous
13272 one. */
13273 if (cfield)
13275 if (TREE_CODE (cfield) == INTEGER_CST)
13276 max_index = index
13277 = offset_int::from (wi::to_wide (cfield), index_sgn);
13278 else
13280 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13281 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13282 index_sgn);
13283 max_index
13284 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13285 index_sgn);
13286 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13289 else if (!first_p)
13291 index = max_index + 1;
13292 if (index_type)
13293 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13294 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13295 max_index = index;
13297 else
13298 first_p = false;
13300 /* Do we have match? */
13301 if (wi::cmp (access_index, index, index_sgn) >= 0)
13303 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13305 if (ctor_idx)
13306 *ctor_idx = cnt;
13307 return cval;
13310 else if (in_gimple_form)
13311 /* We're past the element we search for. Note during parsing
13312 the elements might not be sorted.
13313 ??? We should use a binary search and a flag on the
13314 CONSTRUCTOR as to whether elements are sorted in declaration
13315 order. */
13316 break;
13318 if (ctor_idx)
13319 *ctor_idx = cnt;
13320 return NULL_TREE;
13323 /* Perform constant folding and related simplification of EXPR.
13324 The related simplifications include x*1 => x, x*0 => 0, etc.,
13325 and application of the associative law.
13326 NOP_EXPR conversions may be removed freely (as long as we
13327 are careful not to change the type of the overall expression).
13328 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13329 but we can constant-fold them if they have constant operands. */
13331 #ifdef ENABLE_FOLD_CHECKING
13332 # define fold(x) fold_1 (x)
13333 static tree fold_1 (tree);
13334 static
13335 #endif
13336 tree
13337 fold (tree expr)
13339 const tree t = expr;
13340 enum tree_code code = TREE_CODE (t);
13341 enum tree_code_class kind = TREE_CODE_CLASS (code);
13342 tree tem;
13343 location_t loc = EXPR_LOCATION (expr);
13345 /* Return right away if a constant. */
13346 if (kind == tcc_constant)
13347 return t;
13349 /* CALL_EXPR-like objects with variable numbers of operands are
13350 treated specially. */
13351 if (kind == tcc_vl_exp)
13353 if (code == CALL_EXPR)
13355 tem = fold_call_expr (loc, expr, false);
13356 return tem ? tem : expr;
13358 return expr;
13361 if (IS_EXPR_CODE_CLASS (kind))
13363 tree type = TREE_TYPE (t);
13364 tree op0, op1, op2;
13366 switch (TREE_CODE_LENGTH (code))
13368 case 1:
13369 op0 = TREE_OPERAND (t, 0);
13370 tem = fold_unary_loc (loc, code, type, op0);
13371 return tem ? tem : expr;
13372 case 2:
13373 op0 = TREE_OPERAND (t, 0);
13374 op1 = TREE_OPERAND (t, 1);
13375 tem = fold_binary_loc (loc, code, type, op0, op1);
13376 return tem ? tem : expr;
13377 case 3:
13378 op0 = TREE_OPERAND (t, 0);
13379 op1 = TREE_OPERAND (t, 1);
13380 op2 = TREE_OPERAND (t, 2);
13381 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13382 return tem ? tem : expr;
13383 default:
13384 break;
13388 switch (code)
13390 case ARRAY_REF:
13392 tree op0 = TREE_OPERAND (t, 0);
13393 tree op1 = TREE_OPERAND (t, 1);
13395 if (TREE_CODE (op1) == INTEGER_CST
13396 && TREE_CODE (op0) == CONSTRUCTOR
13397 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13399 tree val = get_array_ctor_element_at_index (op0,
13400 wi::to_offset (op1));
13401 if (val)
13402 return val;
13405 return t;
13408 /* Return a VECTOR_CST if possible. */
13409 case CONSTRUCTOR:
13411 tree type = TREE_TYPE (t);
13412 if (TREE_CODE (type) != VECTOR_TYPE)
13413 return t;
13415 unsigned i;
13416 tree val;
13417 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13418 if (! CONSTANT_CLASS_P (val))
13419 return t;
13421 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13424 case CONST_DECL:
13425 return fold (DECL_INITIAL (t));
13427 default:
13428 return t;
13429 } /* switch (code) */
13432 #ifdef ENABLE_FOLD_CHECKING
13433 #undef fold
13435 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13436 hash_table<nofree_ptr_hash<const tree_node> > *);
13437 static void fold_check_failed (const_tree, const_tree);
13438 void print_fold_checksum (const_tree);
13440 /* When --enable-checking=fold, compute a digest of expr before
13441 and after actual fold call to see if fold did not accidentally
13442 change original expr. */
13444 tree
13445 fold (tree expr)
13447 tree ret;
13448 struct md5_ctx ctx;
13449 unsigned char checksum_before[16], checksum_after[16];
13450 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13452 md5_init_ctx (&ctx);
13453 fold_checksum_tree (expr, &ctx, &ht);
13454 md5_finish_ctx (&ctx, checksum_before);
13455 ht.empty ();
13457 ret = fold_1 (expr);
13459 md5_init_ctx (&ctx);
13460 fold_checksum_tree (expr, &ctx, &ht);
13461 md5_finish_ctx (&ctx, checksum_after);
13463 if (memcmp (checksum_before, checksum_after, 16))
13464 fold_check_failed (expr, ret);
13466 return ret;
13469 void
13470 print_fold_checksum (const_tree expr)
13472 struct md5_ctx ctx;
13473 unsigned char checksum[16], cnt;
13474 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13476 md5_init_ctx (&ctx);
13477 fold_checksum_tree (expr, &ctx, &ht);
13478 md5_finish_ctx (&ctx, checksum);
13479 for (cnt = 0; cnt < 16; ++cnt)
13480 fprintf (stderr, "%02x", checksum[cnt]);
13481 putc ('\n', stderr);
13484 static void
13485 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13487 internal_error ("fold check: original tree changed by fold");
13490 static void
13491 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13492 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13494 const tree_node **slot;
13495 enum tree_code code;
13496 union tree_node *buf;
13497 int i, len;
13499 recursive_label:
13500 if (expr == NULL)
13501 return;
13502 slot = ht->find_slot (expr, INSERT);
13503 if (*slot != NULL)
13504 return;
13505 *slot = expr;
13506 code = TREE_CODE (expr);
13507 if (TREE_CODE_CLASS (code) == tcc_declaration
13508 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13510 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13511 size_t sz = tree_size (expr);
13512 buf = XALLOCAVAR (union tree_node, sz);
13513 memcpy ((char *) buf, expr, sz);
13514 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13515 buf->decl_with_vis.symtab_node = NULL;
13516 buf->base.nowarning_flag = 0;
13517 expr = (tree) buf;
13519 else if (TREE_CODE_CLASS (code) == tcc_type
13520 && (TYPE_POINTER_TO (expr)
13521 || TYPE_REFERENCE_TO (expr)
13522 || TYPE_CACHED_VALUES_P (expr)
13523 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13524 || TYPE_NEXT_VARIANT (expr)
13525 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13527 /* Allow these fields to be modified. */
13528 tree tmp;
13529 size_t sz = tree_size (expr);
13530 buf = XALLOCAVAR (union tree_node, sz);
13531 memcpy ((char *) buf, expr, sz);
13532 expr = tmp = (tree) buf;
13533 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13534 TYPE_POINTER_TO (tmp) = NULL;
13535 TYPE_REFERENCE_TO (tmp) = NULL;
13536 TYPE_NEXT_VARIANT (tmp) = NULL;
13537 TYPE_ALIAS_SET (tmp) = -1;
13538 if (TYPE_CACHED_VALUES_P (tmp))
13540 TYPE_CACHED_VALUES_P (tmp) = 0;
13541 TYPE_CACHED_VALUES (tmp) = NULL;
13544 else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13546 /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13547 that and change builtins.c etc. instead - see PR89543. */
13548 size_t sz = tree_size (expr);
13549 buf = XALLOCAVAR (union tree_node, sz);
13550 memcpy ((char *) buf, expr, sz);
13551 buf->base.nowarning_flag = 0;
13552 expr = (tree) buf;
13554 md5_process_bytes (expr, tree_size (expr), ctx);
13555 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13556 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13557 if (TREE_CODE_CLASS (code) != tcc_type
13558 && TREE_CODE_CLASS (code) != tcc_declaration
13559 && code != TREE_LIST
13560 && code != SSA_NAME
13561 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13562 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13563 switch (TREE_CODE_CLASS (code))
13565 case tcc_constant:
13566 switch (code)
13568 case STRING_CST:
13569 md5_process_bytes (TREE_STRING_POINTER (expr),
13570 TREE_STRING_LENGTH (expr), ctx);
13571 break;
13572 case COMPLEX_CST:
13573 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13574 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13575 break;
13576 case VECTOR_CST:
13577 len = vector_cst_encoded_nelts (expr);
13578 for (i = 0; i < len; ++i)
13579 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13580 break;
13581 default:
13582 break;
13584 break;
13585 case tcc_exceptional:
13586 switch (code)
13588 case TREE_LIST:
13589 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13590 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13591 expr = TREE_CHAIN (expr);
13592 goto recursive_label;
13593 break;
13594 case TREE_VEC:
13595 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13596 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13597 break;
13598 default:
13599 break;
13601 break;
13602 case tcc_expression:
13603 case tcc_reference:
13604 case tcc_comparison:
13605 case tcc_unary:
13606 case tcc_binary:
13607 case tcc_statement:
13608 case tcc_vl_exp:
13609 len = TREE_OPERAND_LENGTH (expr);
13610 for (i = 0; i < len; ++i)
13611 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13612 break;
13613 case tcc_declaration:
13614 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13615 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13616 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13618 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13619 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13620 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13621 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13622 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13625 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13627 if (TREE_CODE (expr) == FUNCTION_DECL)
13629 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13630 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13632 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13634 break;
13635 case tcc_type:
13636 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13637 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13638 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13639 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13640 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13641 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13642 if (INTEGRAL_TYPE_P (expr)
13643 || SCALAR_FLOAT_TYPE_P (expr))
13645 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13646 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13648 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13649 if (TREE_CODE (expr) == RECORD_TYPE
13650 || TREE_CODE (expr) == UNION_TYPE
13651 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13652 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13653 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13654 break;
13655 default:
13656 break;
13660 /* Helper function for outputting the checksum of a tree T. When
13661 debugging with gdb, you can "define mynext" to be "next" followed
13662 by "call debug_fold_checksum (op0)", then just trace down till the
13663 outputs differ. */
13665 DEBUG_FUNCTION void
13666 debug_fold_checksum (const_tree t)
13668 int i;
13669 unsigned char checksum[16];
13670 struct md5_ctx ctx;
13671 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13673 md5_init_ctx (&ctx);
13674 fold_checksum_tree (t, &ctx, &ht);
13675 md5_finish_ctx (&ctx, checksum);
13676 ht.empty ();
13678 for (i = 0; i < 16; i++)
13679 fprintf (stderr, "%d ", checksum[i]);
13681 fprintf (stderr, "\n");
13684 #endif
13686 /* Fold a unary tree expression with code CODE of type TYPE with an
13687 operand OP0. LOC is the location of the resulting expression.
13688 Return a folded expression if successful. Otherwise, return a tree
13689 expression with code CODE of type TYPE with an operand OP0. */
13691 tree
13692 fold_build1_loc (location_t loc,
13693 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13695 tree tem;
13696 #ifdef ENABLE_FOLD_CHECKING
13697 unsigned char checksum_before[16], checksum_after[16];
13698 struct md5_ctx ctx;
13699 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13701 md5_init_ctx (&ctx);
13702 fold_checksum_tree (op0, &ctx, &ht);
13703 md5_finish_ctx (&ctx, checksum_before);
13704 ht.empty ();
13705 #endif
13707 tem = fold_unary_loc (loc, code, type, op0);
13708 if (!tem)
13709 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13711 #ifdef ENABLE_FOLD_CHECKING
13712 md5_init_ctx (&ctx);
13713 fold_checksum_tree (op0, &ctx, &ht);
13714 md5_finish_ctx (&ctx, checksum_after);
13716 if (memcmp (checksum_before, checksum_after, 16))
13717 fold_check_failed (op0, tem);
13718 #endif
13719 return tem;
13722 /* Fold a binary tree expression with code CODE of type TYPE with
13723 operands OP0 and OP1. LOC is the location of the resulting
13724 expression. Return a folded expression if successful. Otherwise,
13725 return a tree expression with code CODE of type TYPE with operands
13726 OP0 and OP1. */
13728 tree
13729 fold_build2_loc (location_t loc,
13730 enum tree_code code, tree type, tree op0, tree op1
13731 MEM_STAT_DECL)
13733 tree tem;
13734 #ifdef ENABLE_FOLD_CHECKING
13735 unsigned char checksum_before_op0[16],
13736 checksum_before_op1[16],
13737 checksum_after_op0[16],
13738 checksum_after_op1[16];
13739 struct md5_ctx ctx;
13740 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13742 md5_init_ctx (&ctx);
13743 fold_checksum_tree (op0, &ctx, &ht);
13744 md5_finish_ctx (&ctx, checksum_before_op0);
13745 ht.empty ();
13747 md5_init_ctx (&ctx);
13748 fold_checksum_tree (op1, &ctx, &ht);
13749 md5_finish_ctx (&ctx, checksum_before_op1);
13750 ht.empty ();
13751 #endif
13753 tem = fold_binary_loc (loc, code, type, op0, op1);
13754 if (!tem)
13755 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13757 #ifdef ENABLE_FOLD_CHECKING
13758 md5_init_ctx (&ctx);
13759 fold_checksum_tree (op0, &ctx, &ht);
13760 md5_finish_ctx (&ctx, checksum_after_op0);
13761 ht.empty ();
13763 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13764 fold_check_failed (op0, tem);
13766 md5_init_ctx (&ctx);
13767 fold_checksum_tree (op1, &ctx, &ht);
13768 md5_finish_ctx (&ctx, checksum_after_op1);
13770 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13771 fold_check_failed (op1, tem);
13772 #endif
13773 return tem;
13776 /* Fold a ternary tree expression with code CODE of type TYPE with
13777 operands OP0, OP1, and OP2. Return a folded expression if
13778 successful. Otherwise, return a tree expression with code CODE of
13779 type TYPE with operands OP0, OP1, and OP2. */
13781 tree
13782 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13783 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13785 tree tem;
13786 #ifdef ENABLE_FOLD_CHECKING
13787 unsigned char checksum_before_op0[16],
13788 checksum_before_op1[16],
13789 checksum_before_op2[16],
13790 checksum_after_op0[16],
13791 checksum_after_op1[16],
13792 checksum_after_op2[16];
13793 struct md5_ctx ctx;
13794 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13796 md5_init_ctx (&ctx);
13797 fold_checksum_tree (op0, &ctx, &ht);
13798 md5_finish_ctx (&ctx, checksum_before_op0);
13799 ht.empty ();
13801 md5_init_ctx (&ctx);
13802 fold_checksum_tree (op1, &ctx, &ht);
13803 md5_finish_ctx (&ctx, checksum_before_op1);
13804 ht.empty ();
13806 md5_init_ctx (&ctx);
13807 fold_checksum_tree (op2, &ctx, &ht);
13808 md5_finish_ctx (&ctx, checksum_before_op2);
13809 ht.empty ();
13810 #endif
13812 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13813 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13814 if (!tem)
13815 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13817 #ifdef ENABLE_FOLD_CHECKING
13818 md5_init_ctx (&ctx);
13819 fold_checksum_tree (op0, &ctx, &ht);
13820 md5_finish_ctx (&ctx, checksum_after_op0);
13821 ht.empty ();
13823 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13824 fold_check_failed (op0, tem);
13826 md5_init_ctx (&ctx);
13827 fold_checksum_tree (op1, &ctx, &ht);
13828 md5_finish_ctx (&ctx, checksum_after_op1);
13829 ht.empty ();
13831 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13832 fold_check_failed (op1, tem);
13834 md5_init_ctx (&ctx);
13835 fold_checksum_tree (op2, &ctx, &ht);
13836 md5_finish_ctx (&ctx, checksum_after_op2);
13838 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13839 fold_check_failed (op2, tem);
13840 #endif
13841 return tem;
13844 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13845 arguments in ARGARRAY, and a null static chain.
13846 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13847 of type TYPE from the given operands as constructed by build_call_array. */
13849 tree
13850 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13851 int nargs, tree *argarray)
13853 tree tem;
13854 #ifdef ENABLE_FOLD_CHECKING
13855 unsigned char checksum_before_fn[16],
13856 checksum_before_arglist[16],
13857 checksum_after_fn[16],
13858 checksum_after_arglist[16];
13859 struct md5_ctx ctx;
13860 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13861 int i;
13863 md5_init_ctx (&ctx);
13864 fold_checksum_tree (fn, &ctx, &ht);
13865 md5_finish_ctx (&ctx, checksum_before_fn);
13866 ht.empty ();
13868 md5_init_ctx (&ctx);
13869 for (i = 0; i < nargs; i++)
13870 fold_checksum_tree (argarray[i], &ctx, &ht);
13871 md5_finish_ctx (&ctx, checksum_before_arglist);
13872 ht.empty ();
13873 #endif
13875 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13876 if (!tem)
13877 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13879 #ifdef ENABLE_FOLD_CHECKING
13880 md5_init_ctx (&ctx);
13881 fold_checksum_tree (fn, &ctx, &ht);
13882 md5_finish_ctx (&ctx, checksum_after_fn);
13883 ht.empty ();
13885 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13886 fold_check_failed (fn, tem);
13888 md5_init_ctx (&ctx);
13889 for (i = 0; i < nargs; i++)
13890 fold_checksum_tree (argarray[i], &ctx, &ht);
13891 md5_finish_ctx (&ctx, checksum_after_arglist);
13893 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13894 fold_check_failed (NULL_TREE, tem);
13895 #endif
13896 return tem;
13899 /* Perform constant folding and related simplification of initializer
13900 expression EXPR. These behave identically to "fold_buildN" but ignore
13901 potential run-time traps and exceptions that fold must preserve. */
13903 #define START_FOLD_INIT \
13904 int saved_signaling_nans = flag_signaling_nans;\
13905 int saved_trapping_math = flag_trapping_math;\
13906 int saved_rounding_math = flag_rounding_math;\
13907 int saved_trapv = flag_trapv;\
13908 int saved_folding_initializer = folding_initializer;\
13909 flag_signaling_nans = 0;\
13910 flag_trapping_math = 0;\
13911 flag_rounding_math = 0;\
13912 flag_trapv = 0;\
13913 folding_initializer = 1;
13915 #define END_FOLD_INIT \
13916 flag_signaling_nans = saved_signaling_nans;\
13917 flag_trapping_math = saved_trapping_math;\
13918 flag_rounding_math = saved_rounding_math;\
13919 flag_trapv = saved_trapv;\
13920 folding_initializer = saved_folding_initializer;
13922 tree
13923 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13924 tree type, tree op)
13926 tree result;
13927 START_FOLD_INIT;
13929 result = fold_build1_loc (loc, code, type, op);
13931 END_FOLD_INIT;
13932 return result;
13935 tree
13936 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13937 tree type, tree op0, tree op1)
13939 tree result;
13940 START_FOLD_INIT;
13942 result = fold_build2_loc (loc, code, type, op0, op1);
13944 END_FOLD_INIT;
13945 return result;
13948 tree
13949 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13950 int nargs, tree *argarray)
13952 tree result;
13953 START_FOLD_INIT;
13955 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13957 END_FOLD_INIT;
13958 return result;
13961 #undef START_FOLD_INIT
13962 #undef END_FOLD_INIT
13964 /* Determine if first argument is a multiple of second argument. Return 0 if
13965 it is not, or we cannot easily determined it to be.
13967 An example of the sort of thing we care about (at this point; this routine
13968 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13969 fold cases do now) is discovering that
13971 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13973 is a multiple of
13975 SAVE_EXPR (J * 8)
13977 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13979 This code also handles discovering that
13981 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13983 is a multiple of 8 so we don't have to worry about dealing with a
13984 possible remainder.
13986 Note that we *look* inside a SAVE_EXPR only to determine how it was
13987 calculated; it is not safe for fold to do much of anything else with the
13988 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13989 at run time. For example, the latter example above *cannot* be implemented
13990 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13991 evaluation time of the original SAVE_EXPR is not necessarily the same at
13992 the time the new expression is evaluated. The only optimization of this
13993 sort that would be valid is changing
13995 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13997 divided by 8 to
13999 SAVE_EXPR (I) * SAVE_EXPR (J)
14001 (where the same SAVE_EXPR (J) is used in the original and the
14002 transformed version). */
14005 multiple_of_p (tree type, const_tree top, const_tree bottom)
14007 gimple *stmt;
14008 tree t1, op1, op2;
14010 if (operand_equal_p (top, bottom, 0))
14011 return 1;
14013 if (TREE_CODE (type) != INTEGER_TYPE)
14014 return 0;
14016 switch (TREE_CODE (top))
14018 case BIT_AND_EXPR:
14019 /* Bitwise and provides a power of two multiple. If the mask is
14020 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14021 if (!integer_pow2p (bottom))
14022 return 0;
14023 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14024 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14026 case MULT_EXPR:
14027 if (TREE_CODE (bottom) == INTEGER_CST)
14029 op1 = TREE_OPERAND (top, 0);
14030 op2 = TREE_OPERAND (top, 1);
14031 if (TREE_CODE (op1) == INTEGER_CST)
14032 std::swap (op1, op2);
14033 if (TREE_CODE (op2) == INTEGER_CST)
14035 if (multiple_of_p (type, op2, bottom))
14036 return 1;
14037 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14038 if (multiple_of_p (type, bottom, op2))
14040 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14041 wi::to_widest (op2));
14042 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14044 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14045 return multiple_of_p (type, op1, op2);
14048 return multiple_of_p (type, op1, bottom);
14051 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14052 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14054 case MINUS_EXPR:
14055 /* It is impossible to prove if op0 - op1 is multiple of bottom
14056 precisely, so be conservative here checking if both op0 and op1
14057 are multiple of bottom. Note we check the second operand first
14058 since it's usually simpler. */
14059 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14060 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14062 case PLUS_EXPR:
14063 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
14064 as op0 - 3 if the expression has unsigned type. For example,
14065 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
14066 op1 = TREE_OPERAND (top, 1);
14067 if (TYPE_UNSIGNED (type)
14068 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14069 op1 = fold_build1 (NEGATE_EXPR, type, op1);
14070 return (multiple_of_p (type, op1, bottom)
14071 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
14073 case LSHIFT_EXPR:
14074 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14076 op1 = TREE_OPERAND (top, 1);
14077 /* const_binop may not detect overflow correctly,
14078 so check for it explicitly here. */
14079 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
14080 wi::to_wide (op1))
14081 && (t1 = fold_convert (type,
14082 const_binop (LSHIFT_EXPR, size_one_node,
14083 op1))) != 0
14084 && !TREE_OVERFLOW (t1))
14085 return multiple_of_p (type, t1, bottom);
14087 return 0;
14089 case NOP_EXPR:
14090 /* Can't handle conversions from non-integral or wider integral type. */
14091 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14092 || (TYPE_PRECISION (type)
14093 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14094 return 0;
14096 /* fall through */
14098 case SAVE_EXPR:
14099 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14101 case COND_EXPR:
14102 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14103 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14105 case INTEGER_CST:
14106 if (TREE_CODE (bottom) != INTEGER_CST
14107 || integer_zerop (bottom)
14108 || (TYPE_UNSIGNED (type)
14109 && (tree_int_cst_sgn (top) < 0
14110 || tree_int_cst_sgn (bottom) < 0)))
14111 return 0;
14112 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14113 SIGNED);
14115 case SSA_NAME:
14116 if (TREE_CODE (bottom) == INTEGER_CST
14117 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14118 && gimple_code (stmt) == GIMPLE_ASSIGN)
14120 enum tree_code code = gimple_assign_rhs_code (stmt);
14122 /* Check for special cases to see if top is defined as multiple
14123 of bottom:
14125 top = (X & ~(bottom - 1) ; bottom is power of 2
14129 Y = X % bottom
14130 top = X - Y. */
14131 if (code == BIT_AND_EXPR
14132 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14133 && TREE_CODE (op2) == INTEGER_CST
14134 && integer_pow2p (bottom)
14135 && wi::multiple_of_p (wi::to_widest (op2),
14136 wi::to_widest (bottom), UNSIGNED))
14137 return 1;
14139 op1 = gimple_assign_rhs1 (stmt);
14140 if (code == MINUS_EXPR
14141 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14142 && TREE_CODE (op2) == SSA_NAME
14143 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14144 && gimple_code (stmt) == GIMPLE_ASSIGN
14145 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14146 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14147 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14148 return 1;
14151 /* fall through */
14153 default:
14154 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14155 return multiple_p (wi::to_poly_widest (top),
14156 wi::to_poly_widest (bottom));
14158 return 0;
14162 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14163 This function returns true for integer expressions, and returns
14164 false if uncertain. */
14166 bool
14167 tree_expr_finite_p (const_tree x)
14169 machine_mode mode = element_mode (x);
14170 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14171 return true;
14172 switch (TREE_CODE (x))
14174 case REAL_CST:
14175 return real_isfinite (TREE_REAL_CST_PTR (x));
14176 case COMPLEX_CST:
14177 return tree_expr_finite_p (TREE_REALPART (x))
14178 && tree_expr_finite_p (TREE_IMAGPART (x));
14179 case FLOAT_EXPR:
14180 return true;
14181 case ABS_EXPR:
14182 case CONVERT_EXPR:
14183 case NON_LVALUE_EXPR:
14184 case NEGATE_EXPR:
14185 case SAVE_EXPR:
14186 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14187 case MIN_EXPR:
14188 case MAX_EXPR:
14189 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14190 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14191 case COND_EXPR:
14192 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14193 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14194 case CALL_EXPR:
14195 switch (get_call_combined_fn (x))
14197 CASE_CFN_FABS:
14198 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14199 CASE_CFN_FMAX:
14200 CASE_CFN_FMIN:
14201 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14202 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14203 default:
14204 return false;
14207 default:
14208 return false;
14212 /* Return true if expression X evaluates to an infinity.
14213 This function returns false for integer expressions. */
14215 bool
14216 tree_expr_infinite_p (const_tree x)
14218 if (!HONOR_INFINITIES (x))
14219 return false;
14220 switch (TREE_CODE (x))
14222 case REAL_CST:
14223 return real_isinf (TREE_REAL_CST_PTR (x));
14224 case ABS_EXPR:
14225 case NEGATE_EXPR:
14226 case NON_LVALUE_EXPR:
14227 case SAVE_EXPR:
14228 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14229 case COND_EXPR:
14230 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14231 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14232 default:
14233 return false;
14237 /* Return true if expression X could evaluate to an infinity.
14238 This function returns false for integer expressions, and returns
14239 true if uncertain. */
14241 bool
14242 tree_expr_maybe_infinite_p (const_tree x)
14244 if (!HONOR_INFINITIES (x))
14245 return false;
14246 switch (TREE_CODE (x))
14248 case REAL_CST:
14249 return real_isinf (TREE_REAL_CST_PTR (x));
14250 case FLOAT_EXPR:
14251 return false;
14252 case ABS_EXPR:
14253 case NEGATE_EXPR:
14254 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14255 case COND_EXPR:
14256 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14257 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14258 default:
14259 return true;
14263 /* Return true if expression X evaluates to a signaling NaN.
14264 This function returns false for integer expressions. */
14266 bool
14267 tree_expr_signaling_nan_p (const_tree x)
14269 if (!HONOR_SNANS (x))
14270 return false;
14271 switch (TREE_CODE (x))
14273 case REAL_CST:
14274 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14275 case NON_LVALUE_EXPR:
14276 case SAVE_EXPR:
14277 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14278 case COND_EXPR:
14279 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14280 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14281 default:
14282 return false;
14286 /* Return true if expression X could evaluate to a signaling NaN.
14287 This function returns false for integer expressions, and returns
14288 true if uncertain. */
14290 bool
14291 tree_expr_maybe_signaling_nan_p (const_tree x)
14293 if (!HONOR_SNANS (x))
14294 return false;
14295 switch (TREE_CODE (x))
14297 case REAL_CST:
14298 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14299 case FLOAT_EXPR:
14300 return false;
14301 case ABS_EXPR:
14302 case CONVERT_EXPR:
14303 case NEGATE_EXPR:
14304 case NON_LVALUE_EXPR:
14305 case SAVE_EXPR:
14306 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14307 case MIN_EXPR:
14308 case MAX_EXPR:
14309 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14310 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14311 case COND_EXPR:
14312 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14313 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14314 case CALL_EXPR:
14315 switch (get_call_combined_fn (x))
14317 CASE_CFN_FABS:
14318 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14319 CASE_CFN_FMAX:
14320 CASE_CFN_FMIN:
14321 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14322 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14323 default:
14324 return true;
14326 default:
14327 return true;
14331 /* Return true if expression X evaluates to a NaN.
14332 This function returns false for integer expressions. */
14334 bool
14335 tree_expr_nan_p (const_tree x)
14337 if (!HONOR_NANS (x))
14338 return false;
14339 switch (TREE_CODE (x))
14341 case REAL_CST:
14342 return real_isnan (TREE_REAL_CST_PTR (x));
14343 case NON_LVALUE_EXPR:
14344 case SAVE_EXPR:
14345 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14346 case COND_EXPR:
14347 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14348 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14349 default:
14350 return false;
14354 /* Return true if expression X could evaluate to a NaN.
14355 This function returns false for integer expressions, and returns
14356 true if uncertain. */
14358 bool
14359 tree_expr_maybe_nan_p (const_tree x)
14361 if (!HONOR_NANS (x))
14362 return false;
14363 switch (TREE_CODE (x))
14365 case REAL_CST:
14366 return real_isnan (TREE_REAL_CST_PTR (x));
14367 case FLOAT_EXPR:
14368 return false;
14369 case PLUS_EXPR:
14370 case MINUS_EXPR:
14371 case MULT_EXPR:
14372 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14373 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14374 case ABS_EXPR:
14375 case CONVERT_EXPR:
14376 case NEGATE_EXPR:
14377 case NON_LVALUE_EXPR:
14378 case SAVE_EXPR:
14379 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14380 case MIN_EXPR:
14381 case MAX_EXPR:
14382 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14383 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14384 case COND_EXPR:
14385 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14386 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14387 case CALL_EXPR:
14388 switch (get_call_combined_fn (x))
14390 CASE_CFN_FABS:
14391 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14392 CASE_CFN_FMAX:
14393 CASE_CFN_FMIN:
14394 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14395 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14396 default:
14397 return true;
14399 default:
14400 return true;
14404 /* Return true if expression X could evaluate to -0.0.
14405 This function returns true if uncertain. */
14407 bool
14408 tree_expr_maybe_real_minus_zero_p (const_tree x)
14410 if (!HONOR_SIGNED_ZEROS (x))
14411 return false;
14412 switch (TREE_CODE (x))
14414 case REAL_CST:
14415 return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14416 case INTEGER_CST:
14417 case FLOAT_EXPR:
14418 case ABS_EXPR:
14419 return false;
14420 case NON_LVALUE_EXPR:
14421 case SAVE_EXPR:
14422 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14423 case COND_EXPR:
14424 return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14425 || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14426 case CALL_EXPR:
14427 switch (get_call_combined_fn (x))
14429 CASE_CFN_FABS:
14430 return false;
14431 default:
14432 break;
14434 default:
14435 break;
14437 /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14438 * but currently those predicates require tree and not const_tree. */
14439 return true;
14442 #define tree_expr_nonnegative_warnv_p(X, Y) \
14443 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14445 #define RECURSE(X) \
14446 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14448 /* Return true if CODE or TYPE is known to be non-negative. */
14450 static bool
14451 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14453 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14454 && truth_value_p (code))
14455 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14456 have a signed:1 type (where the value is -1 and 0). */
14457 return true;
14458 return false;
14461 /* Return true if (CODE OP0) is known to be non-negative. If the return
14462 value is based on the assumption that signed overflow is undefined,
14463 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14464 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14466 bool
14467 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14468 bool *strict_overflow_p, int depth)
14470 if (TYPE_UNSIGNED (type))
14471 return true;
14473 switch (code)
14475 case ABS_EXPR:
14476 /* We can't return 1 if flag_wrapv is set because
14477 ABS_EXPR<INT_MIN> = INT_MIN. */
14478 if (!ANY_INTEGRAL_TYPE_P (type))
14479 return true;
14480 if (TYPE_OVERFLOW_UNDEFINED (type))
14482 *strict_overflow_p = true;
14483 return true;
14485 break;
14487 case NON_LVALUE_EXPR:
14488 case FLOAT_EXPR:
14489 case FIX_TRUNC_EXPR:
14490 return RECURSE (op0);
14492 CASE_CONVERT:
14494 tree inner_type = TREE_TYPE (op0);
14495 tree outer_type = type;
14497 if (TREE_CODE (outer_type) == REAL_TYPE)
14499 if (TREE_CODE (inner_type) == REAL_TYPE)
14500 return RECURSE (op0);
14501 if (INTEGRAL_TYPE_P (inner_type))
14503 if (TYPE_UNSIGNED (inner_type))
14504 return true;
14505 return RECURSE (op0);
14508 else if (INTEGRAL_TYPE_P (outer_type))
14510 if (TREE_CODE (inner_type) == REAL_TYPE)
14511 return RECURSE (op0);
14512 if (INTEGRAL_TYPE_P (inner_type))
14513 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14514 && TYPE_UNSIGNED (inner_type);
14517 break;
14519 default:
14520 return tree_simple_nonnegative_warnv_p (code, type);
14523 /* We don't know sign of `t', so be conservative and return false. */
14524 return false;
14527 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14528 value is based on the assumption that signed overflow is undefined,
14529 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14530 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14532 bool
14533 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14534 tree op1, bool *strict_overflow_p,
14535 int depth)
14537 if (TYPE_UNSIGNED (type))
14538 return true;
14540 switch (code)
14542 case POINTER_PLUS_EXPR:
14543 case PLUS_EXPR:
14544 if (FLOAT_TYPE_P (type))
14545 return RECURSE (op0) && RECURSE (op1);
14547 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14548 both unsigned and at least 2 bits shorter than the result. */
14549 if (TREE_CODE (type) == INTEGER_TYPE
14550 && TREE_CODE (op0) == NOP_EXPR
14551 && TREE_CODE (op1) == NOP_EXPR)
14553 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14554 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14555 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14556 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14558 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14559 TYPE_PRECISION (inner2)) + 1;
14560 return prec < TYPE_PRECISION (type);
14563 break;
14565 case MULT_EXPR:
14566 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14568 /* x * x is always non-negative for floating point x
14569 or without overflow. */
14570 if (operand_equal_p (op0, op1, 0)
14571 || (RECURSE (op0) && RECURSE (op1)))
14573 if (ANY_INTEGRAL_TYPE_P (type)
14574 && TYPE_OVERFLOW_UNDEFINED (type))
14575 *strict_overflow_p = true;
14576 return true;
14580 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14581 both unsigned and their total bits is shorter than the result. */
14582 if (TREE_CODE (type) == INTEGER_TYPE
14583 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14584 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14586 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14587 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14588 : TREE_TYPE (op0);
14589 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14590 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14591 : TREE_TYPE (op1);
14593 bool unsigned0 = TYPE_UNSIGNED (inner0);
14594 bool unsigned1 = TYPE_UNSIGNED (inner1);
14596 if (TREE_CODE (op0) == INTEGER_CST)
14597 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14599 if (TREE_CODE (op1) == INTEGER_CST)
14600 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14602 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14603 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14605 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14606 ? tree_int_cst_min_precision (op0, UNSIGNED)
14607 : TYPE_PRECISION (inner0);
14609 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14610 ? tree_int_cst_min_precision (op1, UNSIGNED)
14611 : TYPE_PRECISION (inner1);
14613 return precision0 + precision1 < TYPE_PRECISION (type);
14616 return false;
14618 case BIT_AND_EXPR:
14619 return RECURSE (op0) || RECURSE (op1);
14621 case MAX_EXPR:
14622 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14623 things. */
14624 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14625 return RECURSE (op0) && RECURSE (op1);
14626 return RECURSE (op0) || RECURSE (op1);
14628 case BIT_IOR_EXPR:
14629 case BIT_XOR_EXPR:
14630 case MIN_EXPR:
14631 case RDIV_EXPR:
14632 case TRUNC_DIV_EXPR:
14633 case CEIL_DIV_EXPR:
14634 case FLOOR_DIV_EXPR:
14635 case ROUND_DIV_EXPR:
14636 return RECURSE (op0) && RECURSE (op1);
14638 case TRUNC_MOD_EXPR:
14639 return RECURSE (op0);
14641 case FLOOR_MOD_EXPR:
14642 return RECURSE (op1);
14644 case CEIL_MOD_EXPR:
14645 case ROUND_MOD_EXPR:
14646 default:
14647 return tree_simple_nonnegative_warnv_p (code, type);
14650 /* We don't know sign of `t', so be conservative and return false. */
14651 return false;
14654 /* Return true if T is known to be non-negative. If the return
14655 value is based on the assumption that signed overflow is undefined,
14656 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14657 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14659 bool
14660 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14662 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14663 return true;
14665 switch (TREE_CODE (t))
14667 case INTEGER_CST:
14668 return tree_int_cst_sgn (t) >= 0;
14670 case REAL_CST:
14671 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14673 case FIXED_CST:
14674 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14676 case COND_EXPR:
14677 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14679 case SSA_NAME:
14680 /* Limit the depth of recursion to avoid quadratic behavior.
14681 This is expected to catch almost all occurrences in practice.
14682 If this code misses important cases that unbounded recursion
14683 would not, passes that need this information could be revised
14684 to provide it through dataflow propagation. */
14685 return (!name_registered_for_update_p (t)
14686 && depth < param_max_ssa_name_query_depth
14687 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14688 strict_overflow_p, depth));
14690 default:
14691 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14695 /* Return true if T is known to be non-negative. If the return
14696 value is based on the assumption that signed overflow is undefined,
14697 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14698 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14700 bool
14701 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14702 bool *strict_overflow_p, int depth)
14704 switch (fn)
14706 CASE_CFN_ACOS:
14707 CASE_CFN_ACOSH:
14708 CASE_CFN_CABS:
14709 CASE_CFN_COSH:
14710 CASE_CFN_ERFC:
14711 CASE_CFN_EXP:
14712 CASE_CFN_EXP10:
14713 CASE_CFN_EXP2:
14714 CASE_CFN_FABS:
14715 CASE_CFN_FDIM:
14716 CASE_CFN_HYPOT:
14717 CASE_CFN_POW10:
14718 CASE_CFN_FFS:
14719 CASE_CFN_PARITY:
14720 CASE_CFN_POPCOUNT:
14721 CASE_CFN_CLZ:
14722 CASE_CFN_CLRSB:
14723 case CFN_BUILT_IN_BSWAP16:
14724 case CFN_BUILT_IN_BSWAP32:
14725 case CFN_BUILT_IN_BSWAP64:
14726 case CFN_BUILT_IN_BSWAP128:
14727 /* Always true. */
14728 return true;
14730 CASE_CFN_SQRT:
14731 CASE_CFN_SQRT_FN:
14732 /* sqrt(-0.0) is -0.0. */
14733 if (!HONOR_SIGNED_ZEROS (type))
14734 return true;
14735 return RECURSE (arg0);
14737 CASE_CFN_ASINH:
14738 CASE_CFN_ATAN:
14739 CASE_CFN_ATANH:
14740 CASE_CFN_CBRT:
14741 CASE_CFN_CEIL:
14742 CASE_CFN_CEIL_FN:
14743 CASE_CFN_ERF:
14744 CASE_CFN_EXPM1:
14745 CASE_CFN_FLOOR:
14746 CASE_CFN_FLOOR_FN:
14747 CASE_CFN_FMOD:
14748 CASE_CFN_FREXP:
14749 CASE_CFN_ICEIL:
14750 CASE_CFN_IFLOOR:
14751 CASE_CFN_IRINT:
14752 CASE_CFN_IROUND:
14753 CASE_CFN_LCEIL:
14754 CASE_CFN_LDEXP:
14755 CASE_CFN_LFLOOR:
14756 CASE_CFN_LLCEIL:
14757 CASE_CFN_LLFLOOR:
14758 CASE_CFN_LLRINT:
14759 CASE_CFN_LLROUND:
14760 CASE_CFN_LRINT:
14761 CASE_CFN_LROUND:
14762 CASE_CFN_MODF:
14763 CASE_CFN_NEARBYINT:
14764 CASE_CFN_NEARBYINT_FN:
14765 CASE_CFN_RINT:
14766 CASE_CFN_RINT_FN:
14767 CASE_CFN_ROUND:
14768 CASE_CFN_ROUND_FN:
14769 CASE_CFN_ROUNDEVEN:
14770 CASE_CFN_ROUNDEVEN_FN:
14771 CASE_CFN_SCALB:
14772 CASE_CFN_SCALBLN:
14773 CASE_CFN_SCALBN:
14774 CASE_CFN_SIGNBIT:
14775 CASE_CFN_SIGNIFICAND:
14776 CASE_CFN_SINH:
14777 CASE_CFN_TANH:
14778 CASE_CFN_TRUNC:
14779 CASE_CFN_TRUNC_FN:
14780 /* True if the 1st argument is nonnegative. */
14781 return RECURSE (arg0);
14783 CASE_CFN_FMAX:
14784 CASE_CFN_FMAX_FN:
14785 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14786 things. In the presence of sNaNs, we're only guaranteed to be
14787 non-negative if both operands are non-negative. In the presence
14788 of qNaNs, we're non-negative if either operand is non-negative
14789 and can't be a qNaN, or if both operands are non-negative. */
14790 if (tree_expr_maybe_signaling_nan_p (arg0) ||
14791 tree_expr_maybe_signaling_nan_p (arg1))
14792 return RECURSE (arg0) && RECURSE (arg1);
14793 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14794 || RECURSE (arg1))
14795 : (RECURSE (arg1)
14796 && !tree_expr_maybe_nan_p (arg1));
14798 CASE_CFN_FMIN:
14799 CASE_CFN_FMIN_FN:
14800 /* True if the 1st AND 2nd arguments are nonnegative. */
14801 return RECURSE (arg0) && RECURSE (arg1);
14803 CASE_CFN_COPYSIGN:
14804 CASE_CFN_COPYSIGN_FN:
14805 /* True if the 2nd argument is nonnegative. */
14806 return RECURSE (arg1);
14808 CASE_CFN_POWI:
14809 /* True if the 1st argument is nonnegative or the second
14810 argument is an even integer. */
14811 if (TREE_CODE (arg1) == INTEGER_CST
14812 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14813 return true;
14814 return RECURSE (arg0);
14816 CASE_CFN_POW:
14817 /* True if the 1st argument is nonnegative or the second
14818 argument is an even integer valued real. */
14819 if (TREE_CODE (arg1) == REAL_CST)
14821 REAL_VALUE_TYPE c;
14822 HOST_WIDE_INT n;
14824 c = TREE_REAL_CST (arg1);
14825 n = real_to_integer (&c);
14826 if ((n & 1) == 0)
14828 REAL_VALUE_TYPE cint;
14829 real_from_integer (&cint, VOIDmode, n, SIGNED);
14830 if (real_identical (&c, &cint))
14831 return true;
14834 return RECURSE (arg0);
14836 default:
14837 break;
14839 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14842 /* Return true if T is known to be non-negative. If the return
14843 value is based on the assumption that signed overflow is undefined,
14844 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14845 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14847 static bool
14848 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14850 enum tree_code code = TREE_CODE (t);
14851 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14852 return true;
14854 switch (code)
14856 case TARGET_EXPR:
14858 tree temp = TARGET_EXPR_SLOT (t);
14859 t = TARGET_EXPR_INITIAL (t);
14861 /* If the initializer is non-void, then it's a normal expression
14862 that will be assigned to the slot. */
14863 if (!VOID_TYPE_P (t))
14864 return RECURSE (t);
14866 /* Otherwise, the initializer sets the slot in some way. One common
14867 way is an assignment statement at the end of the initializer. */
14868 while (1)
14870 if (TREE_CODE (t) == BIND_EXPR)
14871 t = expr_last (BIND_EXPR_BODY (t));
14872 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14873 || TREE_CODE (t) == TRY_CATCH_EXPR)
14874 t = expr_last (TREE_OPERAND (t, 0));
14875 else if (TREE_CODE (t) == STATEMENT_LIST)
14876 t = expr_last (t);
14877 else
14878 break;
14880 if (TREE_CODE (t) == MODIFY_EXPR
14881 && TREE_OPERAND (t, 0) == temp)
14882 return RECURSE (TREE_OPERAND (t, 1));
14884 return false;
14887 case CALL_EXPR:
14889 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14890 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14892 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14893 get_call_combined_fn (t),
14894 arg0,
14895 arg1,
14896 strict_overflow_p, depth);
14898 case COMPOUND_EXPR:
14899 case MODIFY_EXPR:
14900 return RECURSE (TREE_OPERAND (t, 1));
14902 case BIND_EXPR:
14903 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14905 case SAVE_EXPR:
14906 return RECURSE (TREE_OPERAND (t, 0));
14908 default:
14909 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14913 #undef RECURSE
14914 #undef tree_expr_nonnegative_warnv_p
14916 /* Return true if T is known to be non-negative. If the return
14917 value is based on the assumption that signed overflow is undefined,
14918 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14919 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14921 bool
14922 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14924 enum tree_code code;
14925 if (t == error_mark_node)
14926 return false;
14928 code = TREE_CODE (t);
14929 switch (TREE_CODE_CLASS (code))
14931 case tcc_binary:
14932 case tcc_comparison:
14933 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14934 TREE_TYPE (t),
14935 TREE_OPERAND (t, 0),
14936 TREE_OPERAND (t, 1),
14937 strict_overflow_p, depth);
14939 case tcc_unary:
14940 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14941 TREE_TYPE (t),
14942 TREE_OPERAND (t, 0),
14943 strict_overflow_p, depth);
14945 case tcc_constant:
14946 case tcc_declaration:
14947 case tcc_reference:
14948 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14950 default:
14951 break;
14954 switch (code)
14956 case TRUTH_AND_EXPR:
14957 case TRUTH_OR_EXPR:
14958 case TRUTH_XOR_EXPR:
14959 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14960 TREE_TYPE (t),
14961 TREE_OPERAND (t, 0),
14962 TREE_OPERAND (t, 1),
14963 strict_overflow_p, depth);
14964 case TRUTH_NOT_EXPR:
14965 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14966 TREE_TYPE (t),
14967 TREE_OPERAND (t, 0),
14968 strict_overflow_p, depth);
14970 case COND_EXPR:
14971 case CONSTRUCTOR:
14972 case OBJ_TYPE_REF:
14973 case ASSERT_EXPR:
14974 case ADDR_EXPR:
14975 case WITH_SIZE_EXPR:
14976 case SSA_NAME:
14977 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14979 default:
14980 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14984 /* Return true if `t' is known to be non-negative. Handle warnings
14985 about undefined signed overflow. */
14987 bool
14988 tree_expr_nonnegative_p (tree t)
14990 bool ret, strict_overflow_p;
14992 strict_overflow_p = false;
14993 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14994 if (strict_overflow_p)
14995 fold_overflow_warning (("assuming signed overflow does not occur when "
14996 "determining that expression is always "
14997 "non-negative"),
14998 WARN_STRICT_OVERFLOW_MISC);
14999 return ret;
15003 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15004 For floating point we further ensure that T is not denormal.
15005 Similar logic is present in nonzero_address in rtlanal.h.
15007 If the return value is based on the assumption that signed overflow
15008 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15009 change *STRICT_OVERFLOW_P. */
15011 bool
15012 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15013 bool *strict_overflow_p)
15015 switch (code)
15017 case ABS_EXPR:
15018 return tree_expr_nonzero_warnv_p (op0,
15019 strict_overflow_p);
15021 case NOP_EXPR:
15023 tree inner_type = TREE_TYPE (op0);
15024 tree outer_type = type;
15026 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15027 && tree_expr_nonzero_warnv_p (op0,
15028 strict_overflow_p));
15030 break;
15032 case NON_LVALUE_EXPR:
15033 return tree_expr_nonzero_warnv_p (op0,
15034 strict_overflow_p);
15036 default:
15037 break;
15040 return false;
15043 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15044 For floating point we further ensure that T is not denormal.
15045 Similar logic is present in nonzero_address in rtlanal.h.
15047 If the return value is based on the assumption that signed overflow
15048 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15049 change *STRICT_OVERFLOW_P. */
15051 bool
15052 tree_binary_nonzero_warnv_p (enum tree_code code,
15053 tree type,
15054 tree op0,
15055 tree op1, bool *strict_overflow_p)
15057 bool sub_strict_overflow_p;
15058 switch (code)
15060 case POINTER_PLUS_EXPR:
15061 case PLUS_EXPR:
15062 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15064 /* With the presence of negative values it is hard
15065 to say something. */
15066 sub_strict_overflow_p = false;
15067 if (!tree_expr_nonnegative_warnv_p (op0,
15068 &sub_strict_overflow_p)
15069 || !tree_expr_nonnegative_warnv_p (op1,
15070 &sub_strict_overflow_p))
15071 return false;
15072 /* One of operands must be positive and the other non-negative. */
15073 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15074 overflows, on a twos-complement machine the sum of two
15075 nonnegative numbers can never be zero. */
15076 return (tree_expr_nonzero_warnv_p (op0,
15077 strict_overflow_p)
15078 || tree_expr_nonzero_warnv_p (op1,
15079 strict_overflow_p));
15081 break;
15083 case MULT_EXPR:
15084 if (TYPE_OVERFLOW_UNDEFINED (type))
15086 if (tree_expr_nonzero_warnv_p (op0,
15087 strict_overflow_p)
15088 && tree_expr_nonzero_warnv_p (op1,
15089 strict_overflow_p))
15091 *strict_overflow_p = true;
15092 return true;
15095 break;
15097 case MIN_EXPR:
15098 sub_strict_overflow_p = false;
15099 if (tree_expr_nonzero_warnv_p (op0,
15100 &sub_strict_overflow_p)
15101 && tree_expr_nonzero_warnv_p (op1,
15102 &sub_strict_overflow_p))
15104 if (sub_strict_overflow_p)
15105 *strict_overflow_p = true;
15107 break;
15109 case MAX_EXPR:
15110 sub_strict_overflow_p = false;
15111 if (tree_expr_nonzero_warnv_p (op0,
15112 &sub_strict_overflow_p))
15114 if (sub_strict_overflow_p)
15115 *strict_overflow_p = true;
15117 /* When both operands are nonzero, then MAX must be too. */
15118 if (tree_expr_nonzero_warnv_p (op1,
15119 strict_overflow_p))
15120 return true;
15122 /* MAX where operand 0 is positive is positive. */
15123 return tree_expr_nonnegative_warnv_p (op0,
15124 strict_overflow_p);
15126 /* MAX where operand 1 is positive is positive. */
15127 else if (tree_expr_nonzero_warnv_p (op1,
15128 &sub_strict_overflow_p)
15129 && tree_expr_nonnegative_warnv_p (op1,
15130 &sub_strict_overflow_p))
15132 if (sub_strict_overflow_p)
15133 *strict_overflow_p = true;
15134 return true;
15136 break;
15138 case BIT_IOR_EXPR:
15139 return (tree_expr_nonzero_warnv_p (op1,
15140 strict_overflow_p)
15141 || tree_expr_nonzero_warnv_p (op0,
15142 strict_overflow_p));
15144 default:
15145 break;
15148 return false;
15151 /* Return true when T is an address and is known to be nonzero.
15152 For floating point we further ensure that T is not denormal.
15153 Similar logic is present in nonzero_address in rtlanal.h.
15155 If the return value is based on the assumption that signed overflow
15156 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15157 change *STRICT_OVERFLOW_P. */
15159 bool
15160 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15162 bool sub_strict_overflow_p;
15163 switch (TREE_CODE (t))
15165 case INTEGER_CST:
15166 return !integer_zerop (t);
15168 case ADDR_EXPR:
15170 tree base = TREE_OPERAND (t, 0);
15172 if (!DECL_P (base))
15173 base = get_base_address (base);
15175 if (base && TREE_CODE (base) == TARGET_EXPR)
15176 base = TARGET_EXPR_SLOT (base);
15178 if (!base)
15179 return false;
15181 /* For objects in symbol table check if we know they are non-zero.
15182 Don't do anything for variables and functions before symtab is built;
15183 it is quite possible that they will be declared weak later. */
15184 int nonzero_addr = maybe_nonzero_address (base);
15185 if (nonzero_addr >= 0)
15186 return nonzero_addr;
15188 /* Constants are never weak. */
15189 if (CONSTANT_CLASS_P (base))
15190 return true;
15192 return false;
15195 case COND_EXPR:
15196 sub_strict_overflow_p = false;
15197 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15198 &sub_strict_overflow_p)
15199 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15200 &sub_strict_overflow_p))
15202 if (sub_strict_overflow_p)
15203 *strict_overflow_p = true;
15204 return true;
15206 break;
15208 case SSA_NAME:
15209 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15210 break;
15211 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15213 default:
15214 break;
15216 return false;
15219 #define integer_valued_real_p(X) \
15220 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15222 #define RECURSE(X) \
15223 ((integer_valued_real_p) (X, depth + 1))
15225 /* Return true if the floating point result of (CODE OP0) has an
15226 integer value. We also allow +Inf, -Inf and NaN to be considered
15227 integer values. Return false for signaling NaN.
15229 DEPTH is the current nesting depth of the query. */
15231 bool
15232 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15234 switch (code)
15236 case FLOAT_EXPR:
15237 return true;
15239 case ABS_EXPR:
15240 return RECURSE (op0);
15242 CASE_CONVERT:
15244 tree type = TREE_TYPE (op0);
15245 if (TREE_CODE (type) == INTEGER_TYPE)
15246 return true;
15247 if (TREE_CODE (type) == REAL_TYPE)
15248 return RECURSE (op0);
15249 break;
15252 default:
15253 break;
15255 return false;
15258 /* Return true if the floating point result of (CODE OP0 OP1) has an
15259 integer value. We also allow +Inf, -Inf and NaN to be considered
15260 integer values. Return false for signaling NaN.
15262 DEPTH is the current nesting depth of the query. */
15264 bool
15265 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15267 switch (code)
15269 case PLUS_EXPR:
15270 case MINUS_EXPR:
15271 case MULT_EXPR:
15272 case MIN_EXPR:
15273 case MAX_EXPR:
15274 return RECURSE (op0) && RECURSE (op1);
15276 default:
15277 break;
15279 return false;
15282 /* Return true if the floating point result of calling FNDECL with arguments
15283 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15284 considered integer values. Return false for signaling NaN. If FNDECL
15285 takes fewer than 2 arguments, the remaining ARGn are null.
15287 DEPTH is the current nesting depth of the query. */
15289 bool
15290 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15292 switch (fn)
15294 CASE_CFN_CEIL:
15295 CASE_CFN_CEIL_FN:
15296 CASE_CFN_FLOOR:
15297 CASE_CFN_FLOOR_FN:
15298 CASE_CFN_NEARBYINT:
15299 CASE_CFN_NEARBYINT_FN:
15300 CASE_CFN_RINT:
15301 CASE_CFN_RINT_FN:
15302 CASE_CFN_ROUND:
15303 CASE_CFN_ROUND_FN:
15304 CASE_CFN_ROUNDEVEN:
15305 CASE_CFN_ROUNDEVEN_FN:
15306 CASE_CFN_TRUNC:
15307 CASE_CFN_TRUNC_FN:
15308 return true;
15310 CASE_CFN_FMIN:
15311 CASE_CFN_FMIN_FN:
15312 CASE_CFN_FMAX:
15313 CASE_CFN_FMAX_FN:
15314 return RECURSE (arg0) && RECURSE (arg1);
15316 default:
15317 break;
15319 return false;
15322 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15323 has an integer value. We also allow +Inf, -Inf and NaN to be
15324 considered integer values. Return false for signaling NaN.
15326 DEPTH is the current nesting depth of the query. */
15328 bool
15329 integer_valued_real_single_p (tree t, int depth)
15331 switch (TREE_CODE (t))
15333 case REAL_CST:
15334 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15336 case COND_EXPR:
15337 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15339 case SSA_NAME:
15340 /* Limit the depth of recursion to avoid quadratic behavior.
15341 This is expected to catch almost all occurrences in practice.
15342 If this code misses important cases that unbounded recursion
15343 would not, passes that need this information could be revised
15344 to provide it through dataflow propagation. */
15345 return (!name_registered_for_update_p (t)
15346 && depth < param_max_ssa_name_query_depth
15347 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15348 depth));
15350 default:
15351 break;
15353 return false;
15356 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15357 has an integer value. We also allow +Inf, -Inf and NaN to be
15358 considered integer values. Return false for signaling NaN.
15360 DEPTH is the current nesting depth of the query. */
15362 static bool
15363 integer_valued_real_invalid_p (tree t, int depth)
15365 switch (TREE_CODE (t))
15367 case COMPOUND_EXPR:
15368 case MODIFY_EXPR:
15369 case BIND_EXPR:
15370 return RECURSE (TREE_OPERAND (t, 1));
15372 case SAVE_EXPR:
15373 return RECURSE (TREE_OPERAND (t, 0));
15375 default:
15376 break;
15378 return false;
15381 #undef RECURSE
15382 #undef integer_valued_real_p
15384 /* Return true if the floating point expression T has an integer value.
15385 We also allow +Inf, -Inf and NaN to be considered integer values.
15386 Return false for signaling NaN.
15388 DEPTH is the current nesting depth of the query. */
15390 bool
15391 integer_valued_real_p (tree t, int depth)
15393 if (t == error_mark_node)
15394 return false;
15396 STRIP_ANY_LOCATION_WRAPPER (t);
15398 tree_code code = TREE_CODE (t);
15399 switch (TREE_CODE_CLASS (code))
15401 case tcc_binary:
15402 case tcc_comparison:
15403 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15404 TREE_OPERAND (t, 1), depth);
15406 case tcc_unary:
15407 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15409 case tcc_constant:
15410 case tcc_declaration:
15411 case tcc_reference:
15412 return integer_valued_real_single_p (t, depth);
15414 default:
15415 break;
15418 switch (code)
15420 case COND_EXPR:
15421 case SSA_NAME:
15422 return integer_valued_real_single_p (t, depth);
15424 case CALL_EXPR:
15426 tree arg0 = (call_expr_nargs (t) > 0
15427 ? CALL_EXPR_ARG (t, 0)
15428 : NULL_TREE);
15429 tree arg1 = (call_expr_nargs (t) > 1
15430 ? CALL_EXPR_ARG (t, 1)
15431 : NULL_TREE);
15432 return integer_valued_real_call_p (get_call_combined_fn (t),
15433 arg0, arg1, depth);
15436 default:
15437 return integer_valued_real_invalid_p (t, depth);
15441 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15442 attempt to fold the expression to a constant without modifying TYPE,
15443 OP0 or OP1.
15445 If the expression could be simplified to a constant, then return
15446 the constant. If the expression would not be simplified to a
15447 constant, then return NULL_TREE. */
15449 tree
15450 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15452 tree tem = fold_binary (code, type, op0, op1);
15453 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15456 /* Given the components of a unary expression CODE, TYPE and OP0,
15457 attempt to fold the expression to a constant without modifying
15458 TYPE or OP0.
15460 If the expression could be simplified to a constant, then return
15461 the constant. If the expression would not be simplified to a
15462 constant, then return NULL_TREE. */
15464 tree
15465 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15467 tree tem = fold_unary (code, type, op0);
15468 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15471 /* If EXP represents referencing an element in a constant string
15472 (either via pointer arithmetic or array indexing), return the
15473 tree representing the value accessed, otherwise return NULL. */
15475 tree
15476 fold_read_from_constant_string (tree exp)
15478 if ((TREE_CODE (exp) == INDIRECT_REF
15479 || TREE_CODE (exp) == ARRAY_REF)
15480 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15482 tree exp1 = TREE_OPERAND (exp, 0);
15483 tree index;
15484 tree string;
15485 location_t loc = EXPR_LOCATION (exp);
15487 if (TREE_CODE (exp) == INDIRECT_REF)
15488 string = string_constant (exp1, &index, NULL, NULL);
15489 else
15491 tree low_bound = array_ref_low_bound (exp);
15492 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15494 /* Optimize the special-case of a zero lower bound.
15496 We convert the low_bound to sizetype to avoid some problems
15497 with constant folding. (E.g. suppose the lower bound is 1,
15498 and its mode is QI. Without the conversion,l (ARRAY
15499 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15500 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15501 if (! integer_zerop (low_bound))
15502 index = size_diffop_loc (loc, index,
15503 fold_convert_loc (loc, sizetype, low_bound));
15505 string = exp1;
15508 scalar_int_mode char_mode;
15509 if (string
15510 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15511 && TREE_CODE (string) == STRING_CST
15512 && tree_fits_uhwi_p (index)
15513 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15514 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15515 &char_mode)
15516 && GET_MODE_SIZE (char_mode) == 1)
15517 return build_int_cst_type (TREE_TYPE (exp),
15518 (TREE_STRING_POINTER (string)
15519 [TREE_INT_CST_LOW (index)]));
15521 return NULL;
15524 /* Folds a read from vector element at IDX of vector ARG. */
15526 tree
15527 fold_read_from_vector (tree arg, poly_uint64 idx)
15529 unsigned HOST_WIDE_INT i;
15530 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15531 && known_ge (idx, 0u)
15532 && idx.is_constant (&i))
15534 if (TREE_CODE (arg) == VECTOR_CST)
15535 return VECTOR_CST_ELT (arg, i);
15536 else if (TREE_CODE (arg) == CONSTRUCTOR)
15538 if (CONSTRUCTOR_NELTS (arg)
15539 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15540 return NULL_TREE;
15541 if (i >= CONSTRUCTOR_NELTS (arg))
15542 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15543 return CONSTRUCTOR_ELT (arg, i)->value;
15546 return NULL_TREE;
15549 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15550 an integer constant, real, or fixed-point constant.
15552 TYPE is the type of the result. */
15554 static tree
15555 fold_negate_const (tree arg0, tree type)
15557 tree t = NULL_TREE;
15559 switch (TREE_CODE (arg0))
15561 case REAL_CST:
15562 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15563 break;
15565 case FIXED_CST:
15567 FIXED_VALUE_TYPE f;
15568 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15569 &(TREE_FIXED_CST (arg0)), NULL,
15570 TYPE_SATURATING (type));
15571 t = build_fixed (type, f);
15572 /* Propagate overflow flags. */
15573 if (overflow_p | TREE_OVERFLOW (arg0))
15574 TREE_OVERFLOW (t) = 1;
15575 break;
15578 default:
15579 if (poly_int_tree_p (arg0))
15581 wi::overflow_type overflow;
15582 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15583 t = force_fit_type (type, res, 1,
15584 (overflow && ! TYPE_UNSIGNED (type))
15585 || TREE_OVERFLOW (arg0));
15586 break;
15589 gcc_unreachable ();
15592 return t;
15595 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15596 an integer constant or real constant.
15598 TYPE is the type of the result. */
15600 tree
15601 fold_abs_const (tree arg0, tree type)
15603 tree t = NULL_TREE;
15605 switch (TREE_CODE (arg0))
15607 case INTEGER_CST:
15609 /* If the value is unsigned or non-negative, then the absolute value
15610 is the same as the ordinary value. */
15611 wide_int val = wi::to_wide (arg0);
15612 wi::overflow_type overflow = wi::OVF_NONE;
15613 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15616 /* If the value is negative, then the absolute value is
15617 its negation. */
15618 else
15619 val = wi::neg (val, &overflow);
15621 /* Force to the destination type, set TREE_OVERFLOW for signed
15622 TYPE only. */
15623 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15625 break;
15627 case REAL_CST:
15628 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15629 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15630 else
15631 t = arg0;
15632 break;
15634 default:
15635 gcc_unreachable ();
15638 return t;
15641 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15642 constant. TYPE is the type of the result. */
15644 static tree
15645 fold_not_const (const_tree arg0, tree type)
15647 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15649 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15652 /* Given CODE, a relational operator, the target type, TYPE and two
15653 constant operands OP0 and OP1, return the result of the
15654 relational operation. If the result is not a compile time
15655 constant, then return NULL_TREE. */
15657 static tree
15658 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15660 int result, invert;
15662 /* From here on, the only cases we handle are when the result is
15663 known to be a constant. */
15665 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15667 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15668 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15670 /* Handle the cases where either operand is a NaN. */
15671 if (real_isnan (c0) || real_isnan (c1))
15673 switch (code)
15675 case EQ_EXPR:
15676 case ORDERED_EXPR:
15677 result = 0;
15678 break;
15680 case NE_EXPR:
15681 case UNORDERED_EXPR:
15682 case UNLT_EXPR:
15683 case UNLE_EXPR:
15684 case UNGT_EXPR:
15685 case UNGE_EXPR:
15686 case UNEQ_EXPR:
15687 result = 1;
15688 break;
15690 case LT_EXPR:
15691 case LE_EXPR:
15692 case GT_EXPR:
15693 case GE_EXPR:
15694 case LTGT_EXPR:
15695 if (flag_trapping_math)
15696 return NULL_TREE;
15697 result = 0;
15698 break;
15700 default:
15701 gcc_unreachable ();
15704 return constant_boolean_node (result, type);
15707 return constant_boolean_node (real_compare (code, c0, c1), type);
15710 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15712 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15713 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15714 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15717 /* Handle equality/inequality of complex constants. */
15718 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15720 tree rcond = fold_relational_const (code, type,
15721 TREE_REALPART (op0),
15722 TREE_REALPART (op1));
15723 tree icond = fold_relational_const (code, type,
15724 TREE_IMAGPART (op0),
15725 TREE_IMAGPART (op1));
15726 if (code == EQ_EXPR)
15727 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15728 else if (code == NE_EXPR)
15729 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15730 else
15731 return NULL_TREE;
15734 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15736 if (!VECTOR_TYPE_P (type))
15738 /* Have vector comparison with scalar boolean result. */
15739 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15740 && known_eq (VECTOR_CST_NELTS (op0),
15741 VECTOR_CST_NELTS (op1)));
15742 unsigned HOST_WIDE_INT nunits;
15743 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15744 return NULL_TREE;
15745 for (unsigned i = 0; i < nunits; i++)
15747 tree elem0 = VECTOR_CST_ELT (op0, i);
15748 tree elem1 = VECTOR_CST_ELT (op1, i);
15749 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15750 if (tmp == NULL_TREE)
15751 return NULL_TREE;
15752 if (integer_zerop (tmp))
15753 return constant_boolean_node (code == NE_EXPR, type);
15755 return constant_boolean_node (code == EQ_EXPR, type);
15757 tree_vector_builder elts;
15758 if (!elts.new_binary_operation (type, op0, op1, false))
15759 return NULL_TREE;
15760 unsigned int count = elts.encoded_nelts ();
15761 for (unsigned i = 0; i < count; i++)
15763 tree elem_type = TREE_TYPE (type);
15764 tree elem0 = VECTOR_CST_ELT (op0, i);
15765 tree elem1 = VECTOR_CST_ELT (op1, i);
15767 tree tem = fold_relational_const (code, elem_type,
15768 elem0, elem1);
15770 if (tem == NULL_TREE)
15771 return NULL_TREE;
15773 elts.quick_push (build_int_cst (elem_type,
15774 integer_zerop (tem) ? 0 : -1));
15777 return elts.build ();
15780 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15782 To compute GT, swap the arguments and do LT.
15783 To compute GE, do LT and invert the result.
15784 To compute LE, swap the arguments, do LT and invert the result.
15785 To compute NE, do EQ and invert the result.
15787 Therefore, the code below must handle only EQ and LT. */
15789 if (code == LE_EXPR || code == GT_EXPR)
15791 std::swap (op0, op1);
15792 code = swap_tree_comparison (code);
15795 /* Note that it is safe to invert for real values here because we
15796 have already handled the one case that it matters. */
15798 invert = 0;
15799 if (code == NE_EXPR || code == GE_EXPR)
15801 invert = 1;
15802 code = invert_tree_comparison (code, false);
15805 /* Compute a result for LT or EQ if args permit;
15806 Otherwise return T. */
15807 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15809 if (code == EQ_EXPR)
15810 result = tree_int_cst_equal (op0, op1);
15811 else
15812 result = tree_int_cst_lt (op0, op1);
15814 else
15815 return NULL_TREE;
15817 if (invert)
15818 result ^= 1;
15819 return constant_boolean_node (result, type);
15822 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15823 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15824 itself. */
15826 tree
15827 fold_build_cleanup_point_expr (tree type, tree expr)
15829 /* If the expression does not have side effects then we don't have to wrap
15830 it with a cleanup point expression. */
15831 if (!TREE_SIDE_EFFECTS (expr))
15832 return expr;
15834 /* If the expression is a return, check to see if the expression inside the
15835 return has no side effects or the right hand side of the modify expression
15836 inside the return. If either don't have side effects set we don't need to
15837 wrap the expression in a cleanup point expression. Note we don't check the
15838 left hand side of the modify because it should always be a return decl. */
15839 if (TREE_CODE (expr) == RETURN_EXPR)
15841 tree op = TREE_OPERAND (expr, 0);
15842 if (!op || !TREE_SIDE_EFFECTS (op))
15843 return expr;
15844 op = TREE_OPERAND (op, 1);
15845 if (!TREE_SIDE_EFFECTS (op))
15846 return expr;
15849 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15852 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15853 of an indirection through OP0, or NULL_TREE if no simplification is
15854 possible. */
15856 tree
15857 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15859 tree sub = op0;
15860 tree subtype;
15861 poly_uint64 const_op01;
15863 STRIP_NOPS (sub);
15864 subtype = TREE_TYPE (sub);
15865 if (!POINTER_TYPE_P (subtype)
15866 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15867 return NULL_TREE;
15869 if (TREE_CODE (sub) == ADDR_EXPR)
15871 tree op = TREE_OPERAND (sub, 0);
15872 tree optype = TREE_TYPE (op);
15874 /* *&CONST_DECL -> to the value of the const decl. */
15875 if (TREE_CODE (op) == CONST_DECL)
15876 return DECL_INITIAL (op);
15877 /* *&p => p; make sure to handle *&"str"[cst] here. */
15878 if (type == optype)
15880 tree fop = fold_read_from_constant_string (op);
15881 if (fop)
15882 return fop;
15883 else
15884 return op;
15886 /* *(foo *)&fooarray => fooarray[0] */
15887 else if (TREE_CODE (optype) == ARRAY_TYPE
15888 && type == TREE_TYPE (optype)
15889 && (!in_gimple_form
15890 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15892 tree type_domain = TYPE_DOMAIN (optype);
15893 tree min_val = size_zero_node;
15894 if (type_domain && TYPE_MIN_VALUE (type_domain))
15895 min_val = TYPE_MIN_VALUE (type_domain);
15896 if (in_gimple_form
15897 && TREE_CODE (min_val) != INTEGER_CST)
15898 return NULL_TREE;
15899 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15900 NULL_TREE, NULL_TREE);
15902 /* *(foo *)&complexfoo => __real__ complexfoo */
15903 else if (TREE_CODE (optype) == COMPLEX_TYPE
15904 && type == TREE_TYPE (optype))
15905 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15906 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15907 else if (VECTOR_TYPE_P (optype)
15908 && type == TREE_TYPE (optype))
15910 tree part_width = TYPE_SIZE (type);
15911 tree index = bitsize_int (0);
15912 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15913 index);
15917 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15918 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15920 tree op00 = TREE_OPERAND (sub, 0);
15921 tree op01 = TREE_OPERAND (sub, 1);
15923 STRIP_NOPS (op00);
15924 if (TREE_CODE (op00) == ADDR_EXPR)
15926 tree op00type;
15927 op00 = TREE_OPERAND (op00, 0);
15928 op00type = TREE_TYPE (op00);
15930 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15931 if (VECTOR_TYPE_P (op00type)
15932 && type == TREE_TYPE (op00type)
15933 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15934 but we want to treat offsets with MSB set as negative.
15935 For the code below negative offsets are invalid and
15936 TYPE_SIZE of the element is something unsigned, so
15937 check whether op01 fits into poly_int64, which implies
15938 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15939 then just use poly_uint64 because we want to treat the
15940 value as unsigned. */
15941 && tree_fits_poly_int64_p (op01))
15943 tree part_width = TYPE_SIZE (type);
15944 poly_uint64 max_offset
15945 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15946 * TYPE_VECTOR_SUBPARTS (op00type));
15947 if (known_lt (const_op01, max_offset))
15949 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15950 return fold_build3_loc (loc,
15951 BIT_FIELD_REF, type, op00,
15952 part_width, index);
15955 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15956 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15957 && type == TREE_TYPE (op00type))
15959 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15960 const_op01))
15961 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15963 /* ((foo *)&fooarray)[1] => fooarray[1] */
15964 else if (TREE_CODE (op00type) == ARRAY_TYPE
15965 && type == TREE_TYPE (op00type))
15967 tree type_domain = TYPE_DOMAIN (op00type);
15968 tree min_val = size_zero_node;
15969 if (type_domain && TYPE_MIN_VALUE (type_domain))
15970 min_val = TYPE_MIN_VALUE (type_domain);
15971 poly_uint64 type_size, index;
15972 if (poly_int_tree_p (min_val)
15973 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15974 && multiple_p (const_op01, type_size, &index))
15976 poly_offset_int off = index + wi::to_poly_offset (min_val);
15977 op01 = wide_int_to_tree (sizetype, off);
15978 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15979 NULL_TREE, NULL_TREE);
15985 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15986 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15987 && type == TREE_TYPE (TREE_TYPE (subtype))
15988 && (!in_gimple_form
15989 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15991 tree type_domain;
15992 tree min_val = size_zero_node;
15993 sub = build_fold_indirect_ref_loc (loc, sub);
15994 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15995 if (type_domain && TYPE_MIN_VALUE (type_domain))
15996 min_val = TYPE_MIN_VALUE (type_domain);
15997 if (in_gimple_form
15998 && TREE_CODE (min_val) != INTEGER_CST)
15999 return NULL_TREE;
16000 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16001 NULL_TREE);
16004 return NULL_TREE;
16007 /* Builds an expression for an indirection through T, simplifying some
16008 cases. */
16010 tree
16011 build_fold_indirect_ref_loc (location_t loc, tree t)
16013 tree type = TREE_TYPE (TREE_TYPE (t));
16014 tree sub = fold_indirect_ref_1 (loc, type, t);
16016 if (sub)
16017 return sub;
16019 return build1_loc (loc, INDIRECT_REF, type, t);
16022 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16024 tree
16025 fold_indirect_ref_loc (location_t loc, tree t)
16027 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16029 if (sub)
16030 return sub;
16031 else
16032 return t;
16035 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16036 whose result is ignored. The type of the returned tree need not be
16037 the same as the original expression. */
16039 tree
16040 fold_ignored_result (tree t)
16042 if (!TREE_SIDE_EFFECTS (t))
16043 return integer_zero_node;
16045 for (;;)
16046 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16048 case tcc_unary:
16049 t = TREE_OPERAND (t, 0);
16050 break;
16052 case tcc_binary:
16053 case tcc_comparison:
16054 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16055 t = TREE_OPERAND (t, 0);
16056 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16057 t = TREE_OPERAND (t, 1);
16058 else
16059 return t;
16060 break;
16062 case tcc_expression:
16063 switch (TREE_CODE (t))
16065 case COMPOUND_EXPR:
16066 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16067 return t;
16068 t = TREE_OPERAND (t, 0);
16069 break;
16071 case COND_EXPR:
16072 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16073 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16074 return t;
16075 t = TREE_OPERAND (t, 0);
16076 break;
16078 default:
16079 return t;
16081 break;
16083 default:
16084 return t;
16088 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16090 tree
16091 round_up_loc (location_t loc, tree value, unsigned int divisor)
16093 tree div = NULL_TREE;
16095 if (divisor == 1)
16096 return value;
16098 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16099 have to do anything. Only do this when we are not given a const,
16100 because in that case, this check is more expensive than just
16101 doing it. */
16102 if (TREE_CODE (value) != INTEGER_CST)
16104 div = build_int_cst (TREE_TYPE (value), divisor);
16106 if (multiple_of_p (TREE_TYPE (value), value, div))
16107 return value;
16110 /* If divisor is a power of two, simplify this to bit manipulation. */
16111 if (pow2_or_zerop (divisor))
16113 if (TREE_CODE (value) == INTEGER_CST)
16115 wide_int val = wi::to_wide (value);
16116 bool overflow_p;
16118 if ((val & (divisor - 1)) == 0)
16119 return value;
16121 overflow_p = TREE_OVERFLOW (value);
16122 val += divisor - 1;
16123 val &= (int) -divisor;
16124 if (val == 0)
16125 overflow_p = true;
16127 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16129 else
16131 tree t;
16133 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16134 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16135 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16136 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16139 else
16141 if (!div)
16142 div = build_int_cst (TREE_TYPE (value), divisor);
16143 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16144 value = size_binop_loc (loc, MULT_EXPR, value, div);
16147 return value;
16150 /* Likewise, but round down. */
16152 tree
16153 round_down_loc (location_t loc, tree value, int divisor)
16155 tree div = NULL_TREE;
16157 gcc_assert (divisor > 0);
16158 if (divisor == 1)
16159 return value;
16161 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16162 have to do anything. Only do this when we are not given a const,
16163 because in that case, this check is more expensive than just
16164 doing it. */
16165 if (TREE_CODE (value) != INTEGER_CST)
16167 div = build_int_cst (TREE_TYPE (value), divisor);
16169 if (multiple_of_p (TREE_TYPE (value), value, div))
16170 return value;
16173 /* If divisor is a power of two, simplify this to bit manipulation. */
16174 if (pow2_or_zerop (divisor))
16176 tree t;
16178 t = build_int_cst (TREE_TYPE (value), -divisor);
16179 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16181 else
16183 if (!div)
16184 div = build_int_cst (TREE_TYPE (value), divisor);
16185 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16186 value = size_binop_loc (loc, MULT_EXPR, value, div);
16189 return value;
16192 /* Returns the pointer to the base of the object addressed by EXP and
16193 extracts the information about the offset of the access, storing it
16194 to PBITPOS and POFFSET. */
16196 static tree
16197 split_address_to_core_and_offset (tree exp,
16198 poly_int64_pod *pbitpos, tree *poffset)
16200 tree core;
16201 machine_mode mode;
16202 int unsignedp, reversep, volatilep;
16203 poly_int64 bitsize;
16204 location_t loc = EXPR_LOCATION (exp);
16206 if (TREE_CODE (exp) == ADDR_EXPR)
16208 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16209 poffset, &mode, &unsignedp, &reversep,
16210 &volatilep);
16211 core = build_fold_addr_expr_loc (loc, core);
16213 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16215 core = TREE_OPERAND (exp, 0);
16216 STRIP_NOPS (core);
16217 *pbitpos = 0;
16218 *poffset = TREE_OPERAND (exp, 1);
16219 if (poly_int_tree_p (*poffset))
16221 poly_offset_int tem
16222 = wi::sext (wi::to_poly_offset (*poffset),
16223 TYPE_PRECISION (TREE_TYPE (*poffset)));
16224 tem <<= LOG2_BITS_PER_UNIT;
16225 if (tem.to_shwi (pbitpos))
16226 *poffset = NULL_TREE;
16229 else
16231 core = exp;
16232 *pbitpos = 0;
16233 *poffset = NULL_TREE;
16236 return core;
16239 /* Returns true if addresses of E1 and E2 differ by a constant, false
16240 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16242 bool
16243 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16245 tree core1, core2;
16246 poly_int64 bitpos1, bitpos2;
16247 tree toffset1, toffset2, tdiff, type;
16249 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16250 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16252 poly_int64 bytepos1, bytepos2;
16253 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16254 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16255 || !operand_equal_p (core1, core2, 0))
16256 return false;
16258 if (toffset1 && toffset2)
16260 type = TREE_TYPE (toffset1);
16261 if (type != TREE_TYPE (toffset2))
16262 toffset2 = fold_convert (type, toffset2);
16264 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16265 if (!cst_and_fits_in_hwi (tdiff))
16266 return false;
16268 *diff = int_cst_value (tdiff);
16270 else if (toffset1 || toffset2)
16272 /* If only one of the offsets is non-constant, the difference cannot
16273 be a constant. */
16274 return false;
16276 else
16277 *diff = 0;
16279 *diff += bytepos1 - bytepos2;
16280 return true;
16283 /* Return OFF converted to a pointer offset type suitable as offset for
16284 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16285 tree
16286 convert_to_ptrofftype_loc (location_t loc, tree off)
16288 if (ptrofftype_p (TREE_TYPE (off)))
16289 return off;
16290 return fold_convert_loc (loc, sizetype, off);
16293 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16294 tree
16295 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16297 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16298 ptr, convert_to_ptrofftype_loc (loc, off));
16301 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16302 tree
16303 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16305 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16306 ptr, size_int (off));
16309 /* Return a pointer to a NUL-terminated string containing the sequence
16310 of bytes corresponding to the representation of the object referred to
16311 by SRC (or a subsequence of such bytes within it if SRC is a reference
16312 to an initialized constant array plus some constant offset).
16313 Set *STRSIZE the number of bytes in the constant sequence including
16314 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16315 where A is the array that stores the constant sequence that SRC points
16316 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16317 need not point to a string or even an array of characters but may point
16318 to an object of any type. */
16320 const char *
16321 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16323 /* The offset into the array A storing the string, and A's byte size. */
16324 tree offset_node;
16325 tree mem_size;
16327 if (strsize)
16328 *strsize = 0;
16330 if (strsize)
16331 src = byte_representation (src, &offset_node, &mem_size, NULL);
16332 else
16333 src = string_constant (src, &offset_node, &mem_size, NULL);
16334 if (!src)
16335 return NULL;
16337 unsigned HOST_WIDE_INT offset = 0;
16338 if (offset_node != NULL_TREE)
16340 if (!tree_fits_uhwi_p (offset_node))
16341 return NULL;
16342 else
16343 offset = tree_to_uhwi (offset_node);
16346 if (!tree_fits_uhwi_p (mem_size))
16347 return NULL;
16349 /* ARRAY_SIZE is the byte size of the array the constant sequence
16350 is stored in and equal to sizeof A. INIT_BYTES is the number
16351 of bytes in the constant sequence used to initialize the array,
16352 including any embedded NULs as well as the terminating NUL (for
16353 strings), but not including any trailing zeros/NULs past
16354 the terminating one appended implicitly to a string literal to
16355 zero out the remainder of the array it's stored in. For example,
16356 given:
16357 const char a[7] = "abc\0d";
16358 n = strlen (a + 1);
16359 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16360 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16361 is equal to strlen (A) + 1. */
16362 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16363 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16364 const char *string = TREE_STRING_POINTER (src);
16366 /* Ideally this would turn into a gcc_checking_assert over time. */
16367 if (init_bytes > array_size)
16368 init_bytes = array_size;
16370 if (init_bytes == 0 || offset >= array_size)
16371 return NULL;
16373 if (strsize)
16375 /* Compute and store the number of characters from the beginning
16376 of the substring at OFFSET to the end, including the terminating
16377 nul. Offsets past the initial length refer to null strings. */
16378 if (offset < init_bytes)
16379 *strsize = init_bytes - offset;
16380 else
16381 *strsize = 1;
16383 else
16385 tree eltype = TREE_TYPE (TREE_TYPE (src));
16386 /* Support only properly NUL-terminated single byte strings. */
16387 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16388 return NULL;
16389 if (string[init_bytes - 1] != '\0')
16390 return NULL;
16393 return offset < init_bytes ? string + offset : "";
16396 /* Return a pointer to a NUL-terminated string corresponding to
16397 the expression STR referencing a constant string, possibly
16398 involving a constant offset. Return null if STR either doesn't
16399 reference a constant string or if it involves a nonconstant
16400 offset. */
16402 const char *
16403 c_getstr (tree str)
16405 return getbyterep (str, NULL);
16408 /* Given a tree T, compute which bits in T may be nonzero. */
16410 wide_int
16411 tree_nonzero_bits (const_tree t)
16413 switch (TREE_CODE (t))
16415 case INTEGER_CST:
16416 return wi::to_wide (t);
16417 case SSA_NAME:
16418 return get_nonzero_bits (t);
16419 case NON_LVALUE_EXPR:
16420 case SAVE_EXPR:
16421 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16422 case BIT_AND_EXPR:
16423 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16424 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16425 case BIT_IOR_EXPR:
16426 case BIT_XOR_EXPR:
16427 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16428 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16429 case COND_EXPR:
16430 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16431 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16432 CASE_CONVERT:
16433 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16434 TYPE_PRECISION (TREE_TYPE (t)),
16435 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16436 case PLUS_EXPR:
16437 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16439 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16440 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16441 if (wi::bit_and (nzbits1, nzbits2) == 0)
16442 return wi::bit_or (nzbits1, nzbits2);
16444 break;
16445 case LSHIFT_EXPR:
16446 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16448 tree type = TREE_TYPE (t);
16449 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16450 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16451 TYPE_PRECISION (type));
16452 return wi::neg_p (arg1)
16453 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16454 : wi::lshift (nzbits, arg1);
16456 break;
16457 case RSHIFT_EXPR:
16458 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16460 tree type = TREE_TYPE (t);
16461 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16462 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16463 TYPE_PRECISION (type));
16464 return wi::neg_p (arg1)
16465 ? wi::lshift (nzbits, -arg1)
16466 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16468 break;
16469 default:
16470 break;
16473 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
16476 #if CHECKING_P
16478 namespace selftest {
16480 /* Helper functions for writing tests of folding trees. */
16482 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16484 static void
16485 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
16486 tree constant)
16488 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
16491 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
16492 wrapping WRAPPED_EXPR. */
16494 static void
16495 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
16496 tree wrapped_expr)
16498 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
16499 ASSERT_NE (wrapped_expr, result);
16500 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
16501 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
16504 /* Verify that various arithmetic binary operations are folded
16505 correctly. */
16507 static void
16508 test_arithmetic_folding ()
16510 tree type = integer_type_node;
16511 tree x = create_tmp_var_raw (type, "x");
16512 tree zero = build_zero_cst (type);
16513 tree one = build_int_cst (type, 1);
16515 /* Addition. */
16516 /* 1 <-- (0 + 1) */
16517 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
16518 one);
16519 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
16520 one);
16522 /* (nonlvalue)x <-- (x + 0) */
16523 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
16526 /* Subtraction. */
16527 /* 0 <-- (x - x) */
16528 assert_binop_folds_to_const (x, MINUS_EXPR, x,
16529 zero);
16530 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
16533 /* Multiplication. */
16534 /* 0 <-- (x * 0) */
16535 assert_binop_folds_to_const (x, MULT_EXPR, zero,
16536 zero);
16538 /* (nonlvalue)x <-- (x * 1) */
16539 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
16543 /* Verify that various binary operations on vectors are folded
16544 correctly. */
16546 static void
16547 test_vector_folding ()
16549 tree inner_type = integer_type_node;
16550 tree type = build_vector_type (inner_type, 4);
16551 tree zero = build_zero_cst (type);
16552 tree one = build_one_cst (type);
16553 tree index = build_index_vector (type, 0, 1);
16555 /* Verify equality tests that return a scalar boolean result. */
16556 tree res_type = boolean_type_node;
16557 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
16558 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
16559 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
16560 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
16561 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
16562 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16563 index, one)));
16564 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
16565 index, index)));
16566 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
16567 index, index)));
16570 /* Verify folding of VEC_DUPLICATE_EXPRs. */
16572 static void
16573 test_vec_duplicate_folding ()
16575 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
16576 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
16577 /* This will be 1 if VEC_MODE isn't a vector mode. */
16578 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
16580 tree type = build_vector_type (ssizetype, nunits);
16581 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
16582 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
16583 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
16586 /* Run all of the selftests within this file. */
16588 void
16589 fold_const_c_tests ()
16591 test_arithmetic_folding ();
16592 test_vector_folding ();
16593 test_vec_duplicate_folding ();
16596 } // namespace selftest
16598 #endif /* CHECKING_P */