/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
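/* For illustration, a sketch of how a caller might use these entry points;
   the sizetype trees `a' and `b' are assumed to be built elsewhere and are
   not defined in this file:

     tree sum = size_binop (PLUS_EXPR, a, b);	     // a + b in sizetype
     tree scaled = size_binop (MULT_EXPR, sum, size_int (4));
     tree simplified = fold (scaled);		     // folds to a constant
						     // when possible

   When both operands are INTEGER_CSTs, the result is itself a constant.  */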
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
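/* For example (an illustrative sketch; the constants are hypothetical):

     tree q = div_if_zero_remainder (build_int_cst (integer_type_node, 12),
				     build_int_cst (integer_type_node, 4));

   Here Q is the INTEGER_CST 3; with 13 in place of 12, the remainder is
   nonzero and the call returns NULL_TREE instead.  */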
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
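/* The expected calling protocol, sketched for illustration (the statement
   STMT and the particular folding call are placeholders, not code from
   this file):

     fold_defer_overflow_warnings ();
     tree val = fold_binary (PLUS_EXPR, type, op0, op1);
     bool used = val && TREE_CODE (val) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   Any fold_overflow_warning raised in between is queued rather than
   emitted, and is only issued when the folded result is actually used.  */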
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
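/* E.g. for a 32-bit signed type this rejects only INT_MIN (0x80000000),
   the one value whose two's-complement negation overflows;
   wi::only_sign_bit_p detects exactly that bit pattern.  */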
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
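/* A small worked example (illustrative, with hypothetical trees): for
   T = (a - b) in a type where signed zeros need not be honored,
   fold_negate_expr rewrites -T as (b - a); for T = 5 it returns the
   constant -5; and when nothing simpler applies,

     tree neg = negate_expr (t);

   falls back to wrapping T in an explicit NEGATE_EXPR.  */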
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
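/* A worked example (illustrative): with CODE == PLUS_EXPR, splitting
   IN = x + 4 stores the literal 4 in *LITP and returns x, while
   IN = x - 4 stores 4 in *MINUS_LITP instead.  Re-association of the
   resulting pieces is done later by associate_trees below.  */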
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}
      else
	tmp = arg2;

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, tmp, sign);
      else
	res = wi::lshift (arg1, tmp);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
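/* For instance (a sketch with hypothetical constants):

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 5);
     tree c = int_const_binop (MULT_EXPR, a, b);   // INTEGER_CST 35

   Signed overflow does not abort the folding; force_fit_type records it
   via TREE_OVERFLOW on the result instead.  */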
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
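/* The LSHIFT_EXPR restriction to OPNO == 1 reflects that shifts distribute
   over addition only in the shifted operand: (a + b) << c equals
   (a << c) + (b << c) modulo wrapping, whereas a << (b + c) is not
   (a << b) + (a << c).  */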
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
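/* For example (a sketch; the operands are hypothetical):

     tree one = build_real (double_type_node, dconst1);
     tree two = build_real (double_type_node, dconst2);
     tree sum = const_binop (PLUS_EXPR, one, two);   // REAL_CST 3.0

   Unsupported combinations simply return NULL_TREE, so callers can fall
   back to building the expression unfolded.  */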
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
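/* The fast paths above mean, for instance, that
   size_binop (PLUS_EXPR, t, size_zero_node) returns T unchanged (no new
   node is built), and two INTEGER_CST operands fold immediately via
   int_const_binop with overflow tracking forced on (overflowable == -1),
   even though sizetype is unsigned.  */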
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
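/* For example (a sketch, assuming LOC is a location_t in scope): with
   sizetype operands 3 and 5, the result type is ssizetype and, since
   5 > 3, the value is computed as -(5 - 3):

     tree d = size_diffop_loc (loc, size_int (3), size_int (5));
     // d is the ssizetype constant -2  */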
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2001 /* Given an integer constant, make a new constant with the new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according to ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
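/* For instance (illustrative only), converting the int constant 300 to
   unsigned char truncates modulo 2^8:

     tree c = fold_convert_const_int_from_int
       (unsigned_char_type_node, build_int_cst (integer_type_node, 300));
     // c is the unsigned char constant 44 (300 mod 256)  */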
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2015 bool overflow = false;
2016 tree t;
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification:
2020 IEEE NaNs are mapped to zero, and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards, which simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2031 switch (code)
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2037 default:
2038 gcc_unreachable ();
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2051 if (! overflow)
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2057 overflow = true;
2058 val = wi::to_wide (lt);
2062 if (! overflow)
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2070 overflow = true;
2071 val = wi::to_wide (ut);
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
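/* A sketch of the saturating behavior (real_from_string builds the
   REAL_VALUE_TYPE):

     REAL_VALUE_TYPE d;
     real_from_string (&d, "3.75");
     tree t = fold_convert_const_int_from_real
       (FIX_TRUNC_EXPR, integer_type_node, build_real (double_type_node, d));
     // t is the int constant 3; a NaN operand would instead yield 0, and an
     // out-of-range value TYPE_MIN/TYPE_MAX, with TREE_OVERFLOW set  */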
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2093 /* Right-shift FIXED_CST into TEMP by FBIT bits. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2102 /* Left-shift TEMP into TEMP_TRUNC by FBIT bits. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2107 else
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2113 /* If FIXED_CST is negative, we need to round the value toward 0.
2114 We do this by adding 1 to TEMP when the discarded fractional bits are nonzero. */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2120 /* Given a fixed-point constant, make a new constant with the new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2128 return t;
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating point type. */
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2137 REAL_VALUE_TYPE value;
2138 tree t;
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow: the conversion produced an infinity in a mode that
2160 can't represent one. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
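/* E.g. (a sketch): narrowing a double constant to float just rounds to the
   nearest representable value, with TREE_OVERFLOW copied over; the explicit
   overflow cases above only trigger for modes lacking infinities or NaNs:

     REAL_VALUE_TYPE d;
     real_from_string (&d, "0.1");
     tree f = fold_convert_const_real_from_real
       (float_type_node, build_real (double_type_node, d));
     // f is the float nearest to 0.1  */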
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating point type. */
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2176 REAL_VALUE_TYPE value;
2177 tree t;
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2288 else if (TREE_CODE (type) == REAL_TYPE)
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2331 return v.build ();
2334 return NULL_TREE;
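/* A sketch of the dispatch above:

     tree five = build_int_cst (integer_type_node, 5);
     // INTEGER_CST -> REAL_TYPE goes through build_real_from_int_cst:
     tree as_double = fold_convert_const (FLOAT_EXPR, double_type_node, five);
     // INTEGER_CST -> INTEGER_TYPE goes through fold_convert_const_int_from_int:
     tree as_unsigned = fold_convert_const (NOP_EXPR, unsigned_type_node, five);  */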
2337 /* Construct a vector of vector type TYPE whose elements are all zero. */
2339 static tree
2340 build_zero_vector (tree type)
2342 tree t;
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2348 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2353 tree orig = TREE_TYPE (arg);
2355 if (type == orig)
2356 return true;
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2366 switch (TREE_CODE (type))
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VECTOR_TYPE:
2379 case VOID_TYPE:
2380 return TREE_CODE (type) == TREE_CODE (orig);
2382 default:
2383 return false;
2387 /* Convert expression ARG to type TYPE. Used by the middle-end for
2388 simple conversions in preference to calling the front-end's convert. */
2390 tree
2391 fold_convert_loc (location_t loc, tree type, tree arg)
2393 tree orig = TREE_TYPE (arg);
2394 tree tem;
2396 if (type == orig)
2397 return arg;
2399 if (TREE_CODE (arg) == ERROR_MARK
2400 || TREE_CODE (type) == ERROR_MARK
2401 || TREE_CODE (orig) == ERROR_MARK)
2402 return error_mark_node;
2404 switch (TREE_CODE (type))
2406 case POINTER_TYPE:
2407 case REFERENCE_TYPE:
2408 /* Handle conversions between pointers to different address spaces. */
2409 if (POINTER_TYPE_P (orig)
2410 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2411 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2412 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2413 /* fall through */
2415 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2416 case OFFSET_TYPE:
2417 if (TREE_CODE (arg) == INTEGER_CST)
2419 tem = fold_convert_const (NOP_EXPR, type, arg);
2420 if (tem != NULL_TREE)
2421 return tem;
2423 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2424 || TREE_CODE (orig) == OFFSET_TYPE)
2425 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2426 if (TREE_CODE (orig) == COMPLEX_TYPE)
2427 return fold_convert_loc (loc, type,
2428 fold_build1_loc (loc, REALPART_EXPR,
2429 TREE_TYPE (orig), arg));
2430 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2431 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2432 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2434 case REAL_TYPE:
2435 if (TREE_CODE (arg) == INTEGER_CST)
2437 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2438 if (tem != NULL_TREE)
2439 return tem;
2441 else if (TREE_CODE (arg) == REAL_CST)
2443 tem = fold_convert_const (NOP_EXPR, type, arg);
2444 if (tem != NULL_TREE)
2445 return tem;
2447 else if (TREE_CODE (arg) == FIXED_CST)
2449 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2450 if (tem != NULL_TREE)
2451 return tem;
2454 switch (TREE_CODE (orig))
2456 case INTEGER_TYPE:
2457 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2458 case POINTER_TYPE: case REFERENCE_TYPE:
2459 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2461 case REAL_TYPE:
2462 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2464 case FIXED_POINT_TYPE:
2465 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2467 case COMPLEX_TYPE:
2468 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2469 return fold_convert_loc (loc, type, tem);
2471 default:
2472 gcc_unreachable ();
2475 case FIXED_POINT_TYPE:
2476 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2477 || TREE_CODE (arg) == REAL_CST)
2479 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2480 if (tem != NULL_TREE)
2481 goto fold_convert_exit;
2484 switch (TREE_CODE (orig))
2486 case FIXED_POINT_TYPE:
2487 case INTEGER_TYPE:
2488 case ENUMERAL_TYPE:
2489 case BOOLEAN_TYPE:
2490 case REAL_TYPE:
2491 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2493 case COMPLEX_TYPE:
2494 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2495 return fold_convert_loc (loc, type, tem);
2497 default:
2498 gcc_unreachable ();
2501 case COMPLEX_TYPE:
2502 switch (TREE_CODE (orig))
2504 case INTEGER_TYPE:
2505 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2506 case POINTER_TYPE: case REFERENCE_TYPE:
2507 case REAL_TYPE:
2508 case FIXED_POINT_TYPE:
2509 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2510 fold_convert_loc (loc, TREE_TYPE (type), arg),
2511 fold_convert_loc (loc, TREE_TYPE (type),
2512 integer_zero_node));
2513 case COMPLEX_TYPE:
2515 tree rpart, ipart;
2517 if (TREE_CODE (arg) == COMPLEX_EXPR)
2519 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2520 TREE_OPERAND (arg, 0));
2521 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2522 TREE_OPERAND (arg, 1));
2523 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2526 arg = save_expr (arg);
2527 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2528 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2529 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2530 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2531 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2534 default:
2535 gcc_unreachable ();
2538 case VECTOR_TYPE:
2539 if (integer_zerop (arg))
2540 return build_zero_vector (type);
2541 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2542 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2543 || TREE_CODE (orig) == VECTOR_TYPE);
2544 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2546 case VOID_TYPE:
2547 tem = fold_ignored_result (arg);
2548 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2550 default:
2551 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2552 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2553 gcc_unreachable ();
2555 fold_convert_exit:
2556 protected_set_expr_location_unshare (tem, loc);
2557 return tem;
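/* Typical middle-end usage (illustrative; EXPR is any tree in scope) is
   through the fold_convert macro, which supplies UNKNOWN_LOCATION:

     tree wide = fold_convert (long_long_integer_type_node, expr);

   Constants fold immediately via fold_convert_const; otherwise the
   appropriate NOP_EXPR/FLOAT_EXPR/VIEW_CONVERT_EXPR tree is built.  */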
2560 /* Return false if X can be assumed not to be an lvalue, true
2561 otherwise. */
2563 static bool
2564 maybe_lvalue_p (const_tree x)
2566 /* We only need to wrap lvalue tree codes. */
2567 switch (TREE_CODE (x))
2569 case VAR_DECL:
2570 case PARM_DECL:
2571 case RESULT_DECL:
2572 case LABEL_DECL:
2573 case FUNCTION_DECL:
2574 case SSA_NAME:
2576 case COMPONENT_REF:
2577 case MEM_REF:
2578 case INDIRECT_REF:
2579 case ARRAY_REF:
2580 case ARRAY_RANGE_REF:
2581 case BIT_FIELD_REF:
2582 case OBJ_TYPE_REF:
2584 case REALPART_EXPR:
2585 case IMAGPART_EXPR:
2586 case PREINCREMENT_EXPR:
2587 case PREDECREMENT_EXPR:
2588 case SAVE_EXPR:
2589 case TRY_CATCH_EXPR:
2590 case WITH_CLEANUP_EXPR:
2591 case COMPOUND_EXPR:
2592 case MODIFY_EXPR:
2593 case TARGET_EXPR:
2594 case COND_EXPR:
2595 case BIND_EXPR:
2596 break;
2598 default:
2599 /* Assume the worst for front-end tree codes. */
2600 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2601 break;
2602 return false;
2605 return true;
2608 /* Return an expr equal to X but certainly not valid as an lvalue. */
2610 tree
2611 non_lvalue_loc (location_t loc, tree x)
2613 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2614 us. */
2615 if (in_gimple_form)
2616 return x;
2618 if (! maybe_lvalue_p (x))
2619 return x;
2620 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2623 /* Return X with its location set to LOC. The name is historical: when
2624 pedantic, this used to return an expr not valid as a pedantic lvalue. */
2626 static tree
2627 pedantic_non_lvalue_loc (location_t loc, tree x)
2629 return protected_set_expr_location_unshare (x, loc);
2632 /* Given a tree comparison code, return the code that is the logical inverse.
2633 It is generally not safe to do this for floating-point comparisons, except
2634 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2635 ERROR_MARK in this case. */
2637 enum tree_code
2638 invert_tree_comparison (enum tree_code code, bool honor_nans)
2640 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2641 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2642 return ERROR_MARK;
2644 switch (code)
2646 case EQ_EXPR:
2647 return NE_EXPR;
2648 case NE_EXPR:
2649 return EQ_EXPR;
2650 case GT_EXPR:
2651 return honor_nans ? UNLE_EXPR : LE_EXPR;
2652 case GE_EXPR:
2653 return honor_nans ? UNLT_EXPR : LT_EXPR;
2654 case LT_EXPR:
2655 return honor_nans ? UNGE_EXPR : GE_EXPR;
2656 case LE_EXPR:
2657 return honor_nans ? UNGT_EXPR : GT_EXPR;
2658 case LTGT_EXPR:
2659 return UNEQ_EXPR;
2660 case UNEQ_EXPR:
2661 return LTGT_EXPR;
2662 case UNGT_EXPR:
2663 return LE_EXPR;
2664 case UNGE_EXPR:
2665 return LT_EXPR;
2666 case UNLT_EXPR:
2667 return GE_EXPR;
2668 case UNLE_EXPR:
2669 return GT_EXPR;
2670 case ORDERED_EXPR:
2671 return UNORDERED_EXPR;
2672 case UNORDERED_EXPR:
2673 return ORDERED_EXPR;
2674 default:
2675 gcc_unreachable ();
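/* For example:

     invert_tree_comparison (LT_EXPR, false)  == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   == UNGE_EXPR

   since with NaNs !(x < y) must include the unordered case; and when
   flag_trapping_math is also set, the LT_EXPR case returns ERROR_MARK,
   because the inverse would trap under different conditions.  */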
2679 /* Similar, but return the comparison that results if the operands are
2680 swapped. This is safe for floating-point. */
2682 enum tree_code
2683 swap_tree_comparison (enum tree_code code)
2685 switch (code)
2687 case EQ_EXPR:
2688 case NE_EXPR:
2689 case ORDERED_EXPR:
2690 case UNORDERED_EXPR:
2691 case LTGT_EXPR:
2692 case UNEQ_EXPR:
2693 return code;
2694 case GT_EXPR:
2695 return LT_EXPR;
2696 case GE_EXPR:
2697 return LE_EXPR;
2698 case LT_EXPR:
2699 return GT_EXPR;
2700 case LE_EXPR:
2701 return GE_EXPR;
2702 case UNGT_EXPR:
2703 return UNLT_EXPR;
2704 case UNGE_EXPR:
2705 return UNLE_EXPR;
2706 case UNLT_EXPR:
2707 return UNGT_EXPR;
2708 case UNLE_EXPR:
2709 return UNGE_EXPR;
2710 default:
2711 gcc_unreachable ();
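/* For example, swap_tree_comparison (LE_EXPR) == GE_EXPR, reflecting that
   "a <= b" and "b >= a" are the same test; symmetric codes such as
   EQ_EXPR and UNORDERED_EXPR map to themselves.  */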
2716 /* Convert a comparison tree code from an enum tree_code representation
2717 into a compcode bit-based encoding. This function is the inverse of
2718 compcode_to_comparison. */
2720 static enum comparison_code
2721 comparison_to_compcode (enum tree_code code)
2723 switch (code)
2725 case LT_EXPR:
2726 return COMPCODE_LT;
2727 case EQ_EXPR:
2728 return COMPCODE_EQ;
2729 case LE_EXPR:
2730 return COMPCODE_LE;
2731 case GT_EXPR:
2732 return COMPCODE_GT;
2733 case NE_EXPR:
2734 return COMPCODE_NE;
2735 case GE_EXPR:
2736 return COMPCODE_GE;
2737 case ORDERED_EXPR:
2738 return COMPCODE_ORD;
2739 case UNORDERED_EXPR:
2740 return COMPCODE_UNORD;
2741 case UNLT_EXPR:
2742 return COMPCODE_UNLT;
2743 case UNEQ_EXPR:
2744 return COMPCODE_UNEQ;
2745 case UNLE_EXPR:
2746 return COMPCODE_UNLE;
2747 case UNGT_EXPR:
2748 return COMPCODE_UNGT;
2749 case LTGT_EXPR:
2750 return COMPCODE_LTGT;
2751 case UNGE_EXPR:
2752 return COMPCODE_UNGE;
2753 default:
2754 gcc_unreachable ();
2758 /* Convert a compcode bit-based encoding of a comparison operator back
2759 to GCC's enum tree_code representation. This function is the
2760 inverse of comparison_to_compcode. */
2762 static enum tree_code
2763 compcode_to_comparison (enum comparison_code code)
2765 switch (code)
2767 case COMPCODE_LT:
2768 return LT_EXPR;
2769 case COMPCODE_EQ:
2770 return EQ_EXPR;
2771 case COMPCODE_LE:
2772 return LE_EXPR;
2773 case COMPCODE_GT:
2774 return GT_EXPR;
2775 case COMPCODE_NE:
2776 return NE_EXPR;
2777 case COMPCODE_GE:
2778 return GE_EXPR;
2779 case COMPCODE_ORD:
2780 return ORDERED_EXPR;
2781 case COMPCODE_UNORD:
2782 return UNORDERED_EXPR;
2783 case COMPCODE_UNLT:
2784 return UNLT_EXPR;
2785 case COMPCODE_UNEQ:
2786 return UNEQ_EXPR;
2787 case COMPCODE_UNLE:
2788 return UNLE_EXPR;
2789 case COMPCODE_UNGT:
2790 return UNGT_EXPR;
2791 case COMPCODE_LTGT:
2792 return LTGT_EXPR;
2793 case COMPCODE_UNGE:
2794 return UNGE_EXPR;
2795 default:
2796 gcc_unreachable ();
2800 /* Return true if COND1 tests the opposite condition of COND2. */
2802 bool
2803 inverse_conditions_p (const_tree cond1, const_tree cond2)
2805 return (COMPARISON_CLASS_P (cond1)
2806 && COMPARISON_CLASS_P (cond2)
2807 && (invert_tree_comparison
2808 (TREE_CODE (cond1),
2809 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2810 && operand_equal_p (TREE_OPERAND (cond1, 0),
2811 TREE_OPERAND (cond2, 0), 0)
2812 && operand_equal_p (TREE_OPERAND (cond1, 1),
2813 TREE_OPERAND (cond2, 1), 0));
2816 /* Return a tree for the comparison that combines, via AND or OR
2817 (depending on CODE), the two comparisons LCODE
2818 and RCODE applied to the identical operands LL_ARG and LR_ARG. Take into account
2819 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2820 if this makes the transformation invalid. */
2822 tree
2823 combine_comparisons (location_t loc,
2824 enum tree_code code, enum tree_code lcode,
2825 enum tree_code rcode, tree truth_type,
2826 tree ll_arg, tree lr_arg)
2828 bool honor_nans = HONOR_NANS (ll_arg);
2829 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2830 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2831 int compcode;
2833 switch (code)
2835 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2836 compcode = lcompcode & rcompcode;
2837 break;
2839 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2840 compcode = lcompcode | rcompcode;
2841 break;
2843 default:
2844 return NULL_TREE;
2847 if (!honor_nans)
2849 /* Eliminate unordered comparisons, as well as LTGT and ORD
2850 which are not used unless the mode has NaNs. */
2851 compcode &= ~COMPCODE_UNORD;
2852 if (compcode == COMPCODE_LTGT)
2853 compcode = COMPCODE_NE;
2854 else if (compcode == COMPCODE_ORD)
2855 compcode = COMPCODE_TRUE;
2857 else if (flag_trapping_math)
2859 /* Check that the original operations and the optimized one will trap
2860 under the same condition. */
2861 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2862 && (lcompcode != COMPCODE_EQ)
2863 && (lcompcode != COMPCODE_ORD);
2864 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2865 && (rcompcode != COMPCODE_EQ)
2866 && (rcompcode != COMPCODE_ORD);
2867 bool trap = (compcode & COMPCODE_UNORD) == 0
2868 && (compcode != COMPCODE_EQ)
2869 && (compcode != COMPCODE_ORD);
2871 /* In a short-circuited boolean expression the LHS might be
2872 such that the RHS, if evaluated, will never trap. For
2873 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2874 if neither x nor y is NaN. (This is a mixed blessing: for
2875 example, the expression above will never trap, hence
2876 optimizing it to x < y would be invalid). */
2877 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2878 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2879 rtrap = false;
2881 /* If the comparison was short-circuited, and only the RHS
2882 trapped, we may now generate a spurious trap. */
2883 if (rtrap && !ltrap
2884 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2885 return NULL_TREE;
2887 /* If we changed the conditions that cause a trap, we lose. */
2888 if ((ltrap || rtrap) != trap)
2889 return NULL_TREE;
2892 if (compcode == COMPCODE_TRUE)
2893 return constant_boolean_node (true, truth_type);
2894 else if (compcode == COMPCODE_FALSE)
2895 return constant_boolean_node (false, truth_type);
2896 else
2898 enum tree_code tcode;
2900 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2901 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
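/* A sketch of the bit encoding at work (assuming X, Y and LOC are in
   scope): COMPCODE_LT is 1 and COMPCODE_EQ is 2, so OR-ing "x < y" with
   "x == y" yields compcode 3, i.e. COMPCODE_LE:

     tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                                   boolean_type_node, x, y);
     // folds to "x <= y"; NULL_TREE is returned instead whenever the
     // NaN/trap checks above forbid the merge  */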
2905 /* Return nonzero if two operands (typically of the same tree node)
2906 are necessarily equal. FLAGS modifies behavior as follows:
2908 If OEP_ONLY_CONST is set, only return nonzero for constants.
2909 This function tests whether the operands are indistinguishable;
2910 it does not test whether they are equal using C's == operation.
2911 The distinction is important for IEEE floating point, because
2912 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2913 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2915 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2916 even though it may hold multiple values during a function.
2917 This is because a GCC tree node guarantees that nothing else is
2918 executed between the evaluation of its "operands" (which may often
2919 be evaluated in arbitrary order). Hence if the operands themselves
2920 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2921 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2922 unset means assuming isochronic (or instantaneous) tree equivalence.
2923 Unless comparing arbitrary expression trees, such as from different
2924 statements, this flag can usually be left unset.
2926 If OEP_PURE_SAME is set, then pure functions with identical arguments
2927 are considered the same. It is used when the caller has other ways
2928 to ensure that global memory is unchanged in between.
2930 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2931 not values of expressions.
2933 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2934 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2936 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2937 any operand with side effects. This is unnecessarily conservative in the
2938 case we know that arg0 and arg1 are in disjoint code paths (such as in
2939 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2940 addresses with TREE_CONSTANT flag set so we know that &var == &var
2941 even if var is volatile. */
2943 bool
2944 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2945 unsigned int flags)
2947 bool r;
2948 if (verify_hash_value (arg0, arg1, flags, &r))
2949 return r;
2951 STRIP_ANY_LOCATION_WRAPPER (arg0);
2952 STRIP_ANY_LOCATION_WRAPPER (arg1);
2954 /* If either is ERROR_MARK, they aren't equal. */
2955 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2956 || TREE_TYPE (arg0) == error_mark_node
2957 || TREE_TYPE (arg1) == error_mark_node)
2958 return false;
2960 /* Similarly, if either does not have a type (like a template id),
2961 they aren't equal. */
2962 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2963 return false;
2965 /* We cannot consider pointers to different address spaces equal. */
2966 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2967 && POINTER_TYPE_P (TREE_TYPE (arg1))
2968 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2969 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2970 return false;
2972 /* Check equality of integer constants before bailing out due to
2973 precision differences. */
2974 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2976 /* Address of INTEGER_CST is not defined; check that we did not forget
2977 to drop the OEP_ADDRESS_OF flags. */
2978 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2979 return tree_int_cst_equal (arg0, arg1);
2982 if (!(flags & OEP_ADDRESS_OF))
2984 /* If both types don't have the same signedness, then we can't consider
2985 them equal. We must check this before the STRIP_NOPS calls
2986 because they may change the signedness of the arguments. As pointers
2987 strictly don't have a signedness, require either two pointers or
2988 two non-pointers as well. */
2989 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2990 || POINTER_TYPE_P (TREE_TYPE (arg0))
2991 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2992 return false;
2994 /* If both types don't have the same precision, then it is not safe
2995 to strip NOPs. */
2996 if (element_precision (TREE_TYPE (arg0))
2997 != element_precision (TREE_TYPE (arg1)))
2998 return false;
3000 STRIP_NOPS (arg0);
3001 STRIP_NOPS (arg1);
3003 #if 0
3004 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3005 sanity check once the issue is solved. */
3006 else
3007 /* Addresses of conversions and SSA_NAMEs (and many other things)
3008 are not defined. Check that we did not forget to drop the
3009 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3010 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3011 && TREE_CODE (arg0) != SSA_NAME);
3012 #endif
3014 /* In case both args are comparisons but with different comparison
3015 code, try to swap the comparison operands of one arg to produce
3016 a match and compare that variant. */
3017 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3018 && COMPARISON_CLASS_P (arg0)
3019 && COMPARISON_CLASS_P (arg1))
3021 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3023 if (TREE_CODE (arg0) == swap_code)
3024 return operand_equal_p (TREE_OPERAND (arg0, 0),
3025 TREE_OPERAND (arg1, 1), flags)
3026 && operand_equal_p (TREE_OPERAND (arg0, 1),
3027 TREE_OPERAND (arg1, 0), flags);
3030 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3032 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3033 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3035 else if (flags & OEP_ADDRESS_OF)
3037 /* If we are interested in comparing addresses ignore
3038 MEM_REF wrappings of the base that can appear just for
3039 TBAA reasons. */
3040 if (TREE_CODE (arg0) == MEM_REF
3041 && DECL_P (arg1)
3042 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3043 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3044 && integer_zerop (TREE_OPERAND (arg0, 1)))
3045 return true;
3046 else if (TREE_CODE (arg1) == MEM_REF
3047 && DECL_P (arg0)
3048 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3049 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3050 && integer_zerop (TREE_OPERAND (arg1, 1)))
3051 return true;
3052 return false;
3054 else
3055 return false;
3058 /* When not checking addresses, this is needed for conversions and for
3059 COMPONENT_REF. Might as well play it safe and always test this. */
3060 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3061 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3062 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3063 && !(flags & OEP_ADDRESS_OF)))
3064 return false;
3066 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3067 We don't care about side effects in that case because the SAVE_EXPR
3068 takes care of that for us. In all other cases, two expressions are
3069 equal if they have no side effects. If we have two identical
3070 expressions with side effects that should be treated the same due
3071 to the only side effects being identical SAVE_EXPR's, that will
3072 be detected in the recursive calls below.
3073 If we are taking an invariant address of two identical objects
3074 they are necessarily equal as well. */
3075 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3076 && (TREE_CODE (arg0) == SAVE_EXPR
3077 || (flags & OEP_MATCH_SIDE_EFFECTS)
3078 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3079 return true;
3081 /* Next handle constant cases, those for which we can return true even
3082 if ONLY_CONST is set. */
3083 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3084 switch (TREE_CODE (arg0))
3086 case INTEGER_CST:
3087 return tree_int_cst_equal (arg0, arg1);
3089 case FIXED_CST:
3090 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3091 TREE_FIXED_CST (arg1));
3093 case REAL_CST:
3094 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3095 return true;
3098 if (!HONOR_SIGNED_ZEROS (arg0))
3100 /* If we do not distinguish between signed and unsigned zero,
3101 consider them equal. */
3102 if (real_zerop (arg0) && real_zerop (arg1))
3103 return true;
3105 return false;
3107 case VECTOR_CST:
3109 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3110 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3111 return false;
3113 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3114 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3115 return false;
3117 unsigned int count = vector_cst_encoded_nelts (arg0);
3118 for (unsigned int i = 0; i < count; ++i)
3119 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3120 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3121 return false;
3122 return true;
3125 case COMPLEX_CST:
3126 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3127 flags)
3128 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3129 flags));
3131 case STRING_CST:
3132 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3133 && ! memcmp (TREE_STRING_POINTER (arg0),
3134 TREE_STRING_POINTER (arg1),
3135 TREE_STRING_LENGTH (arg0)));
3137 case ADDR_EXPR:
3138 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3139 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3140 flags | OEP_ADDRESS_OF
3141 | OEP_MATCH_SIDE_EFFECTS);
3142 case CONSTRUCTOR:
3143 /* In GIMPLE empty constructors are allowed in initializers of
3144 aggregates. */
3145 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3146 default:
3147 break;
3150 if (flags & OEP_ONLY_CONST)
3151 return false;
3153 /* Define macros to test an operand from arg0 and arg1 for equality and a
3154 variant that allows null and views null as being different from any
3155 non-null value. In the latter case, if either is null, they both
3156 must be; otherwise, do the normal comparison. */
3157 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3158 TREE_OPERAND (arg1, N), flags)
3160 #define OP_SAME_WITH_NULL(N) \
3161 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3162 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3164 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3166 case tcc_unary:
3167 /* Two conversions are equal only if signedness and modes match. */
3168 switch (TREE_CODE (arg0))
3170 CASE_CONVERT:
3171 case FIX_TRUNC_EXPR:
3172 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3173 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3174 return false;
3175 break;
3176 default:
3177 break;
3180 return OP_SAME (0);
3183 case tcc_comparison:
3184 case tcc_binary:
3185 if (OP_SAME (0) && OP_SAME (1))
3186 return true;
3188 /* For commutative ops, allow the other order. */
3189 return (commutative_tree_code (TREE_CODE (arg0))
3190 && operand_equal_p (TREE_OPERAND (arg0, 0),
3191 TREE_OPERAND (arg1, 1), flags)
3192 && operand_equal_p (TREE_OPERAND (arg0, 1),
3193 TREE_OPERAND (arg1, 0), flags));
3195 case tcc_reference:
3196 /* If either of the pointer (or reference) expressions we are
3197 dereferencing contain a side effect, these cannot be equal,
3198 but their addresses can be. */
3199 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3200 && (TREE_SIDE_EFFECTS (arg0)
3201 || TREE_SIDE_EFFECTS (arg1)))
3202 return false;
3204 switch (TREE_CODE (arg0))
3206 case INDIRECT_REF:
3207 if (!(flags & OEP_ADDRESS_OF))
3209 if (TYPE_ALIGN (TREE_TYPE (arg0))
3210 != TYPE_ALIGN (TREE_TYPE (arg1)))
3211 return false;
3212 /* Verify that the access types are compatible. */
3213 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3214 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3215 return false;
3217 flags &= ~OEP_ADDRESS_OF;
3218 return OP_SAME (0);
3220 case IMAGPART_EXPR:
3221 /* Require the same offset. */
3222 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3223 TYPE_SIZE (TREE_TYPE (arg1)),
3224 flags & ~OEP_ADDRESS_OF))
3225 return false;
3227 /* Fallthru. */
3228 case REALPART_EXPR:
3229 case VIEW_CONVERT_EXPR:
3230 return OP_SAME (0);
3232 case TARGET_MEM_REF:
3233 case MEM_REF:
3234 if (!(flags & OEP_ADDRESS_OF))
3236 /* Require equal access sizes */
3237 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3238 && (!TYPE_SIZE (TREE_TYPE (arg0))
3239 || !TYPE_SIZE (TREE_TYPE (arg1))
3240 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3241 TYPE_SIZE (TREE_TYPE (arg1)),
3242 flags)))
3243 return false;
3244 /* Verify that access happens in similar types. */
3245 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3246 return false;
3247 /* Verify that accesses are TBAA compatible. */
3248 if (!alias_ptr_types_compatible_p
3249 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3250 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3251 || (MR_DEPENDENCE_CLIQUE (arg0)
3252 != MR_DEPENDENCE_CLIQUE (arg1))
3253 || (MR_DEPENDENCE_BASE (arg0)
3254 != MR_DEPENDENCE_BASE (arg1)))
3255 return false;
3256 /* Verify that alignment is compatible. */
3257 if (TYPE_ALIGN (TREE_TYPE (arg0))
3258 != TYPE_ALIGN (TREE_TYPE (arg1)))
3259 return false;
3261 flags &= ~OEP_ADDRESS_OF;
3262 return (OP_SAME (0) && OP_SAME (1)
3263 /* TARGET_MEM_REFs require equal extra operands. */
3264 && (TREE_CODE (arg0) != TARGET_MEM_REF
3265 || (OP_SAME_WITH_NULL (2)
3266 && OP_SAME_WITH_NULL (3)
3267 && OP_SAME_WITH_NULL (4))));
3269 case ARRAY_REF:
3270 case ARRAY_RANGE_REF:
3271 if (!OP_SAME (0))
3272 return false;
3273 flags &= ~OEP_ADDRESS_OF;
3274 /* First compare the array index by value if it is constant, as the
3275 indexes may have different types but the same value here. */
3276 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3277 TREE_OPERAND (arg1, 1))
3278 || OP_SAME (1))
3279 && OP_SAME_WITH_NULL (2)
3280 && OP_SAME_WITH_NULL (3)
3281 /* Compare low bound and element size as with OEP_ADDRESS_OF
3282 we have to account for the offset of the ref. */
3283 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3284 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3285 || (operand_equal_p (array_ref_low_bound
3286 (CONST_CAST_TREE (arg0)),
3287 array_ref_low_bound
3288 (CONST_CAST_TREE (arg1)), flags)
3289 && operand_equal_p (array_ref_element_size
3290 (CONST_CAST_TREE (arg0)),
3291 array_ref_element_size
3292 (CONST_CAST_TREE (arg1)),
3293 flags))));
3295 case COMPONENT_REF:
3296 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3297 may be NULL when we're called to compare MEM_EXPRs. */
3298 if (!OP_SAME_WITH_NULL (0)
3299 || !OP_SAME (1))
3300 return false;
3301 flags &= ~OEP_ADDRESS_OF;
3302 return OP_SAME_WITH_NULL (2);
3304 case BIT_FIELD_REF:
3305 if (!OP_SAME (0))
3306 return false;
3307 flags &= ~OEP_ADDRESS_OF;
3308 return OP_SAME (1) && OP_SAME (2);
3310 /* Virtual table call. */
3311 case OBJ_TYPE_REF:
3313 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3314 OBJ_TYPE_REF_EXPR (arg1), flags))
3315 return false;
3316 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3317 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3318 return false;
3319 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3320 OBJ_TYPE_REF_OBJECT (arg1), flags))
3321 return false;
3322 if (!types_same_for_odr (obj_type_ref_class (arg0),
3323 obj_type_ref_class (arg1)))
3324 return false;
3325 return true;
3328 default:
3329 return false;
3332 case tcc_expression:
3333 switch (TREE_CODE (arg0))
3335 case ADDR_EXPR:
3336 /* Be sure we pass the right ADDRESS_OF flag. */
3337 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3338 return operand_equal_p (TREE_OPERAND (arg0, 0),
3339 TREE_OPERAND (arg1, 0),
3340 flags | OEP_ADDRESS_OF);
3342 case TRUTH_NOT_EXPR:
3343 return OP_SAME (0);
3345 case TRUTH_ANDIF_EXPR:
3346 case TRUTH_ORIF_EXPR:
3347 return OP_SAME (0) && OP_SAME (1);
3349 case WIDEN_MULT_PLUS_EXPR:
3350 case WIDEN_MULT_MINUS_EXPR:
3351 if (!OP_SAME (2))
3352 return false;
3353 /* The multiplication operands are commutative. */
3354 /* FALLTHRU */
3356 case TRUTH_AND_EXPR:
3357 case TRUTH_OR_EXPR:
3358 case TRUTH_XOR_EXPR:
3359 if (OP_SAME (0) && OP_SAME (1))
3360 return true;
3362 /* Otherwise take into account that this is a commutative operation. */
3363 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3364 TREE_OPERAND (arg1, 1), flags)
3365 && operand_equal_p (TREE_OPERAND (arg0, 1),
3366 TREE_OPERAND (arg1, 0), flags));
3368 case COND_EXPR:
3369 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3370 return false;
3371 flags &= ~OEP_ADDRESS_OF;
3372 return OP_SAME (0);
3374 case BIT_INSERT_EXPR:
3375 /* BIT_INSERT_EXPR has an implicit operand: the type precision
3376 of op1. We need to check that it matches in both arguments. */
3377 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3378 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3379 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3380 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3381 return false;
3382 /* FALLTHRU */
3384 case VEC_COND_EXPR:
3385 case DOT_PROD_EXPR:
3386 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3388 case MODIFY_EXPR:
3389 case INIT_EXPR:
3390 case COMPOUND_EXPR:
3391 case PREDECREMENT_EXPR:
3392 case PREINCREMENT_EXPR:
3393 case POSTDECREMENT_EXPR:
3394 case POSTINCREMENT_EXPR:
3395 if (flags & OEP_LEXICOGRAPHIC)
3396 return OP_SAME (0) && OP_SAME (1);
3397 return false;
3399 case CLEANUP_POINT_EXPR:
3400 case EXPR_STMT:
3401 case SAVE_EXPR:
3402 if (flags & OEP_LEXICOGRAPHIC)
3403 return OP_SAME (0);
3404 return false;
3406 default:
3407 return false;
3410 case tcc_vl_exp:
3411 switch (TREE_CODE (arg0))
3413 case CALL_EXPR:
3414 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3415 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3416 /* If one CALL_EXPR is internal and the other is a normal function
3417 call, then they are not equal. */
3418 return false;
3419 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3421 /* If the CALL_EXPRs call different internal functions, then they
3422 are not equal. */
3423 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3424 return false;
3426 else
3428 /* If the CALL_EXPRs call different functions, then they are not
3429 equal. */
3430 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3431 flags))
3432 return false;
3435 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3437 unsigned int cef = call_expr_flags (arg0);
3438 if (flags & OEP_PURE_SAME)
3439 cef &= ECF_CONST | ECF_PURE;
3440 else
3441 cef &= ECF_CONST;
3442 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3443 return false;
3446 /* Now see if all the arguments are the same. */
3448 const_call_expr_arg_iterator iter0, iter1;
3449 const_tree a0, a1;
3450 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3451 a1 = first_const_call_expr_arg (arg1, &iter1);
3452 a0 && a1;
3453 a0 = next_const_call_expr_arg (&iter0),
3454 a1 = next_const_call_expr_arg (&iter1))
3455 if (! operand_equal_p (a0, a1, flags))
3456 return false;
3458 /* If we get here and both argument lists are exhausted
3459 then the CALL_EXPRs are equal. */
3460 return ! (a0 || a1);
3462 default:
3463 return false;
3466 case tcc_declaration:
3467 /* Consider __builtin_sqrt equal to sqrt. */
3468 return (TREE_CODE (arg0) == FUNCTION_DECL
3469 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3470 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3471 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3472 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3474 case tcc_exceptional:
3475 if (TREE_CODE (arg0) == CONSTRUCTOR)
3477 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3478 return false;
3480 /* In GIMPLE constructors are used only to build vectors from
3481 elements. Individual elements in the constructor must be
3482 indexed in increasing order and form an initial sequence.
3484 We make no effort to compare constructors in GENERIC.
3485 (See sem_variable::equals in ipa-icf, which can do so for
3486 constants.) */
3487 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3488 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3489 return false;
3491 /* Be sure that the vectors constructed have the same representation.
3492 We have only tested that the element precisions and modes match.
3493 Vectors may be BLKmode, so also check that the numbers of
3494 parts match. */
3495 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3496 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3497 return false;
3499 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3500 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3501 unsigned int len = vec_safe_length (v0);
3503 if (len != vec_safe_length (v1))
3504 return false;
3506 for (unsigned int i = 0; i < len; i++)
3508 constructor_elt *c0 = &(*v0)[i];
3509 constructor_elt *c1 = &(*v1)[i];
3511 if (!operand_equal_p (c0->value, c1->value, flags)
3512 /* In GIMPLE the indexes can be either NULL or matching i.
3513 Double check this so we won't get false
3514 positives for GENERIC. */
3515 || (c0->index
3516 && (TREE_CODE (c0->index) != INTEGER_CST
3517 || compare_tree_int (c0->index, i)))
3518 || (c1->index
3519 && (TREE_CODE (c1->index) != INTEGER_CST
3520 || compare_tree_int (c1->index, i))))
3521 return false;
3523 return true;
3525 else if (TREE_CODE (arg0) == STATEMENT_LIST
3526 && (flags & OEP_LEXICOGRAPHIC))
3528 /* Compare the STATEMENT_LISTs. */
3529 tree_stmt_iterator tsi1, tsi2;
3530 tree body1 = CONST_CAST_TREE (arg0);
3531 tree body2 = CONST_CAST_TREE (arg1);
3532 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3533 tsi_next (&tsi1), tsi_next (&tsi2))
3535 /* The lists don't have the same number of statements. */
3536 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3537 return false;
3538 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3539 return true;
3540 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3541 flags & (OEP_LEXICOGRAPHIC
3542 | OEP_NO_HASH_CHECK)))
3543 return false;
3546 return false;
3548 case tcc_statement:
3549 switch (TREE_CODE (arg0))
3551 case RETURN_EXPR:
3552 if (flags & OEP_LEXICOGRAPHIC)
3553 return OP_SAME_WITH_NULL (0);
3554 return false;
3555 case DEBUG_BEGIN_STMT:
3556 if (flags & OEP_LEXICOGRAPHIC)
3557 return true;
3558 return false;
3559 default:
3560 return false;
3563 default:
3564 return false;
3567 #undef OP_SAME
3568 #undef OP_SAME_WITH_NULL
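/* Some illustrative consequences of the rules above (A, B, X, Y and VAR
   being arbitrary matching operands): "a + b" equals "b + a" through the
   commutative-operator path; "x < y" equals "y > x" through the
   swapped-comparison path; and with OEP_ADDRESS_OF, "&var" equals "&var"
   even when VAR is volatile.  */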
3571 /* Generate a hash value for an expression. This can be used iteratively
3572 by passing a previous result as the HSTATE argument. */
3574 void
3575 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3576 unsigned int flags)
3578 int i;
3579 enum tree_code code;
3580 enum tree_code_class tclass;
3582 if (t == NULL_TREE || t == error_mark_node)
3584 hstate.merge_hash (0);
3585 return;
3588 STRIP_ANY_LOCATION_WRAPPER (t);
3590 if (!(flags & OEP_ADDRESS_OF))
3591 STRIP_NOPS (t);
3593 code = TREE_CODE (t);
3595 switch (code)
3597 /* Alas, constants aren't shared, so we can't rely on pointer
3598 identity. */
3599 case VOID_CST:
3600 hstate.merge_hash (0);
3601 return;
3602 case INTEGER_CST:
3603 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3604 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3605 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3606 return;
3607 case REAL_CST:
3609 unsigned int val2;
3610 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3611 val2 = rvc_zero;
3612 else
3613 val2 = real_hash (TREE_REAL_CST_PTR (t));
3614 hstate.merge_hash (val2);
3615 return;
3617 case FIXED_CST:
3619 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3620 hstate.merge_hash (val2);
3621 return;
3623 case STRING_CST:
3624 hstate.add ((const void *) TREE_STRING_POINTER (t),
3625 TREE_STRING_LENGTH (t));
3626 return;
3627 case COMPLEX_CST:
3628 hash_operand (TREE_REALPART (t), hstate, flags);
3629 hash_operand (TREE_IMAGPART (t), hstate, flags);
3630 return;
3631 case VECTOR_CST:
3633 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3634 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3635 unsigned int count = vector_cst_encoded_nelts (t);
3636 for (unsigned int i = 0; i < count; ++i)
3637 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3638 return;
3640 case SSA_NAME:
3641 /* We can just compare by pointer. */
3642 hstate.add_hwi (SSA_NAME_VERSION (t));
3643 return;
3644 case PLACEHOLDER_EXPR:
3645 /* The node itself doesn't matter. */
3646 return;
3647 case BLOCK:
3648 case OMP_CLAUSE:
3649 /* Ignore. */
3650 return;
3651 case TREE_LIST:
3652 /* A list of expressions, for a CALL_EXPR or as the elements of a
3653 VECTOR_CST. */
3654 for (; t; t = TREE_CHAIN (t))
3655 hash_operand (TREE_VALUE (t), hstate, flags);
3656 return;
3657 case CONSTRUCTOR:
3659 unsigned HOST_WIDE_INT idx;
3660 tree field, value;
3661 flags &= ~OEP_ADDRESS_OF;
3662 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3663 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3665 /* In GIMPLE the indexes can be either NULL or matching i. */
3666 if (field == NULL_TREE)
3667 field = bitsize_int (idx);
3668 hash_operand (field, hstate, flags);
3669 hash_operand (value, hstate, flags);
3671 return;
3673 case STATEMENT_LIST:
3675 tree_stmt_iterator i;
3676 for (i = tsi_start (CONST_CAST_TREE (t));
3677 !tsi_end_p (i); tsi_next (&i))
3678 hash_operand (tsi_stmt (i), hstate, flags);
3679 return;
3681 case TREE_VEC:
3682 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3683 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3684 return;
3685 case IDENTIFIER_NODE:
3686 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3687 return;
3688 case FUNCTION_DECL:
3689 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3690 Otherwise nodes that compare equal according to operand_equal_p might
3691 get different hash codes. However, don't do this for machine specific
3692 or front end builtins, since the function code is overloaded in those
3693 cases. */
3694 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3695 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3697 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3698 code = TREE_CODE (t);
3700 /* FALL THROUGH */
3701 default:
3702 if (POLY_INT_CST_P (t))
3704 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3705 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3706 return;
3708 tclass = TREE_CODE_CLASS (code);
3710 if (tclass == tcc_declaration)
3712 /* DECLs have a unique ID. */
3713 hstate.add_hwi (DECL_UID (t));
3715 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3717 /* For comparisons that can be swapped, use the lower
3718 tree code. */
3719 enum tree_code ccode = swap_tree_comparison (code);
3720 if (code < ccode)
3721 ccode = code;
3722 hstate.add_object (ccode);
3723 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3724 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3726 else if (CONVERT_EXPR_CODE_P (code))
3728 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3729 operand_equal_p. */
3730 enum tree_code ccode = NOP_EXPR;
3731 hstate.add_object (ccode);
3733 /* Don't hash the type; that can lead to nodes which
3734 compare equal according to operand_equal_p but which
3735 have different hash codes. Make sure to include signedness
3736 in the hash computation. */
3737 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3738 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3740 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3741 else if (code == MEM_REF
3742 && (flags & OEP_ADDRESS_OF) != 0
3743 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3744 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3745 && integer_zerop (TREE_OPERAND (t, 1)))
3746 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3747 hstate, flags);
3748 /* Don't ICE on FE specific trees, or their arguments etc.
3749 during operand_equal_p hash verification. */
3750 else if (!IS_EXPR_CODE_CLASS (tclass))
3751 gcc_assert (flags & OEP_HASH_CHECK);
3752 else
3754 unsigned int sflags = flags;
3756 hstate.add_object (code);
3758 switch (code)
3760 case ADDR_EXPR:
3761 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3762 flags |= OEP_ADDRESS_OF;
3763 sflags = flags;
3764 break;
3766 case INDIRECT_REF:
3767 case MEM_REF:
3768 case TARGET_MEM_REF:
3769 flags &= ~OEP_ADDRESS_OF;
3770 sflags = flags;
3771 break;
3773 case ARRAY_REF:
3774 case ARRAY_RANGE_REF:
3775 case COMPONENT_REF:
3776 case BIT_FIELD_REF:
3777 sflags &= ~OEP_ADDRESS_OF;
3778 break;
3780 case COND_EXPR:
3781 flags &= ~OEP_ADDRESS_OF;
3782 break;
3784 case WIDEN_MULT_PLUS_EXPR:
3785 case WIDEN_MULT_MINUS_EXPR:
3787 /* The multiplication operands are commutative. */
3788 inchash::hash one, two;
3789 hash_operand (TREE_OPERAND (t, 0), one, flags);
3790 hash_operand (TREE_OPERAND (t, 1), two, flags);
3791 hstate.add_commutative (one, two);
3792 hash_operand (TREE_OPERAND (t, 2), two, flags);
3793 return;
3796 case CALL_EXPR:
3797 if (CALL_EXPR_FN (t) == NULL_TREE)
3798 hstate.add_int (CALL_EXPR_IFN (t));
3799 break;
3801 case TARGET_EXPR:
3802 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3803 Usually different TARGET_EXPRs should just use
3804 different temporaries in their slots. */
3805 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3806 return;
3808 /* Virtual table call. */
3809 case OBJ_TYPE_REF:
3810 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3811 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3812 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3813 return;
3814 default:
3815 break;
3818 /* Don't hash the type; that can lead to nodes which
3819 compare equal according to operand_equal_p but which
3820 have different hash codes. */
3821 if (code == NON_LVALUE_EXPR)
3823 /* Make sure to include signedness in the hash computation. */
3824 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3825 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3828 else if (commutative_tree_code (code))
3830 /* It's a commutative expression. We want to hash it the same
3831 however it appears. We do this by first hashing both operands
3832 and then rehashing based on the order of their independent
3833 hashes. */
3834 inchash::hash one, two;
3835 hash_operand (TREE_OPERAND (t, 0), one, flags);
3836 hash_operand (TREE_OPERAND (t, 1), two, flags);
3837 hstate.add_commutative (one, two);
3839 else
3840 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3841 hash_operand (TREE_OPERAND (t, i), hstate,
3842 i == 0 ? flags : sflags);
3844 return;
3848 bool
3849 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3850 unsigned int flags, bool *ret)
3852 /* When checking, verify at the outermost operand_equal_p call that
3853 if operand_equal_p returns true then ARG0 and ARG1 have the same
3854 hash value. */
3855 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3857 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3859 if (arg0 != arg1)
3861 inchash::hash hstate0 (0), hstate1 (0);
3862 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3863 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3864 hashval_t h0 = hstate0.end ();
3865 hashval_t h1 = hstate1.end ();
3866 gcc_assert (h0 == h1);
3868 *ret = true;
3870 else
3871 *ret = false;
3873 return true;
3876 return false;
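/* Illustrative sketch (hypothetical trees, not part of the sources
   proper): with flag_checking enabled, an outermost call such as
     operand_equal_p (a + b, b + a, 0)
   that returns true re-runs hash_operand on both trees with
   OEP_HASH_CHECK and asserts that the two resulting hash values
   agree.  */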
3880 static operand_compare default_compare_instance;
3882 /* Convenience wrapper around the operand_compare class, since we usually
3883 do not need to play with the valueizer. */
3885 bool
3886 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3888 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3891 namespace inchash
3894 /* Generate a hash value for an expression. This can be used iteratively
3895 by passing a previous result as the HSTATE argument.
3897 This function is intended to produce the same hash for expressions which
3898 would compare equal using operand_equal_p. */
3899 void
3900 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3902 default_compare_instance.hash_operand (t, hstate, flags);
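/* Example use (hypothetical trees a_plus_b and b_plus_a standing for
   the PLUS_EXPRs a + b and b + a): commutative operands are hashed
   order-independently, so
     inchash::hash h1 (0), h2 (0);
     inchash::add_expr (a_plus_b, h1, 0);
     inchash::add_expr (b_plus_a, h2, 0);
   ends with h1.end () == h2.end (), matching operand_equal_p's view
   that the two trees are equal.  */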
3907 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3908 with a different signedness or a narrower precision. */
3910 static bool
3911 operand_equal_for_comparison_p (tree arg0, tree arg1)
3913 if (operand_equal_p (arg0, arg1, 0))
3914 return true;
3916 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3917 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3918 return false;
3920 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3921 and see if the inner values are the same. This removes any
3922 signedness comparison, which doesn't matter here. */
3923 tree op0 = arg0;
3924 tree op1 = arg1;
3925 STRIP_NOPS (op0);
3926 STRIP_NOPS (op1);
3927 if (operand_equal_p (op0, op1, 0))
3928 return true;
3930 /* Discard a single widening conversion from ARG1 and see if the inner
3931 value is the same as ARG0. */
3932 if (CONVERT_EXPR_P (arg1)
3933 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3934 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3935 < TYPE_PRECISION (TREE_TYPE (arg1))
3936 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3937 return true;
3939 return false;
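/* For illustration (hypothetical trees, assuming an LP64 target):
   with int i, the pair ARG0 = i and ARG1 = (long) i compares equal
   here, because the conversion on ARG1 is a single widening
   conversion from a narrower integral type and stripping it exposes
   the same inner value.  */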
3942 /* See if ARG is an expression that is either a comparison or is performing
3943 arithmetic on comparisons. The comparisons must only be comparing
3944 two different values, which will be stored in *CVAL1 and *CVAL2; if
3945 they are nonzero it means that some operands have already been found.
3946 No variables may be used anywhere else in the expression except in the
3947 comparisons.
3949 If this is true, return true. Otherwise, return false. */
3951 static bool
3952 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3954 enum tree_code code = TREE_CODE (arg);
3955 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3957 /* We can handle some of the tcc_expression cases here. */
3958 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3959 tclass = tcc_unary;
3960 else if (tclass == tcc_expression
3961 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3962 || code == COMPOUND_EXPR))
3963 tclass = tcc_binary;
3965 switch (tclass)
3967 case tcc_unary:
3968 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3970 case tcc_binary:
3971 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3972 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3974 case tcc_constant:
3975 return true;
3977 case tcc_expression:
3978 if (code == COND_EXPR)
3979 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3980 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3981 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3982 return false;
3984 case tcc_comparison:
3985 /* First see if we can handle the first operand, then the second. For
3986 the second operand, we know *CVAL1 can't be zero. It must be that
3987 one side of the comparison is each of the values; test for the
3988 case where this isn't true by failing if the two operands
3989 are the same. */
3991 if (operand_equal_p (TREE_OPERAND (arg, 0),
3992 TREE_OPERAND (arg, 1), 0))
3993 return false;
3995 if (*cval1 == 0)
3996 *cval1 = TREE_OPERAND (arg, 0);
3997 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3999 else if (*cval2 == 0)
4000 *cval2 = TREE_OPERAND (arg, 0);
4001 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4003 else
4004 return false;
4006 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4008 else if (*cval2 == 0)
4009 *cval2 = TREE_OPERAND (arg, 1);
4010 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4012 else
4013 return false;
4015 return true;
4017 default:
4018 return false;
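/* For illustration (hypothetical trees): for
     ARG = (a < b) || (b == a)
   this returns true with *CVAL1 = a and *CVAL2 = b, while
     ARG = (a < b) || (b < c)
   returns false because a third value, c, appears in a
   comparison.  */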
4022 /* ARG is a tree that is known to contain just arithmetic operations and
4023 comparisons. Evaluate the operations in the tree substituting NEW0 for
4024 any occurrence of OLD0 as an operand of a comparison and likewise for
4025 NEW1 and OLD1. */
4027 static tree
4028 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4029 tree old1, tree new1)
4031 tree type = TREE_TYPE (arg);
4032 enum tree_code code = TREE_CODE (arg);
4033 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4035 /* We can handle some of the tcc_expression cases here. */
4036 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4037 tclass = tcc_unary;
4038 else if (tclass == tcc_expression
4039 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4040 tclass = tcc_binary;
4042 switch (tclass)
4044 case tcc_unary:
4045 return fold_build1_loc (loc, code, type,
4046 eval_subst (loc, TREE_OPERAND (arg, 0),
4047 old0, new0, old1, new1));
4049 case tcc_binary:
4050 return fold_build2_loc (loc, code, type,
4051 eval_subst (loc, TREE_OPERAND (arg, 0),
4052 old0, new0, old1, new1),
4053 eval_subst (loc, TREE_OPERAND (arg, 1),
4054 old0, new0, old1, new1));
4056 case tcc_expression:
4057 switch (code)
4059 case SAVE_EXPR:
4060 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4061 old1, new1);
4063 case COMPOUND_EXPR:
4064 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4065 old1, new1);
4067 case COND_EXPR:
4068 return fold_build3_loc (loc, code, type,
4069 eval_subst (loc, TREE_OPERAND (arg, 0),
4070 old0, new0, old1, new1),
4071 eval_subst (loc, TREE_OPERAND (arg, 1),
4072 old0, new0, old1, new1),
4073 eval_subst (loc, TREE_OPERAND (arg, 2),
4074 old0, new0, old1, new1));
4075 default:
4076 break;
4078 /* Fall through - ??? */
4080 case tcc_comparison:
4082 tree arg0 = TREE_OPERAND (arg, 0);
4083 tree arg1 = TREE_OPERAND (arg, 1);
4085 /* We need to check both for exact equality and tree equality. The
4086 former will be true if the operand has a side-effect. In that
4087 case, we know the operand occurred exactly once. */
4089 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4090 arg0 = new0;
4091 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4092 arg0 = new1;
4094 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4095 arg1 = new0;
4096 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4097 arg1 = new1;
4099 return fold_build2_loc (loc, code, type, arg0, arg1);
4102 default:
4103 return arg;
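/* For illustration (hypothetical trees): with OLD0 = a, NEW0 = 1,
   OLD1 = b and NEW1 = 0,
     eval_subst (loc, (a < b) && (b <= a), a, 1, b, 0)
   rebuilds the expression as (1 < 0) && (0 <= 1), which the
   fold_build* calls then reduce to a constant.  */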
4107 /* Return a tree for the case when the result of an expression is RESULT
4108 converted to TYPE and OMITTED was previously an operand of the expression
4109 but is now not needed (e.g., we folded OMITTED * 0).
4111 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4112 the conversion of RESULT to TYPE. */
4114 tree
4115 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4117 tree t = fold_convert_loc (loc, type, result);
4119 /* If the resulting operand is an empty statement, just return the omitted
4120 statement cast to void. */
4121 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4122 return build1_loc (loc, NOP_EXPR, void_type_node,
4123 fold_ignored_result (omitted));
4125 if (TREE_SIDE_EFFECTS (omitted))
4126 return build2_loc (loc, COMPOUND_EXPR, type,
4127 fold_ignored_result (omitted), t);
4129 return non_lvalue_loc (loc, t);
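/* For illustration (hypothetical trees): when f () * 0 folds to 0,
   the call still has to be evaluated, so
     omit_one_operand_loc (loc, type, integer_zero_node, call_to_f)
   produces the COMPOUND_EXPR (f (), 0); had the omitted operand no
   side effects, plain 0 would be returned instead.  */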
4132 /* Return a tree for the case when the result of an expression is RESULT
4133 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4134 of the expression but are now not needed.
4136 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4137 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4138 evaluated before OMITTED2. Otherwise, if neither has side effects,
4139 just do the conversion of RESULT to TYPE. */
4141 tree
4142 omit_two_operands_loc (location_t loc, tree type, tree result,
4143 tree omitted1, tree omitted2)
4145 tree t = fold_convert_loc (loc, type, result);
4147 if (TREE_SIDE_EFFECTS (omitted2))
4148 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4149 if (TREE_SIDE_EFFECTS (omitted1))
4150 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4152 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4156 /* Return a simplified tree node for the truth-negation of ARG. This
4157 never alters ARG itself. We assume that ARG is an operation that
4158 returns a truth value (0 or 1).
4160 FIXME: one would think we would fold the result, but it causes
4161 problems with the dominator optimizer. */
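/* For illustration (hypothetical trees): the negation is pushed
   inward following De Morgan's laws, e.g.
     !(a && b)  ->  !a || !b
     !(x < y)   ->  x >= y   (not done for trapping FP comparisons)
   and NULL_TREE is returned when no such rewrite applies.  */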
4163 static tree
4164 fold_truth_not_expr (location_t loc, tree arg)
4166 tree type = TREE_TYPE (arg);
4167 enum tree_code code = TREE_CODE (arg);
4168 location_t loc1, loc2;
4170 /* If this is a comparison, we can simply invert it, except for
4171 floating-point non-equality comparisons, in which case we just
4172 enclose a TRUTH_NOT_EXPR around what we have. */
4174 if (TREE_CODE_CLASS (code) == tcc_comparison)
4176 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4177 if (FLOAT_TYPE_P (op_type)
4178 && flag_trapping_math
4179 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4180 && code != NE_EXPR && code != EQ_EXPR)
4181 return NULL_TREE;
4183 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4184 if (code == ERROR_MARK)
4185 return NULL_TREE;
4187 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4188 TREE_OPERAND (arg, 1));
4189 if (TREE_NO_WARNING (arg))
4190 TREE_NO_WARNING (ret) = 1;
4191 return ret;
4194 switch (code)
4196 case INTEGER_CST:
4197 return constant_boolean_node (integer_zerop (arg), type);
4199 case TRUTH_AND_EXPR:
4200 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4201 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4202 return build2_loc (loc, TRUTH_OR_EXPR, type,
4203 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4204 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4206 case TRUTH_OR_EXPR:
4207 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4208 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4209 return build2_loc (loc, TRUTH_AND_EXPR, type,
4210 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4211 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4213 case TRUTH_XOR_EXPR:
4214 /* Here we can invert either operand. We invert the first operand
4215 unless the second operand is a TRUTH_NOT_EXPR in which case our
4216 result is the XOR of the first operand with the inside of the
4217 negation of the second operand. */
4219 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4220 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4221 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4222 else
4223 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4224 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4225 TREE_OPERAND (arg, 1));
4227 case TRUTH_ANDIF_EXPR:
4228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4229 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4230 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4231 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4232 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4234 case TRUTH_ORIF_EXPR:
4235 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4236 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4237 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4238 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4239 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4241 case TRUTH_NOT_EXPR:
4242 return TREE_OPERAND (arg, 0);
4244 case COND_EXPR:
4246 tree arg1 = TREE_OPERAND (arg, 1);
4247 tree arg2 = TREE_OPERAND (arg, 2);
4249 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4250 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4252 /* A COND_EXPR may have a throw as one operand, which
4253 then has void type. Just leave void operands
4254 as they are. */
4255 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4256 VOID_TYPE_P (TREE_TYPE (arg1))
4257 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4258 VOID_TYPE_P (TREE_TYPE (arg2))
4259 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4262 case COMPOUND_EXPR:
4263 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4264 return build2_loc (loc, COMPOUND_EXPR, type,
4265 TREE_OPERAND (arg, 0),
4266 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4268 case NON_LVALUE_EXPR:
4269 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4270 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4272 CASE_CONVERT:
4273 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4274 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4276 /* fall through */
4278 case FLOAT_EXPR:
4279 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4280 return build1_loc (loc, TREE_CODE (arg), type,
4281 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4283 case BIT_AND_EXPR:
4284 if (!integer_onep (TREE_OPERAND (arg, 1)))
4285 return NULL_TREE;
4286 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4288 case SAVE_EXPR:
4289 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4291 case CLEANUP_POINT_EXPR:
4292 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4293 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4294 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4296 default:
4297 return NULL_TREE;
4301 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4302 assume that ARG is an operation that returns a truth value (0 or 1
4303 for scalars, 0 or -1 for vectors). Return the folded expression if
4304 folding is successful. Otherwise, return NULL_TREE. */
4306 static tree
4307 fold_invert_truthvalue (location_t loc, tree arg)
4309 tree type = TREE_TYPE (arg);
4310 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4311 ? BIT_NOT_EXPR
4312 : TRUTH_NOT_EXPR,
4313 type, arg);
4316 /* Return a simplified tree node for the truth-negation of ARG. This
4317 never alters ARG itself. We assume that ARG is an operation that
4318 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4320 tree
4321 invert_truthvalue_loc (location_t loc, tree arg)
4323 if (TREE_CODE (arg) == ERROR_MARK)
4324 return arg;
4326 tree type = TREE_TYPE (arg);
4327 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4328 ? BIT_NOT_EXPR
4329 : TRUTH_NOT_EXPR,
4330 type, arg);
4333 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4334 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4335 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4336 is the original memory reference used to preserve the alias set of
4337 the access. */
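/* For illustration (hypothetical values): a call with BITSIZE = 8
   and BITPOS = 16 builds roughly
     BIT_FIELD_REF <inner, 8, 16>
   using an 8-bit integer type, and converts the result to TYPE if
   the precision or signedness differs.  */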
4339 static tree
4340 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4341 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4342 int unsignedp, int reversep)
4344 tree result, bftype;
4346 /* Attempt not to lose the access path if possible. */
4347 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4349 tree ninner = TREE_OPERAND (orig_inner, 0);
4350 machine_mode nmode;
4351 poly_int64 nbitsize, nbitpos;
4352 tree noffset;
4353 int nunsignedp, nreversep, nvolatilep = 0;
4354 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4355 &noffset, &nmode, &nunsignedp,
4356 &nreversep, &nvolatilep);
4357 if (base == inner
4358 && noffset == NULL_TREE
4359 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4360 && !reversep
4361 && !nreversep
4362 && !nvolatilep)
4364 inner = ninner;
4365 bitpos -= nbitpos;
4369 alias_set_type iset = get_alias_set (orig_inner);
4370 if (iset == 0 && get_alias_set (inner) != iset)
4371 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4372 build_fold_addr_expr (inner),
4373 build_int_cst (ptr_type_node, 0));
4375 if (known_eq (bitpos, 0) && !reversep)
4377 tree size = TYPE_SIZE (TREE_TYPE (inner));
4378 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4379 || POINTER_TYPE_P (TREE_TYPE (inner)))
4380 && tree_fits_shwi_p (size)
4381 && tree_to_shwi (size) == bitsize)
4382 return fold_convert_loc (loc, type, inner);
4385 bftype = type;
4386 if (TYPE_PRECISION (bftype) != bitsize
4387 || TYPE_UNSIGNED (bftype) == !unsignedp)
4388 bftype = build_nonstandard_integer_type (bitsize, 0);
4390 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4391 bitsize_int (bitsize), bitsize_int (bitpos));
4392 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4394 if (bftype != type)
4395 result = fold_convert_loc (loc, type, result);
4397 return result;
4400 /* Optimize a bit-field compare.
4402 There are two cases: First is a compare against a constant and the
4403 second is a comparison of two items where the fields are at the same
4404 bit position relative to the start of a chunk (byte, halfword, word)
4405 large enough to contain it. In these cases we can avoid the shift
4406 implicit in bitfield extractions.
4408 For constants, we emit a compare of the shifted constant with the
4409 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4410 compared. For two fields at the same position, we AND both fields with
4411 the same mask and compare the results of the ANDs.
4413 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4414 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4415 are the left and right operands of the comparison, respectively.
4417 If the optimization described above can be done, we return the resulting
4418 tree. Otherwise we return zero. */
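/* For illustration (hypothetical struct, little-endian bit
   numbering): given
     struct S { unsigned int pad : 3, f : 4; } s;
   the test s.f == 5 can be rewritten along the lines of
     (w & (0xF << 3)) == (5 << 3)
   where w is a memory word containing the field, avoiding the shift
   a plain bit-field extraction would need.  */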
4420 static tree
4421 optimize_bit_field_compare (location_t loc, enum tree_code code,
4422 tree compare_type, tree lhs, tree rhs)
4424 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4425 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4426 tree type = TREE_TYPE (lhs);
4427 tree unsigned_type;
4428 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4429 machine_mode lmode, rmode;
4430 scalar_int_mode nmode;
4431 int lunsignedp, runsignedp;
4432 int lreversep, rreversep;
4433 int lvolatilep = 0, rvolatilep = 0;
4434 tree linner, rinner = NULL_TREE;
4435 tree mask;
4436 tree offset;
4438 /* Get all the information about the extractions being done. If the bit size
4439 is the same as the size of the underlying object, we aren't doing an
4440 extraction at all and so can do nothing. We also don't want to
4441 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4442 then will no longer be able to replace it. */
4443 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4444 &lunsignedp, &lreversep, &lvolatilep);
4445 if (linner == lhs
4446 || !known_size_p (plbitsize)
4447 || !plbitsize.is_constant (&lbitsize)
4448 || !plbitpos.is_constant (&lbitpos)
4449 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4450 || offset != 0
4451 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4452 || lvolatilep)
4453 return 0;
4455 if (const_p)
4456 rreversep = lreversep;
4457 else
4459 /* If this is not a constant, we can only do something if bit positions,
4460 sizes, signedness and storage order are the same. */
4461 rinner
4462 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4463 &runsignedp, &rreversep, &rvolatilep);
4465 if (rinner == rhs
4466 || maybe_ne (lbitpos, rbitpos)
4467 || maybe_ne (lbitsize, rbitsize)
4468 || lunsignedp != runsignedp
4469 || lreversep != rreversep
4470 || offset != 0
4471 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4472 || rvolatilep)
4473 return 0;
4476 /* Honor the C++ memory model and mimic what RTL expansion does. */
4477 poly_uint64 bitstart = 0;
4478 poly_uint64 bitend = 0;
4479 if (TREE_CODE (lhs) == COMPONENT_REF)
4481 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4482 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4483 return 0;
4486 /* See if we can find a mode to refer to this field. We should be able to,
4487 but fail if we can't. */
4488 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4489 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4490 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4491 TYPE_ALIGN (TREE_TYPE (rinner))),
4492 BITS_PER_WORD, false, &nmode))
4493 return 0;
4495 /* Set the unsigned type of the precision of this mode for the
4496 shifts below. */
4497 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4499 /* Compute the bit position and size for the new reference and our offset
4500 within it. If the new reference is the same size as the original, we
4501 won't optimize anything, so return zero. */
4502 nbitsize = GET_MODE_BITSIZE (nmode);
4503 nbitpos = lbitpos & ~ (nbitsize - 1);
4504 lbitpos -= nbitpos;
4505 if (nbitsize == lbitsize)
4506 return 0;
4508 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4509 lbitpos = nbitsize - lbitsize - lbitpos;
4511 /* Make the mask to be used against the extracted field. */
4512 mask = build_int_cst_type (unsigned_type, -1);
4513 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4514 mask = const_binop (RSHIFT_EXPR, mask,
4515 size_int (nbitsize - lbitsize - lbitpos));
4517 if (! const_p)
4519 if (nbitpos < 0)
4520 return 0;
4522 /* If not comparing with constant, just rework the comparison
4523 and return. */
4524 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4525 nbitsize, nbitpos, 1, lreversep);
4526 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4527 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4528 nbitsize, nbitpos, 1, rreversep);
4529 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4530 return fold_build2_loc (loc, code, compare_type, t1, t2);
4533 /* Otherwise, we are handling the constant case. See if the constant is too
4534 big for the field. Warn and return a tree for 0 (false) if so. We do
4535 this not only for its own sake, but to avoid having to test for this
4536 error case below. If we didn't, we might generate wrong code.
4538 For unsigned fields, the constant shifted right by the field length should
4539 be all zero. For signed fields, the high-order bits should agree with
4540 the sign bit. */
4542 if (lunsignedp)
4544 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4546 warning (0, "comparison is always %d due to width of bit-field",
4547 code == NE_EXPR);
4548 return constant_boolean_node (code == NE_EXPR, compare_type);
4551 else
4553 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4554 if (tem != 0 && tem != -1)
4556 warning (0, "comparison is always %d due to width of bit-field",
4557 code == NE_EXPR);
4558 return constant_boolean_node (code == NE_EXPR, compare_type);
4562 if (nbitpos < 0)
4563 return 0;
4565 /* Single-bit compares should always be against zero. */
4566 if (lbitsize == 1 && ! integer_zerop (rhs))
4568 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4569 rhs = build_int_cst (type, 0);
4572 /* Make a new bitfield reference, shift the constant over the
4573 appropriate number of bits and mask it with the computed mask
4574 (in case this was a signed field). */
4575 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4576 nbitsize, nbitpos, 1, lreversep);
4578 rhs = const_binop (BIT_AND_EXPR,
4579 const_binop (LSHIFT_EXPR,
4580 fold_convert_loc (loc, unsigned_type, rhs),
4581 size_int (lbitpos)),
4582 mask);
4584 lhs = build2_loc (loc, code, compare_type,
4585 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4586 return lhs;
4589 /* Subroutine for fold_truth_andor_1: decode a field reference.
4591 If EXP is a comparison reference, we return the innermost reference.
4593 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4594 set to the starting bit number.
4596 If the innermost field can be completely contained in a mode-sized
4597 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4599 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4600 otherwise it is not changed.
4602 *PUNSIGNEDP is set to the signedness of the field.
4604 *PREVERSEP is set to the storage order of the field.
4606 *PMASK is set to the mask used. This is either contained in a
4607 BIT_AND_EXPR or derived from the width of the field.
4609 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4611 Return 0 if this is not a component reference or is one that we can't
4612 do anything with. */
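/* For illustration (hypothetical trees): for EXP = s.f & 3 where s.f
   is an 8-bit field, this strips the BIT_AND_EXPR, returns the base
   object underlying s.f, sets *PBITSIZE and *PBITPOS from the field,
   sets *PAND_MASK to 3, and sets *PMASK to the field mask 0xFF
   merged with 3, i.e. 3.  */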
4614 static tree
4615 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4616 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4617 int *punsignedp, int *preversep, int *pvolatilep,
4618 tree *pmask, tree *pand_mask)
4620 tree exp = *exp_;
4621 tree outer_type = 0;
4622 tree and_mask = 0;
4623 tree mask, inner, offset;
4624 tree unsigned_type;
4625 unsigned int precision;
4627 /* All the optimizations using this function assume integer fields.
4628 There are problems with FP fields since the type_for_size call
4629 below can fail for, e.g., XFmode. */
4630 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4631 return NULL_TREE;
4633 /* We are interested in the bare arrangement of bits, so strip everything
4634 that doesn't affect the machine mode. However, record the type of the
4635 outermost expression if it may matter below. */
4636 if (CONVERT_EXPR_P (exp)
4637 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4638 outer_type = TREE_TYPE (exp);
4639 STRIP_NOPS (exp);
4641 if (TREE_CODE (exp) == BIT_AND_EXPR)
4643 and_mask = TREE_OPERAND (exp, 1);
4644 exp = TREE_OPERAND (exp, 0);
4645 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4646 if (TREE_CODE (and_mask) != INTEGER_CST)
4647 return NULL_TREE;
4650 poly_int64 poly_bitsize, poly_bitpos;
4651 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4652 pmode, punsignedp, preversep, pvolatilep);
4653 if ((inner == exp && and_mask == 0)
4654 || !poly_bitsize.is_constant (pbitsize)
4655 || !poly_bitpos.is_constant (pbitpos)
4656 || *pbitsize < 0
4657 || offset != 0
4658 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4659 /* Reject out-of-bound accesses (PR79731). */
4660 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4661 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4662 *pbitpos + *pbitsize) < 0))
4663 return NULL_TREE;
4665 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4666 if (unsigned_type == NULL_TREE)
4667 return NULL_TREE;
4669 *exp_ = exp;
4671 /* If the number of bits in the reference is the same as the bitsize of
4672 the outer type, then the outer type gives the signedness. Otherwise
4673 (in case of a small bitfield) the signedness is unchanged. */
4674 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4675 *punsignedp = TYPE_UNSIGNED (outer_type);
4677 /* Compute the mask to access the bitfield. */
4678 precision = TYPE_PRECISION (unsigned_type);
4680 mask = build_int_cst_type (unsigned_type, -1);
4682 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4683 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4685 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4686 if (and_mask != 0)
4687 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4688 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4690 *pmask = mask;
4691 *pand_mask = and_mask;
4692 return inner;
4695 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4696 bit positions and the type of MASK is signed. */
4698 static bool
4699 all_ones_mask_p (const_tree mask, unsigned int size)
4701 tree type = TREE_TYPE (mask);
4702 unsigned int precision = TYPE_PRECISION (type);
4704 /* If this function returns true when the type of the mask is
4705 UNSIGNED, then there will be errors. In particular see
4706 gcc.c-torture/execute/990326-1.c. There does not appear to be
4707 any documentation paper trail as to why this is so. But the
4708 pre-wide-int code worked with that restriction and it has been preserved
4709 here. */
4710 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4711 return false;
4713 return wi::mask (size, false, precision) == wi::to_wide (mask);
4716 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4717 represents the sign bit of EXP's type. If EXP represents a sign
4718 or zero extension, also test VAL against the unextended type.
4719 The return value is the (sub)expression whose sign bit is VAL,
4720 or NULL_TREE otherwise. */
4722 tree
4723 sign_bit_p (tree exp, const_tree val)
4725 int width;
4726 tree t;
4728 /* Tree EXP must have an integral type. */
4729 t = TREE_TYPE (exp);
4730 if (! INTEGRAL_TYPE_P (t))
4731 return NULL_TREE;
4733 /* Tree VAL must be an integer constant. */
4734 if (TREE_CODE (val) != INTEGER_CST
4735 || TREE_OVERFLOW (val))
4736 return NULL_TREE;
4738 width = TYPE_PRECISION (t);
4739 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4740 return exp;
4742 /* Handle extension from a narrower type. */
4743 if (TREE_CODE (exp) == NOP_EXPR
4744 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4745 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4747 return NULL_TREE;
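/* For illustration (hypothetical values): with 32-bit int X and
   VAL = 0x80000000, only the sign bit of VAL is set, so
   sign_bit_p (X, VAL) returns X; for a widening NOP_EXPR such as
   (int) s with 16-bit s and VAL = 0x8000, it recurses and
   recognizes the sign bit of the narrower inner type, returning the
   inner expression.  */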
4750 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4751 to be evaluated unconditionally. */
4753 static bool
4754 simple_operand_p (const_tree exp)
4756 /* Strip any conversions that don't change the machine mode. */
4757 STRIP_NOPS (exp);
4759 return (CONSTANT_CLASS_P (exp)
4760 || TREE_CODE (exp) == SSA_NAME
4761 || (DECL_P (exp)
4762 && ! TREE_ADDRESSABLE (exp)
4763 && ! TREE_THIS_VOLATILE (exp)
4764 && ! DECL_NONLOCAL (exp)
4765 /* Don't regard global variables as simple. They may be
4766 allocated in ways unknown to the compiler (shared memory,
4767 #pragma weak, etc). */
4768 && ! TREE_PUBLIC (exp)
4769 && ! DECL_EXTERNAL (exp)
4770 /* Weakrefs are not safe to read, since they can be NULL.
4771 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4772 have DECL_WEAK flag set. */
4773 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4774 /* Loading a static variable is unduly expensive, but global
4775 registers aren't expensive. */
4776 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4779 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4780 to be evaluated unconditionally.
4781 In addition to simple_operand_p, we assume that comparisons, conversions,
4782 and logic-not operations are simple, if their operands are simple, too. */
4784 static bool
4785 simple_operand_p_2 (tree exp)
4787 enum tree_code code;
4789 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4790 return false;
4792 while (CONVERT_EXPR_P (exp))
4793 exp = TREE_OPERAND (exp, 0);
4795 code = TREE_CODE (exp);
4797 if (TREE_CODE_CLASS (code) == tcc_comparison)
4798 return (simple_operand_p (TREE_OPERAND (exp, 0))
4799 && simple_operand_p (TREE_OPERAND (exp, 1)));
4801 if (code == TRUTH_NOT_EXPR)
4802 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4804 return simple_operand_p (exp);
4808 /* The following functions are subroutines to fold_range_test and allow it to
4809 try to change a logical combination of comparisons into a range test.
4811 For example, both
4812 X == 2 || X == 3 || X == 4 || X == 5
4813 and
4814 X >= 2 && X <= 5
4815 are converted to
4816 (unsigned) (X - 2) <= 3
4818 We describe each set of comparisons as being either inside or outside
4819 a range, using a variable named like IN_P, and then describe the
4820 range with a lower and upper bound. If one of the bounds is omitted,
4821 it represents either the highest or lowest value of the type.
4823 In the comments below, we represent a range by two numbers in brackets
4824 preceded by a "+" to designate being inside that range, or a "-" to
4825 designate being outside that range, so the condition can be inverted by
4826 flipping the prefix. An omitted bound is represented by a "-". For
4827 example, "- [-, 10]" means being outside the range starting at the lowest
4828 possible value and ending at 10, in other words, being greater than 10.
4829 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4830 always false.
4832 We set up things so that the missing bounds are handled in a consistent
4833 manner so neither a missing bound nor "true" and "false" need to be
4834 handled using a special case. */
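/* For illustration (hypothetical values, 32-bit int X): the test
   X == 2 || X == 3 || X == 4 || X == 5 is the range "+ [2, 5]" and
   becomes (unsigned) (X - 2) <= 3; X = 2 maps to 0 and X = 5 to 3,
   while any X < 2 wraps around to a huge unsigned value, so a single
   unsigned comparison replaces four equality tests.  */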
4836 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4837 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4838 and UPPER1_P are nonzero if the respective argument is an upper bound
4839 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4840 must be specified for a comparison. ARG1 will be converted to ARG0's
4841 type if both are specified. */
4843 static tree
4844 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4845 tree arg1, int upper1_p)
4847 tree tem;
4848 int result;
4849 int sgn0, sgn1;
4851 /* If neither arg represents infinity, do the normal operation.
4852 Else, if not a comparison, return infinity. Else handle the special
4853 comparison rules. Note that most of the cases below won't occur, but
4854 are handled for consistency. */
4856 if (arg0 != 0 && arg1 != 0)
4858 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4859 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4860 STRIP_NOPS (tem);
4861 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4864 if (TREE_CODE_CLASS (code) != tcc_comparison)
4865 return 0;
4867 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4868 for neither. In real maths, we cannot assume open-ended ranges are
4869 the same. But this is computer arithmetic, where numbers are finite.
4870 We can therefore stand in for a missing bound with a value beyond every
4871 representable number: above for an upper bound, below for a lower. This permits
4872 us to treat unbounded ranges as equal. */
4873 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4874 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4875 switch (code)
4877 case EQ_EXPR:
4878 result = sgn0 == sgn1;
4879 break;
4880 case NE_EXPR:
4881 result = sgn0 != sgn1;
4882 break;
4883 case LT_EXPR:
4884 result = sgn0 < sgn1;
4885 break;
4886 case LE_EXPR:
4887 result = sgn0 <= sgn1;
4888 break;
4889 case GT_EXPR:
4890 result = sgn0 > sgn1;
4891 break;
4892 case GE_EXPR:
4893 result = sgn0 >= sgn1;
4894 break;
4895 default:
4896 gcc_unreachable ();
4899 return constant_boolean_node (result, type);
4902 /* Helper routine for make_range. Perform one step for it, return
4903 new expression if the loop should continue or NULL_TREE if it should
4904 stop. */
4906 tree
4907 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4908 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4909 bool *strict_overflow_p)
4911 tree arg0_type = TREE_TYPE (arg0);
4912 tree n_low, n_high, low = *p_low, high = *p_high;
4913 int in_p = *p_in_p, n_in_p;
4915 switch (code)
4917 case TRUTH_NOT_EXPR:
4918 /* We can only do something if the range is testing for zero. */
4919 if (low == NULL_TREE || high == NULL_TREE
4920 || ! integer_zerop (low) || ! integer_zerop (high))
4921 return NULL_TREE;
4922 *p_in_p = ! in_p;
4923 return arg0;
4925 case EQ_EXPR: case NE_EXPR:
4926 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4927 /* We can only do something if the range is testing for zero
4928 and if the second operand is an integer constant. Note that
4929 saying something is "in" the range we make is done by
4930 complementing IN_P since it will set in the initial case of
4931 being not equal to zero; "out" is leaving it alone. */
4932 if (low == NULL_TREE || high == NULL_TREE
4933 || ! integer_zerop (low) || ! integer_zerop (high)
4934 || TREE_CODE (arg1) != INTEGER_CST)
4935 return NULL_TREE;
4937 switch (code)
4939 case NE_EXPR: /* - [c, c] */
4940 low = high = arg1;
4941 break;
4942 case EQ_EXPR: /* + [c, c] */
4943 in_p = ! in_p, low = high = arg1;
4944 break;
4945 case GT_EXPR: /* - [-, c] */
4946 low = 0, high = arg1;
4947 break;
4948 case GE_EXPR: /* + [c, -] */
4949 in_p = ! in_p, low = arg1, high = 0;
4950 break;
4951 case LT_EXPR: /* - [c, -] */
4952 low = arg1, high = 0;
4953 break;
4954 case LE_EXPR: /* + [-, c] */
4955 in_p = ! in_p, low = 0, high = arg1;
4956 break;
4957 default:
4958 gcc_unreachable ();
4961 /* If this is an unsigned comparison, we also know that EXP is
4962 greater than or equal to zero. We base the range tests we make
4963 on that fact, so we record it here so we can parse existing
4964 range tests. We test arg0_type since often the return type
4965 of, e.g. EQ_EXPR, is boolean. */
4966 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4968 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4969 in_p, low, high, 1,
4970 build_int_cst (arg0_type, 0),
4971 NULL_TREE))
4972 return NULL_TREE;
4974 in_p = n_in_p, low = n_low, high = n_high;
4976 /* If the high bound is missing, but we have a nonzero low
4977 bound, reverse the range so it goes from zero to the low bound
4978 minus 1. */
4979 if (high == 0 && low && ! integer_zerop (low))
4981 in_p = ! in_p;
4982 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4983 build_int_cst (TREE_TYPE (low), 1), 0);
4984 low = build_int_cst (arg0_type, 0);
4988 *p_low = low;
4989 *p_high = high;
4990 *p_in_p = in_p;
4991 return arg0;
4993 case NEGATE_EXPR:
4994 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4995 low and high are non-NULL, then normalize will DTRT. */
4996 if (!TYPE_UNSIGNED (arg0_type)
4997 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4999 if (low == NULL_TREE)
5000 low = TYPE_MIN_VALUE (arg0_type);
5001 if (high == NULL_TREE)
5002 high = TYPE_MAX_VALUE (arg0_type);
5005 /* (-x) IN [a,b] -> x in [-b, -a] */
5006 n_low = range_binop (MINUS_EXPR, exp_type,
5007 build_int_cst (exp_type, 0),
5008 0, high, 1);
5009 n_high = range_binop (MINUS_EXPR, exp_type,
5010 build_int_cst (exp_type, 0),
5011 0, low, 0);
5012 if (n_high != 0 && TREE_OVERFLOW (n_high))
5013 return NULL_TREE;
5014 goto normalize;
5016 case BIT_NOT_EXPR:
5017 /* ~ X -> -X - 1 */
5018 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5019 build_int_cst (exp_type, 1));
5021 case PLUS_EXPR:
5022 case MINUS_EXPR:
5023 if (TREE_CODE (arg1) != INTEGER_CST)
5024 return NULL_TREE;
5026 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5027 move a constant to the other side. */
5028 if (!TYPE_UNSIGNED (arg0_type)
5029 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5030 return NULL_TREE;
5032 /* If EXP is signed, any overflow in the computation is undefined,
5033 so we don't worry about it so long as our computations on
5034 the bounds don't overflow. For unsigned, overflow is defined
5035 and this is exactly the right thing. */
5036 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5037 arg0_type, low, 0, arg1, 0);
5038 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5039 arg0_type, high, 1, arg1, 0);
5040 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5041 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5042 return NULL_TREE;
5044 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5045 *strict_overflow_p = true;
5047 normalize:
5048 /* Check for an unsigned range which has wrapped around the maximum
5049 value thus making n_high < n_low, and normalize it. */
5050 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5052 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5053 build_int_cst (TREE_TYPE (n_high), 1), 0);
5054 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5055 build_int_cst (TREE_TYPE (n_low), 1), 0);
5057 /* If the range is of the form +/- [ x+1, x ], we won't
5058 be able to normalize it. But then, it represents the
5059 whole range or the empty set, so make it
5060 +/- [ -, - ]. */
5061 if (tree_int_cst_equal (n_low, low)
5062 && tree_int_cst_equal (n_high, high))
5063 low = high = 0;
5064 else
5065 in_p = ! in_p;
5067 else
5068 low = n_low, high = n_high;
5070 *p_low = low;
5071 *p_high = high;
5072 *p_in_p = in_p;
5073 return arg0;
5075 CASE_CONVERT:
5076 case NON_LVALUE_EXPR:
5077 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5078 return NULL_TREE;
5080 if (! INTEGRAL_TYPE_P (arg0_type)
5081 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5082 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5083 return NULL_TREE;
5085 n_low = low, n_high = high;
5087 if (n_low != 0)
5088 n_low = fold_convert_loc (loc, arg0_type, n_low);
5090 if (n_high != 0)
5091 n_high = fold_convert_loc (loc, arg0_type, n_high);
5093 /* If we're converting ARG0, which has an unsigned type, to EXP's
5094 signed type, we will be doing the comparison as unsigned.
5095 The tests above have already verified that LOW and HIGH
5096 are both positive.
5098 So we have to ensure that we will handle large unsigned
5099 values the same way that the current signed bounds treat
5100 negative values. */
5102 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5104 tree high_positive;
5105 tree equiv_type;
5106 /* For fixed-point modes, we need to pass the saturating flag
5107 as the 2nd parameter. */
5108 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5109 equiv_type
5110 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5111 TYPE_SATURATING (arg0_type));
5112 else
5113 equiv_type
5114 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5116 /* A range without an upper bound is, naturally, unbounded.
5117 Since convert would have cropped a very large value, use
5118 the max value for the destination type. */
5119 high_positive
5120 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5121 : TYPE_MAX_VALUE (arg0_type);
5123 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5124 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5125 fold_convert_loc (loc, arg0_type,
5126 high_positive),
5127 build_int_cst (arg0_type, 1));
5129 /* If the low bound is specified, "and" the range with the
5130 range for which the original unsigned value will be
5131 positive. */
5132 if (low != 0)
5134 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5135 1, fold_convert_loc (loc, arg0_type,
5136 integer_zero_node),
5137 high_positive))
5138 return NULL_TREE;
5140 in_p = (n_in_p == in_p);
5142 else
5144 /* Otherwise, "or" the range with the range of the input
5145 that will be interpreted as negative. */
5146 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5147 1, fold_convert_loc (loc, arg0_type,
5148 integer_zero_node),
5149 high_positive))
5150 return NULL_TREE;
5152 in_p = (in_p != n_in_p);
5156 *p_low = n_low;
5157 *p_high = n_high;
5158 *p_in_p = in_p;
5159 return arg0;
5161 default:
5162 return NULL_TREE;
5166 /* Given EXP, a logical expression, set the range it is testing into
5167 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5168 actually being tested. *PLOW and *PHIGH will be made of the same
5169 type as the returned expression. If EXP is not a comparison, we
5170 will most likely not be returning a useful value and range. Set
5171 *STRICT_OVERFLOW_P to true if the return value is only valid
5172 because signed overflow is undefined; otherwise, do not change
5173 *STRICT_OVERFLOW_P. */
5175 tree
5176 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5177 bool *strict_overflow_p)
5179 enum tree_code code;
5180 tree arg0, arg1 = NULL_TREE;
5181 tree exp_type, nexp;
5182 int in_p;
5183 tree low, high;
5184 location_t loc = EXPR_LOCATION (exp);
5186 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5187 and see if we can refine the range. Some of the cases below may not
5188 happen, but it doesn't seem worth worrying about this. We keep
5189 iterating as long as make_range_step can refine the range; it
5190 returns NULL_TREE once nothing more can be done. */
5192 in_p = 0;
5193 low = high = build_int_cst (TREE_TYPE (exp), 0);
5195 while (1)
5197 code = TREE_CODE (exp);
5198 exp_type = TREE_TYPE (exp);
5199 arg0 = NULL_TREE;
5201 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5203 if (TREE_OPERAND_LENGTH (exp) > 0)
5204 arg0 = TREE_OPERAND (exp, 0);
5205 if (TREE_CODE_CLASS (code) == tcc_binary
5206 || TREE_CODE_CLASS (code) == tcc_comparison
5207 || (TREE_CODE_CLASS (code) == tcc_expression
5208 && TREE_OPERAND_LENGTH (exp) > 1))
5209 arg1 = TREE_OPERAND (exp, 1);
5211 if (arg0 == NULL_TREE)
5212 break;
5214 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5215 &high, &in_p, strict_overflow_p);
5216 if (nexp == NULL_TREE)
5217 break;
5218 exp = nexp;
5221 /* If EXP is a constant, we can evaluate whether this is true or false. */
5222 if (TREE_CODE (exp) == INTEGER_CST)
5224 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5225 exp, 0, low, 0))
5226 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5227 exp, 1, high, 1)));
5228 low = high = 0;
5229 exp = 0;
5232 *pin_p = in_p, *plow = low, *phigh = high;
5233 return exp;
5236 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5237 a bitwise check i.e. when
5238 LOW == 0xXX...X00...0
5239 HIGH == 0xXX...X11...1
5240 Return corresponding mask in MASK and stem in VALUE. */
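/* For illustration (hypothetical values): LOW = 0x30 (0b110000) and
   HIGH = 0x37 (0b110111) give
     end_mask  = LOW ^ HIGH      = 0b000111
     stem_mask = ~end_mask
     stem      = LOW & stem_mask = 0x30
   so the range check folds to (x & ~7) == 0x30.  */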
5242 static bool
5243 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5244 tree *value)
5246 if (TREE_CODE (low) != INTEGER_CST
5247 || TREE_CODE (high) != INTEGER_CST)
5248 return false;
5250 unsigned prec = TYPE_PRECISION (type);
5251 wide_int lo = wi::to_wide (low, prec);
5252 wide_int hi = wi::to_wide (high, prec);
5254 wide_int end_mask = lo ^ hi;
5255 if ((end_mask & (end_mask + 1)) != 0
5256 || (lo & end_mask) != 0)
5257 return false;
5259 wide_int stem_mask = ~end_mask;
5260 wide_int stem = lo & stem_mask;
5261 if (stem != (hi & stem_mask))
5262 return false;
5264 *mask = wide_int_to_tree (type, stem_mask);
5265 *value = wide_int_to_tree (type, stem);
5267 return true;
5270 /* Helper routine for build_range_check and match.pd. Return the type to
5271 perform the check or NULL if it shouldn't be optimized. */
5273 tree
5274 range_check_type (tree etype)
5276 /* First make sure that arithmetic in this type is valid, then make sure
5277 that it wraps around. */
5278 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5279 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5281 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5283 tree utype, minv, maxv;
5285 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5286 for the type in question, as we rely on this here. */
5287 utype = unsigned_type_for (etype);
5288 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5289 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5290 build_int_cst (TREE_TYPE (maxv), 1), 1);
5291 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5293 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5294 minv, 1, maxv, 1)))
5295 etype = utype;
5296 else
5297 return NULL_TREE;
5299 else if (POINTER_TYPE_P (etype))
5300 etype = unsigned_type_for (etype);
5301 return etype;
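/* For illustration (hypothetical values): for ETYPE = signed char,
   UTYPE = unsigned char satisfies
     (unsigned char) SCHAR_MAX + 1 == 128 == (unsigned char) SCHAR_MIN
   so the wrap-around property holds and unsigned char is the type
   returned for the range arithmetic.  */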
5304 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5305 type, TYPE, return an expression to test if EXP is in (or out of, depending
5306 on IN_P) the range. Return 0 if the test couldn't be created. */
5308 tree
5309 build_range_check (location_t loc, tree type, tree exp, int in_p,
5310 tree low, tree high)
5312 tree etype = TREE_TYPE (exp), mask, value;
5314 /* Disable this optimization for function pointer expressions
5315 on targets that require function pointer canonicalization. */
5316 if (targetm.have_canonicalize_funcptr_for_compare ()
5317 && POINTER_TYPE_P (etype)
5318 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5319 return NULL_TREE;
5321 if (! in_p)
5323 value = build_range_check (loc, type, exp, 1, low, high);
5324 if (value != 0)
5325 return invert_truthvalue_loc (loc, value);
5327 return 0;
5330 if (low == 0 && high == 0)
5331 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5333 if (low == 0)
5334 return fold_build2_loc (loc, LE_EXPR, type, exp,
5335 fold_convert_loc (loc, etype, high));
5337 if (high == 0)
5338 return fold_build2_loc (loc, GE_EXPR, type, exp,
5339 fold_convert_loc (loc, etype, low));
5341 if (operand_equal_p (low, high, 0))
5342 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5343 fold_convert_loc (loc, etype, low));
5345 if (TREE_CODE (exp) == BIT_AND_EXPR
5346 && maskable_range_p (low, high, etype, &mask, &value))
5347 return fold_build2_loc (loc, EQ_EXPR, type,
5348 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5349 exp, mask),
5350 value);
5352 if (integer_zerop (low))
5354 if (! TYPE_UNSIGNED (etype))
5356 etype = unsigned_type_for (etype);
5357 high = fold_convert_loc (loc, etype, high);
5358 exp = fold_convert_loc (loc, etype, exp);
5360 return build_range_check (loc, type, exp, 1, 0, high);
5363 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5364 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5366 int prec = TYPE_PRECISION (etype);
5368 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5370 if (TYPE_UNSIGNED (etype))
5372 tree signed_etype = signed_type_for (etype);
5373 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5374 etype
5375 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5376 else
5377 etype = signed_etype;
5378 exp = fold_convert_loc (loc, etype, exp);
5380 return fold_build2_loc (loc, GT_EXPR, type, exp,
5381 build_int_cst (etype, 0));
5385 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5386 This requires wrap-around arithmetic for the type of the expression. */
5387 etype = range_check_type (etype);
5388 if (etype == NULL_TREE)
5389 return NULL_TREE;
5391 high = fold_convert_loc (loc, etype, high);
5392 low = fold_convert_loc (loc, etype, low);
5393 exp = fold_convert_loc (loc, etype, exp);
5395 value = const_binop (MINUS_EXPR, high, low);
5397 if (value != 0 && !TREE_OVERFLOW (value))
5398 return build_range_check (loc, type,
5399 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5400 1, build_int_cst (etype, 0), value);
5402 return 0;
5405 /* Return the predecessor of VAL in its type, handling the infinite case. */
5407 static tree
5408 range_predecessor (tree val)
5410 tree type = TREE_TYPE (val);
5412 if (INTEGRAL_TYPE_P (type)
5413 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5414 return 0;
5415 else
5416 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5417 build_int_cst (TREE_TYPE (val), 1), 0);
5420 /* Return the successor of VAL in its type, handling the infinite case. */
5422 static tree
5423 range_successor (tree val)
5425 tree type = TREE_TYPE (val);
5427 if (INTEGRAL_TYPE_P (type)
5428 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5429 return 0;
5430 else
5431 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5432 build_int_cst (TREE_TYPE (val), 1), 0);
5435 /* Given two ranges, see if we can merge them into one. Return 1 if we
5436 can, 0 if we can't. Set the output range into the specified parameters. */
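/* For illustration (hypothetical values): merging "+ [2, 5]" with
   "+ [4, 9]" yields the intersection "+ [4, 5]", while merging
   "- [2, 5]" with "- [6, 9]" yields "- [2, 9]", the two excluded
   ranges being adjacent.  */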
5438 bool
5439 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5440 tree high0, int in1_p, tree low1, tree high1)
5442 int no_overlap;
5443 int subset;
5444 int temp;
5445 tree tem;
5446 int in_p;
5447 tree low, high;
5448 int lowequal = ((low0 == 0 && low1 == 0)
5449 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5450 low0, 0, low1, 0)));
5451 int highequal = ((high0 == 0 && high1 == 0)
5452 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5453 high0, 1, high1, 1)));
5455 /* Make range 0 be the range that starts first, or ends last if they
5456 start at the same value. Swap them if that is not already the case. */
5457 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5458 low0, 0, low1, 0))
5459 || (lowequal
5460 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5461 high1, 1, high0, 1))))
5463 temp = in0_p, in0_p = in1_p, in1_p = temp;
5464 tem = low0, low0 = low1, low1 = tem;
5465 tem = high0, high0 = high1, high1 = tem;
5468 /* If the second range is "!= high1" where high1 is the maximum value
5469 of its type, first try merging with the "< high1" range. */
5470 if (low1
5471 && high1
5472 && TREE_CODE (low1) == INTEGER_CST
5473 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5474 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5475 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5476 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5477 && operand_equal_p (low1, high1, 0))
5479 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5480 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5481 !in1_p, NULL_TREE, range_predecessor (low1)))
5482 return true;
5483 /* Similarly, if the second range is "!= low1" where low1 is the minimum
5484 value of its type, first try merging with the "> low1" range. */
5485 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5486 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5487 !in1_p, range_successor (low1), NULL_TREE))
5488 return true;
5491 /* Now flag two cases, whether the ranges are disjoint or whether the
5492 second range is totally subsumed in the first. Note that the tests
5493 below are simplified by the ones above. */
5494 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5495 high0, 1, low1, 0));
5496 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5497 high1, 1, high0, 1));
5499 /* We now have four cases, depending on whether we are including or
5500 excluding the two ranges. */
5501 if (in0_p && in1_p)
5503 /* If they don't overlap, the result is false. If the second range
5504 is a subset it is the result. Otherwise, the range is from the start
5505 of the second to the end of the first. */
5506 if (no_overlap)
5507 in_p = 0, low = high = 0;
5508 else if (subset)
5509 in_p = 1, low = low1, high = high1;
5510 else
5511 in_p = 1, low = low1, high = high0;
5514 else if (in0_p && ! in1_p)
5516 /* If they don't overlap, the result is the first range. If they are
5517 equal, the result is false. If the second range is a subset of the
5518 first, and the ranges begin at the same place, we go from just after
5519 the end of the second range to the end of the first. If the second
5520 range is not a subset of the first, or if it is a subset and both
5521 ranges end at the same place, the range starts at the start of the
5522 first range and ends just before the second range.
5523 Otherwise, we can't describe this as a single range. */
5524 if (no_overlap)
5525 in_p = 1, low = low0, high = high0;
5526 else if (lowequal && highequal)
5527 in_p = 0, low = high = 0;
5528 else if (subset && lowequal)
5530 low = range_successor (high1);
5531 high = high0;
5532 in_p = 1;
5533 if (low == 0)
5535 /* We are in the weird situation where high0 > high1 but
5536 high1 has no successor. Punt. */
5537 return 0;
5540 else if (! subset || highequal)
5542 low = low0;
5543 high = range_predecessor (low1);
5544 in_p = 1;
5545 if (high == 0)
5547 /* low0 < low1 but low1 has no predecessor. Punt. */
5548 return 0;
5551 else
5552 return 0;
5555 else if (! in0_p && in1_p)
5557 /* If they don't overlap, the result is the second range. If the second
5558 is a subset of the first, the result is false. Otherwise,
5559 the range starts just after the first range and ends at the
5560 end of the second. */
5561 if (no_overlap)
5562 in_p = 1, low = low1, high = high1;
5563 else if (subset || highequal)
5564 in_p = 0, low = high = 0;
5565 else
5567 low = range_successor (high0);
5568 high = high1;
5569 in_p = 1;
5570 if (low == 0)
5572 /* high1 > high0 but high0 has no successor. Punt. */
5573 return 0;
5578 else
5580 /* The case where we are excluding both ranges. Here the complex case
5581 is if they don't overlap. In that case, the only time we have a
5582 range is if they are adjacent. If the second is a subset of the
5583 first, the result is the first. Otherwise, the range to exclude
5584 starts at the beginning of the first range and ends at the end of the
5585 second. */
5586 if (no_overlap)
5588 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5589 range_successor (high0),
5590 1, low1, 0)))
5591 in_p = 0, low = low0, high = high1;
5592 else
5594 /* Canonicalize - [min, x] into - [-, x]. */
5595 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5596 switch (TREE_CODE (TREE_TYPE (low0)))
5598 case ENUMERAL_TYPE:
5599 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5600 GET_MODE_BITSIZE
5601 (TYPE_MODE (TREE_TYPE (low0)))))
5602 break;
5603 /* FALLTHROUGH */
5604 case INTEGER_TYPE:
5605 if (tree_int_cst_equal (low0,
5606 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5607 low0 = 0;
5608 break;
5609 case POINTER_TYPE:
5610 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5611 && integer_zerop (low0))
5612 low0 = 0;
5613 break;
5614 default:
5615 break;
5618 /* Canonicalize - [x, max] into - [x, -]. */
5619 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5620 switch (TREE_CODE (TREE_TYPE (high1)))
5622 case ENUMERAL_TYPE:
5623 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5624 GET_MODE_BITSIZE
5625 (TYPE_MODE (TREE_TYPE (high1)))))
5626 break;
5627 /* FALLTHROUGH */
5628 case INTEGER_TYPE:
5629 if (tree_int_cst_equal (high1,
5630 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5631 high1 = 0;
5632 break;
5633 case POINTER_TYPE:
5634 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5635 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5636 high1, 1,
5637 build_int_cst (TREE_TYPE (high1), 1),
5638 1)))
5639 high1 = 0;
5640 break;
5641 default:
5642 break;
5645 /* The ranges might also be adjacent between the maximum and
5646 minimum values of the given type. For
5647 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5648 return + [x + 1, y - 1]. */
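  /* For illustration: for unsigned char, - [-, 5] and - [10, -]
     yield + [6, 9], since the only values excluded by neither range
     lie strictly between the two.  */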
5649 if (low0 == 0 && high1 == 0)
5651 low = range_successor (high0);
5652 high = range_predecessor (low1);
5653 if (low == 0 || high == 0)
5654 return 0;
5656 in_p = 1;
5658 else
5659 return 0;
5662 else if (subset)
5663 in_p = 0, low = low0, high = high0;
5664 else
5665 in_p = 0, low = low0, high = high1;
5668 *pin_p = in_p, *plow = low, *phigh = high;
5669 return 1;
5673 /* Subroutine of fold, looking inside expressions of the form
5674 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5675 of the COND_EXPR. This function is being used also to optimize
5676 A op B ? C : A, by reversing the comparison first.
5678 Return a folded expression whose code is not a COND_EXPR
5679 anymore, or NULL_TREE if no folding opportunity is found. */
5681 static tree
5682 fold_cond_expr_with_comparison (location_t loc, tree type,
5683 tree arg0, tree arg1, tree arg2)
5685 enum tree_code comp_code = TREE_CODE (arg0);
5686 tree arg00 = TREE_OPERAND (arg0, 0);
5687 tree arg01 = TREE_OPERAND (arg0, 1);
5688 tree arg1_type = TREE_TYPE (arg1);
5689 tree tem;
5691 STRIP_NOPS (arg1);
5692 STRIP_NOPS (arg2);
5694 /* If we have A op 0 ? A : -A, consider applying the following
5695 transformations:
5697 A == 0? A : -A same as -A
5698 A != 0? A : -A same as A
5699 A >= 0? A : -A same as abs (A)
5700 A > 0? A : -A same as abs (A)
5701 A <= 0? A : -A same as -abs (A)
5702 A < 0? A : -A same as -abs (A)
5704 None of these transformations work for modes with signed
5705 zeros. If A is +/-0, the first two transformations will
5706 change the sign of the result (from +0 to -0, or vice
5707 versa). The last four will fix the sign of the result,
5708 even though the original expressions could be positive or
5709 negative, depending on the sign of A.
5711 Note that all these transformations are correct if A is
5712 NaN, since the two alternatives (A and -A) are also NaNs. */
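  /* For illustration, the signed-zero hazard: with A = -0.0,
     A == 0 ? A : -A evaluates to -0.0, while the folded form -A
     gives +0.0.  */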
5713 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5714 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5715 ? real_zerop (arg01)
5716 : integer_zerop (arg01))
5717 && ((TREE_CODE (arg2) == NEGATE_EXPR
5718 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5719 /* In the case that A is of the form X-Y, '-A' (arg2) may
5720 have already been folded to Y-X, check for that. */
5721 || (TREE_CODE (arg1) == MINUS_EXPR
5722 && TREE_CODE (arg2) == MINUS_EXPR
5723 && operand_equal_p (TREE_OPERAND (arg1, 0),
5724 TREE_OPERAND (arg2, 1), 0)
5725 && operand_equal_p (TREE_OPERAND (arg1, 1),
5726 TREE_OPERAND (arg2, 0), 0))))
5727 switch (comp_code)
5729 case EQ_EXPR:
5730 case UNEQ_EXPR:
5731 tem = fold_convert_loc (loc, arg1_type, arg1);
5732 return fold_convert_loc (loc, type, negate_expr (tem));
5733 case NE_EXPR:
5734 case LTGT_EXPR:
5735 return fold_convert_loc (loc, type, arg1);
5736 case UNGE_EXPR:
5737 case UNGT_EXPR:
5738 if (flag_trapping_math)
5739 break;
5740 /* Fall through. */
5741 case GE_EXPR:
5742 case GT_EXPR:
5743 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5744 break;
5745 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5746 return fold_convert_loc (loc, type, tem);
5747 case UNLE_EXPR:
5748 case UNLT_EXPR:
5749 if (flag_trapping_math)
5750 break;
5751 /* FALLTHRU */
5752 case LE_EXPR:
5753 case LT_EXPR:
5754 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5755 break;
5756 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5757 return negate_expr (fold_convert_loc (loc, type, tem));
5758 default:
5759 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5760 break;
5763 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5764 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5765 both transformations are correct when A is NaN: A != 0
5766 is then true, and A == 0 is false. */
5768 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5769 && integer_zerop (arg01) && integer_zerop (arg2))
5771 if (comp_code == NE_EXPR)
5772 return fold_convert_loc (loc, type, arg1);
5773 else if (comp_code == EQ_EXPR)
5774 return build_zero_cst (type);
5777 /* Try some transformations of A op B ? A : B.
5779 A == B? A : B same as B
5780 A != B? A : B same as A
5781 A >= B? A : B same as max (A, B)
5782 A > B? A : B same as max (B, A)
5783 A <= B? A : B same as min (A, B)
5784 A < B? A : B same as min (B, A)
5786 As above, these transformations don't work in the presence
5787 of signed zeros. For example, if A and B are zeros of
5788 opposite sign, the first two transformations will change
5789 the sign of the result. In the last four, the original
5790 expressions give different results for (A=+0, B=-0) and
5791 (A=-0, B=+0), but the transformed expressions do not.
5793 The first two transformations are correct if either A or B
5794 is a NaN. In the first transformation, the condition will
5795 be false, and B will indeed be chosen. In the case of the
5796 second transformation, the condition A != B will be true,
5797 and A will be chosen.
5799 The conversions to max() and min() are not correct if B is
5800 a number and A is not. The conditions in the original
5801 expressions will be false, so all four give B. The min()
5802 and max() versions would give a NaN instead. */
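  /* For illustration, the NaN hazard: with A = NaN and B = 1.0,
     A < B ? A : B evaluates to B (1.0), whereas MIN (A, B) may
     yield NaN.  */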
5803 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5804 && operand_equal_for_comparison_p (arg01, arg2)
5805 /* Avoid these transformations if the COND_EXPR may be used
5806 as an lvalue in the C++ front-end. PR c++/19199. */
5807 && (in_gimple_form
5808 || VECTOR_TYPE_P (type)
5809 || (! lang_GNU_CXX ()
5810 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5811 || ! maybe_lvalue_p (arg1)
5812 || ! maybe_lvalue_p (arg2)))
5814 tree comp_op0 = arg00;
5815 tree comp_op1 = arg01;
5816 tree comp_type = TREE_TYPE (comp_op0);
5818 switch (comp_code)
5820 case EQ_EXPR:
5821 return fold_convert_loc (loc, type, arg2);
5822 case NE_EXPR:
5823 return fold_convert_loc (loc, type, arg1);
5824 case LE_EXPR:
5825 case LT_EXPR:
5826 case UNLE_EXPR:
5827 case UNLT_EXPR:
5828 /* In C++ a ?: expression can be an lvalue, so put the
5829 operand which will be used if they are equal first
5830 so that we can convert this back to the
5831 corresponding COND_EXPR. */
5832 if (!HONOR_NANS (arg1))
5834 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5835 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5836 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5837 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5838 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5839 comp_op1, comp_op0);
5840 return fold_convert_loc (loc, type, tem);
5842 break;
5843 case GE_EXPR:
5844 case GT_EXPR:
5845 case UNGE_EXPR:
5846 case UNGT_EXPR:
5847 if (!HONOR_NANS (arg1))
5849 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5850 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5851 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5852 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5853 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5854 comp_op1, comp_op0);
5855 return fold_convert_loc (loc, type, tem);
5857 break;
5858 case UNEQ_EXPR:
5859 if (!HONOR_NANS (arg1))
5860 return fold_convert_loc (loc, type, arg2);
5861 break;
5862 case LTGT_EXPR:
5863 if (!HONOR_NANS (arg1))
5864 return fold_convert_loc (loc, type, arg1);
5865 break;
5866 default:
5867 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5868 break;
5872 return NULL_TREE;
5877 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5878 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5879 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5880 false) >= 2)
5881 #endif
5883 /* EXP is some logical combination of boolean tests. See if we can
5884 merge it into some range test. Return the new tree if so. */
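/* For instance, ch >= '0' && ch <= '9' denotes the single range
   + ['0', '9'], which build_range_check can typically emit as one
   unsigned comparison such as (unsigned char) (ch - '0') <= 9.  */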
5886 static tree
5887 fold_range_test (location_t loc, enum tree_code code, tree type,
5888 tree op0, tree op1)
5890 int or_op = (code == TRUTH_ORIF_EXPR
5891 || code == TRUTH_OR_EXPR);
5892 int in0_p, in1_p, in_p;
5893 tree low0, low1, low, high0, high1, high;
5894 bool strict_overflow_p = false;
5895 tree tem, lhs, rhs;
5896 const char * const warnmsg = G_("assuming signed overflow does not occur "
5897 "when simplifying range test");
5899 if (!INTEGRAL_TYPE_P (type))
5900 return 0;
5902 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5903 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5905 /* If this is an OR operation, invert both sides; we will invert
5906 again at the end. */
5907 if (or_op)
5908 in0_p = ! in0_p, in1_p = ! in1_p;
5910 /* If both expressions are the same, if we can merge the ranges, and we
5911 can build the range test, return it or it inverted. If one of the
5912 ranges is always true or always false, consider it to be the same
5913 expression as the other. */
5914 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5915 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5916 in1_p, low1, high1)
5917 && (tem = (build_range_check (loc, type,
5918 lhs != 0 ? lhs
5919 : rhs != 0 ? rhs : integer_zero_node,
5920 in_p, low, high))) != 0)
5922 if (strict_overflow_p)
5923 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5924 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5927 /* On machines where branches are expensive, if this is a
5928 short-circuited branch and the underlying object on both sides
5929 is the same, make a non-short-circuit operation. */
5930 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5931 if (param_logical_op_non_short_circuit != -1)
5932 logical_op_non_short_circuit
5933 = param_logical_op_non_short_circuit;
5934 if (logical_op_non_short_circuit
5935 && !flag_sanitize_coverage
5936 && lhs != 0 && rhs != 0
5937 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5938 && operand_equal_p (lhs, rhs, 0))
5940 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5941 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5942 which case we can't do this. */
5943 if (simple_operand_p (lhs))
5944 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5945 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5946 type, op0, op1);
5948 else if (!lang_hooks.decls.global_bindings_p ()
5949 && !CONTAINS_PLACEHOLDER_P (lhs))
5951 tree common = save_expr (lhs);
5953 if ((lhs = build_range_check (loc, type, common,
5954 or_op ? ! in0_p : in0_p,
5955 low0, high0)) != 0
5956 && (rhs = build_range_check (loc, type, common,
5957 or_op ? ! in1_p : in1_p,
5958 low1, high1)) != 0)
5960 if (strict_overflow_p)
5961 fold_overflow_warning (warnmsg,
5962 WARN_STRICT_OVERFLOW_COMPARISON);
5963 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5964 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5965 type, lhs, rhs);
5970 return 0;
5973 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5974 bit value. Arrange things so the extra bits will be set to zero if and
5975 only if C is sign-extended to its full width. If MASK is nonzero,
5976 it is an INTEGER_CST that should be AND'ed with the extra bits. */
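/* For illustration, with P = 8 in a 32-bit type and no MASK:
   C = 0xffffffff (already sign-extended from 8 bits) becomes 0xff,
   whose extra bits are clear, while C = 0xff becomes 0xffffffff,
   whose extra bits are set.  */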
5978 static tree
5979 unextend (tree c, int p, int unsignedp, tree mask)
5981 tree type = TREE_TYPE (c);
5982 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5983 tree temp;
5985 if (p == modesize || unsignedp)
5986 return c;
5988 /* We work by getting just the sign bit into the low-order bit, then
5989 into the high-order bit, then sign-extend. We then XOR that value
5990 with C. */
5991 temp = build_int_cst (TREE_TYPE (c),
5992 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5994 /* We must use a signed type in order to get an arithmetic right shift.
5995 However, we must also avoid introducing accidental overflows, so that
5996 a subsequent call to integer_zerop will work. Hence we must
5997 do the type conversion here. At this point, the constant is either
5998 zero or one, and the conversion to a signed type can never overflow.
5999 We could get an overflow if this conversion is done anywhere else. */
6000 if (TYPE_UNSIGNED (type))
6001 temp = fold_convert (signed_type_for (type), temp);
6003 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6004 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6005 if (mask != 0)
6006 temp = const_binop (BIT_AND_EXPR, temp,
6007 fold_convert (TREE_TYPE (c), mask));
6008 /* If necessary, convert the type back to match the type of C. */
6009 if (TYPE_UNSIGNED (type))
6010 temp = fold_convert (type, temp);
6012 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6015 /* For an expression that has the form
6016 (A && B) || ~B
6017 or
6018 (A || B) && ~B,
6019 we can drop one of the inner expressions and simplify to
6020 A || ~B
6021 or
6022 A && ~B
6023 LOC is the location of the resulting expression. OP is the inner
6024 logical operation; the left-hand side in the examples above, while CMPOP
6025 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6026 removing a condition that guards another, as in
6027 (A != NULL && A->...) || A == NULL
6028 which we must not transform. If RHS_ONLY is true, only eliminate the
6029 right-most operand of the inner logical operation. */
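/* For illustration: (x > 0 && y != 2) || x <= 0 can drop the inner
   test contradicted by the other arm and become
   (y != 2) || x <= 0.  */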
6031 static tree
6032 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6033 bool rhs_only)
6035 tree type = TREE_TYPE (cmpop);
6036 enum tree_code code = TREE_CODE (cmpop);
6037 enum tree_code truthop_code = TREE_CODE (op);
6038 tree lhs = TREE_OPERAND (op, 0);
6039 tree rhs = TREE_OPERAND (op, 1);
6040 tree orig_lhs = lhs, orig_rhs = rhs;
6041 enum tree_code rhs_code = TREE_CODE (rhs);
6042 enum tree_code lhs_code = TREE_CODE (lhs);
6043 enum tree_code inv_code;
6045 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6046 return NULL_TREE;
6048 if (TREE_CODE_CLASS (code) != tcc_comparison)
6049 return NULL_TREE;
6051 if (rhs_code == truthop_code)
6053 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6054 if (newrhs != NULL_TREE)
6056 rhs = newrhs;
6057 rhs_code = TREE_CODE (rhs);
6060 if (lhs_code == truthop_code && !rhs_only)
6062 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6063 if (newlhs != NULL_TREE)
6065 lhs = newlhs;
6066 lhs_code = TREE_CODE (lhs);
6070 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6071 if (inv_code == rhs_code
6072 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6073 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6074 return lhs;
6075 if (!rhs_only && inv_code == lhs_code
6076 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6077 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6078 return rhs;
6079 if (rhs != orig_rhs || lhs != orig_lhs)
6080 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6081 lhs, rhs);
6082 return NULL_TREE;
6085 /* Find ways of folding logical expressions of LHS and RHS:
6086 Try to merge two comparisons to the same innermost item.
6087 Look for range tests like "ch >= '0' && ch <= '9'".
6088 Look for combinations of simple terms on machines with expensive branches
6089 and evaluate the RHS unconditionally.
6091 For example, if we have p->a == 2 && p->b == 4 and we can make an
6092 object large enough to span both A and B, we can do this with a comparison
6093 against the object ANDed with a mask.
6095 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6096 operations to do this with one comparison.
6098 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6099 function and the one above.
6101 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6102 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6104 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6105 two operands.
6107 We return the simplified tree or 0 if no optimization is possible. */
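/* For instance, on a struct with adjacent bit-fields a and b,
   s.a == 2 && s.b == 4 can become a single load of the word
   containing both fields, a mask, and one comparison against the
   merged constant.  */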
6109 static tree
6110 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6111 tree lhs, tree rhs)
6113 /* If this is the "or" of two comparisons, we can do something if
6114 the comparisons are NE_EXPR. If this is the "and", we can do something
6115 if the comparisons are EQ_EXPR. I.e.,
6116 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6118 WANTED_CODE is this operation code. For single bit fields, we can
6119 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6120 comparison for one-bit fields. */
6122 enum tree_code wanted_code;
6123 enum tree_code lcode, rcode;
6124 tree ll_arg, lr_arg, rl_arg, rr_arg;
6125 tree ll_inner, lr_inner, rl_inner, rr_inner;
6126 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6127 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6128 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6129 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6130 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6131 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6132 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6133 scalar_int_mode lnmode, rnmode;
6134 tree ll_mask, lr_mask, rl_mask, rr_mask;
6135 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6136 tree l_const, r_const;
6137 tree lntype, rntype, result;
6138 HOST_WIDE_INT first_bit, end_bit;
6139 int volatilep;
6141 /* Start by getting the comparison codes. Fail if anything is volatile.
6142 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6143 it were surrounded with a NE_EXPR. */
6145 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6146 return 0;
6148 lcode = TREE_CODE (lhs);
6149 rcode = TREE_CODE (rhs);
6151 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6153 lhs = build2 (NE_EXPR, truth_type, lhs,
6154 build_int_cst (TREE_TYPE (lhs), 0));
6155 lcode = NE_EXPR;
6158 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6160 rhs = build2 (NE_EXPR, truth_type, rhs,
6161 build_int_cst (TREE_TYPE (rhs), 0));
6162 rcode = NE_EXPR;
6165 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6166 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6167 return 0;
6169 ll_arg = TREE_OPERAND (lhs, 0);
6170 lr_arg = TREE_OPERAND (lhs, 1);
6171 rl_arg = TREE_OPERAND (rhs, 0);
6172 rr_arg = TREE_OPERAND (rhs, 1);
6174 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6175 if (simple_operand_p (ll_arg)
6176 && simple_operand_p (lr_arg))
6178 if (operand_equal_p (ll_arg, rl_arg, 0)
6179 && operand_equal_p (lr_arg, rr_arg, 0))
6181 result = combine_comparisons (loc, code, lcode, rcode,
6182 truth_type, ll_arg, lr_arg);
6183 if (result)
6184 return result;
6186 else if (operand_equal_p (ll_arg, rr_arg, 0)
6187 && operand_equal_p (lr_arg, rl_arg, 0))
6189 result = combine_comparisons (loc, code, lcode,
6190 swap_tree_comparison (rcode),
6191 truth_type, ll_arg, lr_arg);
6192 if (result)
6193 return result;
6197 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6198 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6200 /* If the RHS can be evaluated unconditionally and its operands are
6201 simple, it wins to evaluate the RHS unconditionally on machines
6202 with expensive branches. In this case, this isn't a comparison
6203 that can be merged. */
6205 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6206 false) >= 2
6207 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6208 && simple_operand_p (rl_arg)
6209 && simple_operand_p (rr_arg))
6211 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6212 if (code == TRUTH_OR_EXPR
6213 && lcode == NE_EXPR && integer_zerop (lr_arg)
6214 && rcode == NE_EXPR && integer_zerop (rr_arg)
6215 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6216 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6217 return build2_loc (loc, NE_EXPR, truth_type,
6218 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6219 ll_arg, rl_arg),
6220 build_int_cst (TREE_TYPE (ll_arg), 0));
6222 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6223 if (code == TRUTH_AND_EXPR
6224 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6225 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6226 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6227 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6228 return build2_loc (loc, EQ_EXPR, truth_type,
6229 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6230 ll_arg, rl_arg),
6231 build_int_cst (TREE_TYPE (ll_arg), 0));
6234 /* See if the comparisons can be merged. Then get all the parameters for
6235 each side. */
6237 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6238 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6239 return 0;
6241 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6242 volatilep = 0;
6243 ll_inner = decode_field_reference (loc, &ll_arg,
6244 &ll_bitsize, &ll_bitpos, &ll_mode,
6245 &ll_unsignedp, &ll_reversep, &volatilep,
6246 &ll_mask, &ll_and_mask);
6247 lr_inner = decode_field_reference (loc, &lr_arg,
6248 &lr_bitsize, &lr_bitpos, &lr_mode,
6249 &lr_unsignedp, &lr_reversep, &volatilep,
6250 &lr_mask, &lr_and_mask);
6251 rl_inner = decode_field_reference (loc, &rl_arg,
6252 &rl_bitsize, &rl_bitpos, &rl_mode,
6253 &rl_unsignedp, &rl_reversep, &volatilep,
6254 &rl_mask, &rl_and_mask);
6255 rr_inner = decode_field_reference (loc, &rr_arg,
6256 &rr_bitsize, &rr_bitpos, &rr_mode,
6257 &rr_unsignedp, &rr_reversep, &volatilep,
6258 &rr_mask, &rr_and_mask);
6260 /* The inner operation on the lhs of each comparison must be the
6261 same if we are to be able to do anything.
6262 Then see if we have constants. If not, the same must be true for
6263 the rhs's. */
6264 if (volatilep
6265 || ll_reversep != rl_reversep
6266 || ll_inner == 0 || rl_inner == 0
6267 || ! operand_equal_p (ll_inner, rl_inner, 0))
6268 return 0;
6270 if (TREE_CODE (lr_arg) == INTEGER_CST
6271 && TREE_CODE (rr_arg) == INTEGER_CST)
6273 l_const = lr_arg, r_const = rr_arg;
6274 lr_reversep = ll_reversep;
6276 else if (lr_reversep != rr_reversep
6277 || lr_inner == 0 || rr_inner == 0
6278 || ! operand_equal_p (lr_inner, rr_inner, 0))
6279 return 0;
6280 else
6281 l_const = r_const = 0;
6283 /* If either comparison code is not correct for our logical operation,
6284 fail. However, we can convert a one-bit comparison against zero into
6285 the opposite comparison against that bit being set in the field. */
6287 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6288 if (lcode != wanted_code)
6290 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6292 /* Make the left operand unsigned, since we are only interested
6293 in the value of one bit. Otherwise we are doing the wrong
6294 thing below. */
6295 ll_unsignedp = 1;
6296 l_const = ll_mask;
6298 else
6299 return 0;
6302 /* This is analogous to the code for l_const above. */
6303 if (rcode != wanted_code)
6305 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6307 rl_unsignedp = 1;
6308 r_const = rl_mask;
6310 else
6311 return 0;
6314 /* See if we can find a mode that contains both fields being compared on
6315 the left. If we can't, fail. Otherwise, update all constants and masks
6316 to be relative to a field of that size. */
6317 first_bit = MIN (ll_bitpos, rl_bitpos);
6318 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6319 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6320 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6321 volatilep, &lnmode))
6322 return 0;
6324 lnbitsize = GET_MODE_BITSIZE (lnmode);
6325 lnbitpos = first_bit & ~ (lnbitsize - 1);
6326 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6327 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6329 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6331 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6332 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6335 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6336 size_int (xll_bitpos));
6337 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6338 size_int (xrl_bitpos));
6340 if (l_const)
6342 l_const = fold_convert_loc (loc, lntype, l_const);
6343 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6344 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6345 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6346 fold_build1_loc (loc, BIT_NOT_EXPR,
6347 lntype, ll_mask))))
6349 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6351 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6354 if (r_const)
6356 r_const = fold_convert_loc (loc, lntype, r_const);
6357 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6358 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6359 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6360 fold_build1_loc (loc, BIT_NOT_EXPR,
6361 lntype, rl_mask))))
6363 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6365 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6369 /* If the right sides are not constant, do the same for them. Also,
6370 disallow this optimization if a size, signedness or storage order
6371 mismatch occurs between the left and right sides. */
6372 if (l_const == 0)
6374 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6375 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6376 || ll_reversep != lr_reversep
6377 /* Make sure the two fields on the right
6378 correspond to the left without being swapped. */
6379 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6380 return 0;
6382 first_bit = MIN (lr_bitpos, rr_bitpos);
6383 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6384 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6385 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6386 volatilep, &rnmode))
6387 return 0;
6389 rnbitsize = GET_MODE_BITSIZE (rnmode);
6390 rnbitpos = first_bit & ~ (rnbitsize - 1);
6391 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6392 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6394 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6396 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6397 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6400 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6401 rntype, lr_mask),
6402 size_int (xlr_bitpos));
6403 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6404 rntype, rr_mask),
6405 size_int (xrr_bitpos));
6407 /* Make a mask that corresponds to both fields being compared.
6408 Do this for both items being compared. If the operands are the
6409 same size and the bits being compared are in the same position
6410 then we can do this by masking both and comparing the masked
6411 results. */
6412 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6413 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6414 if (lnbitsize == rnbitsize
6415 && xll_bitpos == xlr_bitpos
6416 && lnbitpos >= 0
6417 && rnbitpos >= 0)
6419 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6420 lntype, lnbitsize, lnbitpos,
6421 ll_unsignedp || rl_unsignedp, ll_reversep);
6422 if (! all_ones_mask_p (ll_mask, lnbitsize))
6423 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6425 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6426 rntype, rnbitsize, rnbitpos,
6427 lr_unsignedp || rr_unsignedp, lr_reversep);
6428 if (! all_ones_mask_p (lr_mask, rnbitsize))
6429 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6431 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6434 /* There is still another way we can do something: If both pairs of
6435 fields being compared are adjacent, we may be able to make a wider
6436 field containing them both.
6438 Note that we still must mask the lhs/rhs expressions. Furthermore,
6439 the mask must be shifted to account for the shift done by
6440 make_bit_field_ref. */
6441 if (((ll_bitsize + ll_bitpos == rl_bitpos
6442 && lr_bitsize + lr_bitpos == rr_bitpos)
6443 || (ll_bitpos == rl_bitpos + rl_bitsize
6444 && lr_bitpos == rr_bitpos + rr_bitsize))
6445 && ll_bitpos >= 0
6446 && rl_bitpos >= 0
6447 && lr_bitpos >= 0
6448 && rr_bitpos >= 0)
6450 tree type;
6452 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6453 ll_bitsize + rl_bitsize,
6454 MIN (ll_bitpos, rl_bitpos),
6455 ll_unsignedp, ll_reversep);
6456 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6457 lr_bitsize + rr_bitsize,
6458 MIN (lr_bitpos, rr_bitpos),
6459 lr_unsignedp, lr_reversep);
6461 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6462 size_int (MIN (xll_bitpos, xrl_bitpos)));
6463 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6464 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6466 /* Convert to the smaller type before masking out unwanted bits. */
6467 type = lntype;
6468 if (lntype != rntype)
6470 if (lnbitsize > rnbitsize)
6472 lhs = fold_convert_loc (loc, rntype, lhs);
6473 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6474 type = rntype;
6476 else if (lnbitsize < rnbitsize)
6478 rhs = fold_convert_loc (loc, lntype, rhs);
6479 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6480 type = lntype;
6484 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6485 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6487 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6488 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6490 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6493 return 0;
6496 /* Handle the case of comparisons with constants. If there is something in
6497 common between the masks, those bits of the constants must be the same.
6498 If not, the condition is always false. Test for this to avoid generating
6499 incorrect code below. */
6500 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6501 if (! integer_zerop (result)
6502 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6503 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6505 if (wanted_code == NE_EXPR)
6507 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6508 return constant_boolean_node (true, truth_type);
6510 else
6512 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6513 return constant_boolean_node (false, truth_type);
6517 if (lnbitpos < 0)
6518 return 0;
6520 /* Construct the expression we will return. First get the component
6521 reference we will make. Unless the mask is all ones the width of
6522 that field, perform the mask operation. Then compare with the
6523 merged constant. */
6524 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6525 lntype, lnbitsize, lnbitpos,
6526 ll_unsignedp || rl_unsignedp, ll_reversep);
6528 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6529 if (! all_ones_mask_p (ll_mask, lnbitsize))
6530 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6532 return build2_loc (loc, wanted_code, truth_type, result,
6533 const_binop (BIT_IOR_EXPR, l_const, r_const));
6536 /* T is an integer expression that is being multiplied, divided, or taken a
6537 modulus (CODE says which and what kind of divide or modulus) by a
6538 constant C. See if we can eliminate that operation by folding it with
6539 other operations already in T. WIDE_TYPE, if non-null, is a type that
6540 should be used for the computation if wider than our type.
6542 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6543 (X * 2) + (Y * 4). We must, however, be assured that either the original
6544 expression would not overflow or that overflow is undefined for the type
6545 in the language in question.
6547 If we return a non-null expression, it is an equivalent form of the
6548 original computation, but need not be in the original type.
6550 We set *STRICT_OVERFLOW_P to true if the return values depends on
6551 signed overflow being undefined. Otherwise we do not change
6552 *STRICT_OVERFLOW_P. */
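/* For illustration: with signed overflow undefined, (a * 6) / 2 can
   be rewritten as a * 3, setting *STRICT_OVERFLOW_P because the
   rewrite relies on the inner multiplication not wrapping.  */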
6554 static tree
6555 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6556 bool *strict_overflow_p)
6558 /* To avoid exponential search depth, refuse to allow recursion past
6559 three levels. Beyond that (1) it's highly unlikely that we'll find
6560 something interesting and (2) we've probably processed it before
6561 when we built the inner expression. */
6563 static int depth;
6564 tree ret;
6566 if (depth > 3)
6567 return NULL;
6569 depth++;
6570 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6571 depth--;
6573 return ret;
6576 static tree
6577 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6578 bool *strict_overflow_p)
6580 tree type = TREE_TYPE (t);
6581 enum tree_code tcode = TREE_CODE (t);
6582 tree ctype = (wide_type != 0
6583 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6584 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6585 ? wide_type : type);
6586 tree t1, t2;
6587 int same_p = tcode == code;
6588 tree op0 = NULL_TREE, op1 = NULL_TREE;
6589 bool sub_strict_overflow_p;
6591 /* Don't deal with constants of zero here; they confuse the code below. */
6592 if (integer_zerop (c))
6593 return NULL_TREE;
6595 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6596 op0 = TREE_OPERAND (t, 0);
6598 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6599 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6601 /* Note that we need not handle conditional operations here since fold
6602 already handles those cases. So just do arithmetic here. */
6603 switch (tcode)
6605 case INTEGER_CST:
6606 /* For a constant, we can always simplify if we are a multiply
6607 or (for divide and modulus) if it is a multiple of our constant. */
6608 if (code == MULT_EXPR
6609 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6610 TYPE_SIGN (type)))
6612 tree tem = const_binop (code, fold_convert (ctype, t),
6613 fold_convert (ctype, c));
6614 /* If the multiplication overflowed, we lost information on it.
6615 See PR68142 and PR69845. */
6616 if (TREE_OVERFLOW (tem))
6617 return NULL_TREE;
6618 return tem;
6620 break;
6622 CASE_CONVERT: case NON_LVALUE_EXPR:
6623 /* If op0 is an expression ... */
6624 if ((COMPARISON_CLASS_P (op0)
6625 || UNARY_CLASS_P (op0)
6626 || BINARY_CLASS_P (op0)
6627 || VL_EXP_CLASS_P (op0)
6628 || EXPRESSION_CLASS_P (op0))
6629 /* ... and has wrapping overflow, and its type is smaller
6630 than ctype, then we cannot pass through as widening. */
6631 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6632 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6633 && (TYPE_PRECISION (ctype)
6634 > TYPE_PRECISION (TREE_TYPE (op0))))
6635 /* ... or this is a truncation (t is narrower than op0),
6636 then we cannot pass through this narrowing. */
6637 || (TYPE_PRECISION (type)
6638 < TYPE_PRECISION (TREE_TYPE (op0)))
6639 /* ... or signedness changes for division or modulus,
6640 then we cannot pass through this conversion. */
6641 || (code != MULT_EXPR
6642 && (TYPE_UNSIGNED (ctype)
6643 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6644 /* ... or has undefined overflow while the converted to
6645 type has not, we cannot do the operation in the inner type
6646 as that would introduce undefined overflow. */
6647 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6648 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6649 && !TYPE_OVERFLOW_UNDEFINED (type))))
6650 break;
6652 /* Pass the constant down and see if we can make a simplification. If
6653 we can, replace this expression with the inner simplification for
6654 possible later conversion to our or some other type. */
6655 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6656 && TREE_CODE (t2) == INTEGER_CST
6657 && !TREE_OVERFLOW (t2)
6658 && (t1 = extract_muldiv (op0, t2, code,
6659 code == MULT_EXPR ? ctype : NULL_TREE,
6660 strict_overflow_p)) != 0)
6661 return t1;
6662 break;
6664 case ABS_EXPR:
6665 /* If widening the type changes it from signed to unsigned, then we
6666 must avoid building ABS_EXPR itself as unsigned. */
6667 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6669 tree cstype = (*signed_type_for) (ctype);
6670 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6671 != 0)
6673 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6674 return fold_convert (ctype, t1);
6676 break;
6678 /* If the constant is negative, we cannot simplify this. */
6679 if (tree_int_cst_sgn (c) == -1)
6680 break;
6681 /* FALLTHROUGH */
6682 case NEGATE_EXPR:
6683 /* For division and modulus, type can't be unsigned, as e.g.
6684 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6685 For signed types, even with wrapping overflow, this is fine. */
6686 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6687 break;
6688 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6689 != 0)
6690 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6691 break;
6693 case MIN_EXPR: case MAX_EXPR:
6694 /* If widening the type changes the signedness, then we can't perform
6695 this optimization as that changes the result. */
6696 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6697 break;
6699 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6700 sub_strict_overflow_p = false;
6701 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6702 &sub_strict_overflow_p)) != 0
6703 && (t2 = extract_muldiv (op1, c, code, wide_type,
6704 &sub_strict_overflow_p)) != 0)
6706 if (tree_int_cst_sgn (c) < 0)
6707 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6708 if (sub_strict_overflow_p)
6709 *strict_overflow_p = true;
6710 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6711 fold_convert (ctype, t2));
6713 break;
6715 case LSHIFT_EXPR: case RSHIFT_EXPR:
6716 /* If the second operand is constant, this is a multiplication
6717 or floor division, by a power of two, so we can treat it that
6718 way unless the multiplier or divisor overflows. Signed
6719 left-shift overflow is implementation-defined rather than
6720 undefined in C90, so do not convert signed left shift into
6721 multiplication. */
6722 if (TREE_CODE (op1) == INTEGER_CST
6723 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6724 /* const_binop may not detect overflow correctly,
6725 so check for it explicitly here. */
6726 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6727 wi::to_wide (op1))
6728 && (t1 = fold_convert (ctype,
6729 const_binop (LSHIFT_EXPR, size_one_node,
6730 op1))) != 0
6731 && !TREE_OVERFLOW (t1))
6732 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6733 ? MULT_EXPR : FLOOR_DIV_EXPR,
6734 ctype,
6735 fold_convert (ctype, op0),
6736 t1),
6737 c, code, wide_type, strict_overflow_p);
6738 break;
6740 case PLUS_EXPR: case MINUS_EXPR:
6741 /* See if we can eliminate the operation on both sides. If we can, we
6742 can return a new PLUS or MINUS. If we can't, the only remaining
6743 cases where we can do anything are if the second operand is a
6744 constant. */
6745 sub_strict_overflow_p = false;
6746 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6747 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6748 if (t1 != 0 && t2 != 0
6749 && TYPE_OVERFLOW_WRAPS (ctype)
6750 && (code == MULT_EXPR
6751 /* If not multiplication, we can only do this if both operands
6752 are divisible by c. */
6753 || (multiple_of_p (ctype, op0, c)
6754 && multiple_of_p (ctype, op1, c))))
6756 if (sub_strict_overflow_p)
6757 *strict_overflow_p = true;
6758 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6759 fold_convert (ctype, t2));
6762 /* If this was a subtraction, negate OP1 and set it to be an addition.
6763 This simplifies the logic below. */
6764 if (tcode == MINUS_EXPR)
6766 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6767 /* If OP1 was not easily negatable, the constant may be OP0. */
6768 if (TREE_CODE (op0) == INTEGER_CST)
6770 std::swap (op0, op1);
6771 std::swap (t1, t2);
6775 if (TREE_CODE (op1) != INTEGER_CST)
6776 break;
6778 /* If either OP1 or C are negative, this optimization is not safe for
6779 some of the division and remainder types, while for others we need
6780 to change the code. */
6781 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6783 if (code == CEIL_DIV_EXPR)
6784 code = FLOOR_DIV_EXPR;
6785 else if (code == FLOOR_DIV_EXPR)
6786 code = CEIL_DIV_EXPR;
6787 else if (code != MULT_EXPR
6788 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6789 break;
6792 /* If it's a multiply or a division/modulus operation of a multiple
6793 of our constant, do the operation and verify it doesn't overflow. */
6794 if (code == MULT_EXPR
6795 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6796 TYPE_SIGN (type)))
6798 op1 = const_binop (code, fold_convert (ctype, op1),
6799 fold_convert (ctype, c));
6800 /* We allow the constant to overflow with wrapping semantics. */
6801 if (op1 == 0
6802 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6803 break;
6805 else
6806 break;
6808 /* If we have an unsigned type, we cannot widen the operation since it
6809 will change the result if the original computation overflowed. */
6810 if (TYPE_UNSIGNED (ctype) && ctype != type)
6811 break;
6813 /* The last case is if we are a multiply. In that case, we can
6814 apply the distributive law to commute the multiply and addition
6815 if the multiplication of the constants doesn't overflow
6816 and overflow is defined. With undefined overflow
6817 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6818 But fold_plusminus_mult_expr would factor back any power-of-two
6819 value so do not distribute in the first place in this case. */
6820 if (code == MULT_EXPR
6821 && TYPE_OVERFLOW_WRAPS (ctype)
6822 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6823 return fold_build2 (tcode, ctype,
6824 fold_build2 (code, ctype,
6825 fold_convert (ctype, op0),
6826 fold_convert (ctype, c)),
6827 op1);
6829 break;
6831 case MULT_EXPR:
6832 /* We have a special case here if we are doing something like
6833 (C * 8) % 4 since we know that's zero. */
6834 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6835 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6836 /* If the multiplication can overflow we cannot optimize this. */
6837 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6838 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6839 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6840 TYPE_SIGN (type)))
6842 *strict_overflow_p = true;
6843 return omit_one_operand (type, integer_zero_node, op0);
6846 /* ... fall through ... */
6848 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6849 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6850 /* If we can extract our operation from the LHS, do so and return a
6851 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6852 do something only if the second operand is a constant. */
6853 if (same_p
6854 && TYPE_OVERFLOW_WRAPS (ctype)
6855 && (t1 = extract_muldiv (op0, c, code, wide_type,
6856 strict_overflow_p)) != 0)
6857 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6858 fold_convert (ctype, op1));
6859 else if (tcode == MULT_EXPR && code == MULT_EXPR
6860 && TYPE_OVERFLOW_WRAPS (ctype)
6861 && (t1 = extract_muldiv (op1, c, code, wide_type,
6862 strict_overflow_p)) != 0)
6863 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6864 fold_convert (ctype, t1));
6865 else if (TREE_CODE (op1) != INTEGER_CST)
6866 return 0;
6868 /* If these are the same operation types, we can associate them
6869 assuming no overflow. */
6870 if (tcode == code)
6872 bool overflow_p = false;
6873 wi::overflow_type overflow_mul;
6874 signop sign = TYPE_SIGN (ctype);
6875 unsigned prec = TYPE_PRECISION (ctype);
6876 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6877 wi::to_wide (c, prec),
6878 sign, &overflow_mul);
6879 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6880 if (overflow_mul
6881 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6882 overflow_p = true;
6883 if (!overflow_p)
6884 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6885 wide_int_to_tree (ctype, mul));
6888 /* If these operations "cancel" each other, we have the main
6889 optimizations of this pass, which occur when either constant is a
6890 multiple of the other, in which case we replace this with an
6891 operation of either CODE or TCODE.
6893 If we have an unsigned type, we cannot do this since it will change
6894 the result if the original computation overflowed. */
6895 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6896 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6897 || (tcode == MULT_EXPR
6898 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6899 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6900 && code != MULT_EXPR)))
6902 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6903 TYPE_SIGN (type)))
6905 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6906 *strict_overflow_p = true;
6907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6908 fold_convert (ctype,
6909 const_binop (TRUNC_DIV_EXPR,
6910 op1, c)));
6912 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6913 TYPE_SIGN (type)))
6915 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6916 *strict_overflow_p = true;
6917 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6918 fold_convert (ctype,
6919 const_binop (TRUNC_DIV_EXPR,
6920 c, op1)));
6923 break;
6925 default:
6926 break;
6929 return 0;
6932 /* Return a node which has the indicated constant VALUE (either 0 or
6933 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6934 and is of the indicated TYPE. */
6936 tree
6937 constant_boolean_node (bool value, tree type)
6939 if (type == integer_type_node)
6940 return value ? integer_one_node : integer_zero_node;
6941 else if (type == boolean_type_node)
6942 return value ? boolean_true_node : boolean_false_node;
6943 else if (TREE_CODE (type) == VECTOR_TYPE)
6944 return build_vector_from_val (type,
6945 build_int_cst (TREE_TYPE (type),
6946 value ? -1 : 0));
6947 else
6948 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6952 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6953 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6954 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6955 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6956 COND is the first argument to CODE; otherwise (as in the example
6957 given here), it is the second argument. TYPE is the type of the
6958 original expression. Return NULL_TREE if no simplification is
6959 possible. */
6961 static tree
6962 fold_binary_op_with_conditional_arg (location_t loc,
6963 enum tree_code code,
6964 tree type, tree op0, tree op1,
6965 tree cond, tree arg, int cond_first_p)
6967 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6968 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6969 tree test, true_value, false_value;
6970 tree lhs = NULL_TREE;
6971 tree rhs = NULL_TREE;
6972 enum tree_code cond_code = COND_EXPR;
6974 /* Do not move possibly trapping operations into the conditional as this
6975 pessimizes code and causes gimplification issues when applied late. */
6976 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6977 ANY_INTEGRAL_TYPE_P (type)
6978 && TYPE_OVERFLOW_TRAPS (type), op1))
6979 return NULL_TREE;
6981 if (TREE_CODE (cond) == COND_EXPR
6982 || TREE_CODE (cond) == VEC_COND_EXPR)
6984 test = TREE_OPERAND (cond, 0);
6985 true_value = TREE_OPERAND (cond, 1);
6986 false_value = TREE_OPERAND (cond, 2);
6987 /* If an arm is a throw expression (and so has void type), it does
6988 not make sense to try to perform a logical or arithmetic
6989 operation involving it. */
6990 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6991 lhs = true_value;
6992 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6993 rhs = false_value;
6995 else if (!(TREE_CODE (type) != VECTOR_TYPE
6996 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6998 tree testtype = TREE_TYPE (cond);
6999 test = cond;
7000 true_value = constant_boolean_node (true, testtype);
7001 false_value = constant_boolean_node (false, testtype);
7003 else
7004 /* Detect the case of mixing vector and scalar types - bail out. */
7005 return NULL_TREE;
7007 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7008 cond_code = VEC_COND_EXPR;
7010 /* This transformation is only worthwhile if we don't have to wrap ARG
7011 in a SAVE_EXPR and the operation can be simplified without recursing
7012 on at least one of the branches once its pushed inside the COND_EXPR. */
7013 if (!TREE_CONSTANT (arg)
7014 && (TREE_SIDE_EFFECTS (arg)
7015 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7016 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7017 return NULL_TREE;
7019 arg = fold_convert_loc (loc, arg_type, arg);
7020 if (lhs == 0)
7022 true_value = fold_convert_loc (loc, cond_type, true_value);
7023 if (cond_first_p)
7024 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7025 else
7026 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7028 if (rhs == 0)
7030 false_value = fold_convert_loc (loc, cond_type, false_value);
7031 if (cond_first_p)
7032 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7033 else
7034 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7037 /* Check that we have simplified at least one of the branches. */
7038 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7039 return NULL_TREE;
7041 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7045 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7047 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7048 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7049 ADDEND is the same as X.
7051 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7052 and finite. The problematic cases are when X is zero, and its mode
7053 has signed zeros. In the case of rounding towards -infinity,
7054 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7055 modes, X + 0 is not the same as X because -0 + 0 is 0. */
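/* For illustration: folding X - 0.0 to X (NEGATE true, ADDEND +0.0)
   is valid under default rounding even when signed zeros are
   honored, whereas folding X + 0.0 is not, because -0.0 + 0.0
   is +0.0.  */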
7057 bool
7058 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7060 if (!real_zerop (addend))
7061 return false;
7063 /* Don't allow the fold with -fsignaling-nans. */
7064 if (HONOR_SNANS (type))
7065 return false;
7067 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7068 if (!HONOR_SIGNED_ZEROS (type))
7069 return true;
7071 /* There is no case that is safe for all rounding modes. */
7072 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7073 return false;
7075 /* In a vector or complex, we would need to check the sign of all zeros. */
7076 if (TREE_CODE (addend) == VECTOR_CST)
7077 addend = uniform_vector_p (addend);
7078 if (!addend || TREE_CODE (addend) != REAL_CST)
7079 return false;
7081 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7082 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7083 negate = !negate;
7085 /* The mode has signed zeros, and we have to honor their sign.
7086 In this situation, there is only one case we can return true for.
7087 X - 0 is the same as X with default rounding. */
7088 return negate;
7091 /* Subroutine of match.pd that optimizes comparisons of a division by
7092 a nonzero integer constant against an integer constant, i.e.
7093 X/C1 op C2.
7095 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7096 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
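/* For illustration: for unsigned X, X / 3 == 2 holds exactly when
   6 <= X && X <= 8, so *LO is set to 6 and *HI to 8.  */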
7098 enum tree_code
7099 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7100 tree *hi, bool *neg_overflow)
7102 tree prod, tmp, type = TREE_TYPE (c1);
7103 signop sign = TYPE_SIGN (type);
7104 wi::overflow_type overflow;
7106 /* We have to do this the hard way to detect unsigned overflow.
7107 prod = int_const_binop (MULT_EXPR, c1, c2); */
7108 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7109 prod = force_fit_type (type, val, -1, overflow);
7110 *neg_overflow = false;
7112 if (sign == UNSIGNED)
7114 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7115 *lo = prod;
7117 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7118 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7119 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7121 else if (tree_int_cst_sgn (c1) >= 0)
7123 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7124 switch (tree_int_cst_sgn (c2))
7126 case -1:
7127 *neg_overflow = true;
7128 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7129 *hi = prod;
7130 break;
7132 case 0:
7133 *lo = fold_negate_const (tmp, type);
7134 *hi = tmp;
7135 break;
7137 case 1:
7138 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7139 *lo = prod;
7140 break;
7142 default:
7143 gcc_unreachable ();
7146 else
7148 /* A negative divisor reverses the relational operators. */
7149 code = swap_tree_comparison (code);
7151 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7152 switch (tree_int_cst_sgn (c2))
7154 case -1:
7155 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7156 *lo = prod;
7157 break;
7159 case 0:
7160 *hi = fold_negate_const (tmp, type);
7161 *lo = tmp;
7162 break;
7164 case 1:
7165 *neg_overflow = true;
7166 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7167 *hi = prod;
7168 break;
7170 default:
7171 gcc_unreachable ();
7175 if (code != EQ_EXPR && code != NE_EXPR)
7176 return code;
7178 if (TREE_OVERFLOW (*lo)
7179 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7180 *lo = NULL_TREE;
7181 if (TREE_OVERFLOW (*hi)
7182 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7183 *hi = NULL_TREE;
7185 return code;
7189 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7190 equality/inequality test, then return a simplified form of the test
7191 using a sign test. Otherwise return NULL. TYPE is the desired
7192 result type. */
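/* For illustration: (A & 0x80) != 0, where 0x80 is the sign bit of
   A's 8-bit type, can become a signed comparison such as
   (signed char) A < 0.  */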
7194 static tree
7195 fold_single_bit_test_into_sign_test (location_t loc,
7196 enum tree_code code, tree arg0, tree arg1,
7197 tree result_type)
7199 /* If this is testing a single bit, we can optimize the test. */
7200 if ((code == NE_EXPR || code == EQ_EXPR)
7201 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7202 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7204 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7205 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7206 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7208 if (arg00 != NULL_TREE
7209 /* This is only a win if casting to a signed type is cheap,
7210 i.e. when arg00's type is not a partial mode. */
7211 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7213 tree stype = signed_type_for (TREE_TYPE (arg00));
7214 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7215 result_type,
7216 fold_convert_loc (loc, stype, arg00),
7217 build_int_cst (stype, 0));
7221 return NULL_TREE;
7224 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7225 equality/inequality test, then return a simplified form of
7226 the test using shifts and logical operations. Otherwise return
7227 NULL. TYPE is the desired result type. */
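/* For illustration: (A & 8) != 0 becomes ((A >> 3) & 1), and
   (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), once the sign-test
   fold above does not apply.  */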
7229 tree
7230 fold_single_bit_test (location_t loc, enum tree_code code,
7231 tree arg0, tree arg1, tree result_type)
7233 /* If this is testing a single bit, we can optimize the test. */
7234 if ((code == NE_EXPR || code == EQ_EXPR)
7235 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7236 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7238 tree inner = TREE_OPERAND (arg0, 0);
7239 tree type = TREE_TYPE (arg0);
7240 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7241 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7242 int ops_unsigned;
7243 tree signed_type, unsigned_type, intermediate_type;
7244 tree tem, one;
7246 /* First, see if we can fold the single bit test into a sign-bit
7247 test. */
7248 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7249 result_type);
7250 if (tem)
7251 return tem;
7253 /* Otherwise we have (A & C) != 0 where C is a single bit,
7254 convert that into ((A >> C2) & 1), where C2 = log2(C).
7255 Similarly for (A & C) == 0. */
7257 /* If INNER is a right shift of a constant and it plus BITNUM does
7258 not overflow, adjust BITNUM and INNER. */
7259 if (TREE_CODE (inner) == RSHIFT_EXPR
7260 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7261 && bitnum < TYPE_PRECISION (type)
7262 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7263 TYPE_PRECISION (type) - bitnum))
7265 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7266 inner = TREE_OPERAND (inner, 0);
7269 /* If we are going to be able to omit the AND below, we must do our
7270 operations as unsigned. If we must use the AND, we have a choice.
7271 Normally unsigned is faster, but for some machines signed is. */
7272 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7273 && !flag_syntax_only) ? 0 : 1;
7275 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7276 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7277 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7278 inner = fold_convert_loc (loc, intermediate_type, inner);
7280 if (bitnum != 0)
7281 inner = build2 (RSHIFT_EXPR, intermediate_type,
7282 inner, size_int (bitnum));
7284 one = build_int_cst (intermediate_type, 1);
7286 if (code == EQ_EXPR)
7287 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7289 /* Put the AND last so it can combine with more things. */
7290 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7292 /* Make sure to return the proper type. */
7293 inner = fold_convert_loc (loc, result_type, inner);
7295 return inner;
7297 return NULL_TREE;
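/* Illustrative sketch (an editorial example, not part of fold-const.c):
   for a non-sign bit the routine above turns (A & C) != 0 into
   ((A >> C2) & 1) with C2 = log2 (C), and (A & C) == 0 into that shift
   XORed with 1.  A brute-force host-side check for C = 8, C2 = 3: */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 64; a++)
    {
      assert (((a & 8) != 0) == ((a >> 3) & 1));
      assert (((a & 8) == 0) == (((a >> 3) & 1) ^ 1));
    }
  return 0;
}
#endif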
7300 /* Test whether it is preferable to swap two operands, ARG0 and
7301 ARG1, for example because ARG0 is an integer constant and ARG1
7302 isn't. */
7304 bool
7305 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7307 if (CONSTANT_CLASS_P (arg1))
7308 return 0;
7309 if (CONSTANT_CLASS_P (arg0))
7310 return 1;
7312 STRIP_NOPS (arg0);
7313 STRIP_NOPS (arg1);
7315 if (TREE_CONSTANT (arg1))
7316 return 0;
7317 if (TREE_CONSTANT (arg0))
7318 return 1;
7320 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
7321 for commutative and comparison operators. Ensuring a canonical
7322 form allows the optimizers to find additional redundancies without
7323 having to explicitly check for both orderings. */
7324 if (TREE_CODE (arg0) == SSA_NAME
7325 && TREE_CODE (arg1) == SSA_NAME
7326 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7327 return 1;
7329 /* Put SSA_NAMEs last. */
7330 if (TREE_CODE (arg1) == SSA_NAME)
7331 return 0;
7332 if (TREE_CODE (arg0) == SSA_NAME)
7333 return 1;
7335 /* Put variables last. */
7336 if (DECL_P (arg1))
7337 return 0;
7338 if (DECL_P (arg0))
7339 return 1;
7341 return 0;
7345 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7346 means A >= Y && A != MAX, but in this case we know that
7347 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7349 static tree
7350 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7352 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7354 if (TREE_CODE (bound) == LT_EXPR)
7355 a = TREE_OPERAND (bound, 0);
7356 else if (TREE_CODE (bound) == GT_EXPR)
7357 a = TREE_OPERAND (bound, 1);
7358 else
7359 return NULL_TREE;
7361 typea = TREE_TYPE (a);
7362 if (!INTEGRAL_TYPE_P (typea)
7363 && !POINTER_TYPE_P (typea))
7364 return NULL_TREE;
7366 if (TREE_CODE (ineq) == LT_EXPR)
7368 a1 = TREE_OPERAND (ineq, 1);
7369 y = TREE_OPERAND (ineq, 0);
7371 else if (TREE_CODE (ineq) == GT_EXPR)
7373 a1 = TREE_OPERAND (ineq, 0);
7374 y = TREE_OPERAND (ineq, 1);
7376 else
7377 return NULL_TREE;
7379 if (TREE_TYPE (a1) != typea)
7380 return NULL_TREE;
7382 if (POINTER_TYPE_P (typea))
7384 /* Convert the pointer types into integers before taking the difference. */
7385 tree ta = fold_convert_loc (loc, ssizetype, a);
7386 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7387 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7389 else
7390 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7392 if (!diff || !integer_onep (diff))
7393 return NULL_TREE;
7395 return fold_build2_loc (loc, GE_EXPR, type, a, y);
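/* Illustrative sketch (an editorial example, not part of fold-const.c):
   the identity used above is that A + 1 > Y is equivalent to A >= Y
   provided A + 1 does not overflow, which the bound A < X guarantees.
   A brute-force host-side check over a range far from overflow: */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -8; a < 8; a++)
    for (int x = -8; x < 8; x++)
      for (int y = -8; y < 8; y++)
	assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}
#endif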
7398 /* Fold a sum or difference of at least one multiplication.
7399 Returns the folded tree or NULL if no simplification could be made. */
7401 static tree
7402 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7403 tree arg0, tree arg1)
7405 tree arg00, arg01, arg10, arg11;
7406 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7408 /* (A * C) +- (B * C) -> (A+-B) * C.
7409 (A * C) +- A -> A * (C+-1).
7410 We are most concerned about the case where C is a constant,
7411 but other combinations show up during loop reduction. Since
7412 it is not difficult, try all four possibilities. */
7414 if (TREE_CODE (arg0) == MULT_EXPR)
7416 arg00 = TREE_OPERAND (arg0, 0);
7417 arg01 = TREE_OPERAND (arg0, 1);
7419 else if (TREE_CODE (arg0) == INTEGER_CST)
7421 arg00 = build_one_cst (type);
7422 arg01 = arg0;
7424 else
7426 /* We cannot generate constant 1 for fract. */
7427 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7428 return NULL_TREE;
7429 arg00 = arg0;
7430 arg01 = build_one_cst (type);
7432 if (TREE_CODE (arg1) == MULT_EXPR)
7434 arg10 = TREE_OPERAND (arg1, 0);
7435 arg11 = TREE_OPERAND (arg1, 1);
7437 else if (TREE_CODE (arg1) == INTEGER_CST)
7439 arg10 = build_one_cst (type);
7440 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7441 the purpose of this canonicalization. */
7442 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7443 && negate_expr_p (arg1)
7444 && code == PLUS_EXPR)
7446 arg11 = negate_expr (arg1);
7447 code = MINUS_EXPR;
7449 else
7450 arg11 = arg1;
7452 else
7454 /* We cannot generate constant 1 for fract. */
7455 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7456 return NULL_TREE;
7457 arg10 = arg1;
7458 arg11 = build_one_cst (type);
7460 same = NULL_TREE;
7462 /* Prefer factoring a common non-constant. */
7463 if (operand_equal_p (arg00, arg10, 0))
7464 same = arg00, alt0 = arg01, alt1 = arg11;
7465 else if (operand_equal_p (arg01, arg11, 0))
7466 same = arg01, alt0 = arg00, alt1 = arg10;
7467 else if (operand_equal_p (arg00, arg11, 0))
7468 same = arg00, alt0 = arg01, alt1 = arg10;
7469 else if (operand_equal_p (arg01, arg10, 0))
7470 same = arg01, alt0 = arg00, alt1 = arg11;
7472 /* No identical multiplicands; see if we can find a common
7473 power-of-two factor in non-power-of-two multiplies. This
7474 can help in multi-dimensional array access. */
7475 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7477 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7478 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7479 HOST_WIDE_INT tmp;
7480 bool swap = false;
7481 tree maybe_same;
7483 /* Move min of absolute values to int11. */
7484 if (absu_hwi (int01) < absu_hwi (int11))
7486 tmp = int01, int01 = int11, int11 = tmp;
7487 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7488 maybe_same = arg01;
7489 swap = true;
7491 else
7492 maybe_same = arg11;
7494 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7495 if (factor > 1
7496 && pow2p_hwi (factor)
7497 && (int01 & (factor - 1)) == 0
7498 /* The remainder should not be a constant, otherwise we
7499 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which would
7500 increase the number of multiplications necessary. */
7501 && TREE_CODE (arg10) != INTEGER_CST)
7503 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7504 build_int_cst (TREE_TYPE (arg00),
7505 int01 / int11));
7506 alt1 = arg10;
7507 same = maybe_same;
7508 if (swap)
7509 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7513 if (!same)
7514 return NULL_TREE;
7516 if (! ANY_INTEGRAL_TYPE_P (type)
7517 || TYPE_OVERFLOW_WRAPS (type)
7518 /* We are neither factoring zero nor minus one. */
7519 || TREE_CODE (same) == INTEGER_CST)
7520 return fold_build2_loc (loc, MULT_EXPR, type,
7521 fold_build2_loc (loc, code, type,
7522 fold_convert_loc (loc, type, alt0),
7523 fold_convert_loc (loc, type, alt1)),
7524 fold_convert_loc (loc, type, same));
7526 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7527 same may be minus one and thus the multiplication may overflow. Perform
7528 the sum operation in an unsigned type. */
7529 tree utype = unsigned_type_for (type);
7530 tree tem = fold_build2_loc (loc, code, utype,
7531 fold_convert_loc (loc, utype, alt0),
7532 fold_convert_loc (loc, utype, alt1));
7533 /* If the sum evaluated to a constant that is not -INF, the
7534 cannot overflow. */
7535 if (TREE_CODE (tem) == INTEGER_CST
7536 && (wi::to_wide (tem)
7537 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7538 return fold_build2_loc (loc, MULT_EXPR, type,
7539 fold_convert (type, tem), same);
7541 /* Do not resort to unsigned multiplication because
7542 we lose the no-overflow property of the expression. */
7543 return NULL_TREE;
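/* Illustrative sketch (an editorial example, not part of fold-const.c):
   two instances of the factorings above, checked with unsigned
   (wrapping) host arithmetic so overflow cannot invalidate them:
   (A * C) + (B * C) -> (A + B) * C, and the common power-of-two
   factor case X * 8 + Y * 4 -> (X * 2 + Y) * 4: */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 64; a++)
    for (unsigned b = 0; b < 64; b++)
      {
	assert (a * 7 + b * 7 == (a + b) * 7);
	assert (a * 8 + b * 4 == (a * 2 + b) * 4);
      }
  return 0;
}
#endif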
7546 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7547 specified by EXPR into the buffer PTR of length LEN bytes.
7548 Return the number of bytes placed in the buffer, or zero
7549 upon failure. */
7551 static int
7552 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7554 tree type = TREE_TYPE (expr);
7555 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7556 int byte, offset, word, words;
7557 unsigned char value;
7559 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7560 return 0;
7561 if (off == -1)
7562 off = 0;
7564 if (ptr == NULL)
7565 /* Dry run. */
7566 return MIN (len, total_bytes - off);
7568 words = total_bytes / UNITS_PER_WORD;
7570 for (byte = 0; byte < total_bytes; byte++)
7572 int bitpos = byte * BITS_PER_UNIT;
7573 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7574 number of bytes. */
7575 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7577 if (total_bytes > UNITS_PER_WORD)
7579 word = byte / UNITS_PER_WORD;
7580 if (WORDS_BIG_ENDIAN)
7581 word = (words - 1) - word;
7582 offset = word * UNITS_PER_WORD;
7583 if (BYTES_BIG_ENDIAN)
7584 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7585 else
7586 offset += byte % UNITS_PER_WORD;
7588 else
7589 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7590 if (offset >= off && offset - off < len)
7591 ptr[offset - off] = value;
7593 return MIN (len, total_bytes - off);
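/* Illustrative sketch (an editorial example, not part of fold-const.c):
   the loop above emits the constant one byte at a time; on a
   little-endian target the byte at index I is simply bits
   [8*I, 8*I+8) of the value, as in this host-side sketch for a 32-bit
   constant (encode_le32 is a made-up helper; the real code also
   permutes byte positions for big-endian bytes and words): */
#if 0
#include <assert.h>
#include <stdint.h>

/* Illustration-only helper: write VAL into PTR in little-endian order.  */
static void
encode_le32 (uint32_t val, unsigned char *ptr)
{
  for (int byte = 0; byte < 4; byte++)
    ptr[byte] = (val >> (byte * 8)) & 0xff;	/* One byte per pass.  */
}

int
main (void)
{
  unsigned char buf[4];
  encode_le32 (0x11223344u, buf);
  assert (buf[0] == 0x44 && buf[1] == 0x33
	  && buf[2] == 0x22 && buf[3] == 0x11);
  return 0;
}
#endif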
7597 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7598 specified by EXPR into the buffer PTR of length LEN bytes.
7599 Return the number of bytes placed in the buffer, or zero
7600 upon failure. */
7602 static int
7603 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7605 tree type = TREE_TYPE (expr);
7606 scalar_mode mode = SCALAR_TYPE_MODE (type);
7607 int total_bytes = GET_MODE_SIZE (mode);
7608 FIXED_VALUE_TYPE value;
7609 tree i_value, i_type;
7611 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7612 return 0;
7614 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7616 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7617 return 0;
7619 value = TREE_FIXED_CST (expr);
7620 i_value = double_int_to_tree (i_type, value.data);
7622 return native_encode_int (i_value, ptr, len, off);
7626 /* Subroutine of native_encode_expr. Encode the REAL_CST
7627 specified by EXPR into the buffer PTR of length LEN bytes.
7628 Return the number of bytes placed in the buffer, or zero
7629 upon failure. */
7631 static int
7632 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7634 tree type = TREE_TYPE (expr);
7635 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7636 int byte, offset, word, words, bitpos;
7637 unsigned char value;
7639 /* There are always 32 bits in each long, no matter the size of
7640 the host's long. We handle floating point representations with
7641 up to 192 bits. */
7642 long tmp[6];
7644 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7645 return 0;
7646 if (off == -1)
7647 off = 0;
7649 if (ptr == NULL)
7650 /* Dry run. */
7651 return MIN (len, total_bytes - off);
7653 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7655 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7657 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7658 bitpos += BITS_PER_UNIT)
7660 byte = (bitpos / BITS_PER_UNIT) & 3;
7661 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7663 if (UNITS_PER_WORD < 4)
7665 word = byte / UNITS_PER_WORD;
7666 if (WORDS_BIG_ENDIAN)
7667 word = (words - 1) - word;
7668 offset = word * UNITS_PER_WORD;
7669 if (BYTES_BIG_ENDIAN)
7670 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7671 else
7672 offset += byte % UNITS_PER_WORD;
7674 else
7676 offset = byte;
7677 if (BYTES_BIG_ENDIAN)
7679 /* Reverse bytes within each long, or within the entire float
7680 if it's smaller than a long (for HFmode). */
7681 offset = MIN (3, total_bytes - 1) - offset;
7682 gcc_assert (offset >= 0);
7685 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7686 if (offset >= off
7687 && offset - off < len)
7688 ptr[offset - off] = value;
7690 return MIN (len, total_bytes - off);
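/* Illustrative sketch (an editorial example, not part of fold-const.c):
   the function above writes the target image of a REAL_CST from the
   32-bit groups produced by real_to_target.  On a host whose float is
   IEEE-754 binary32, the byte image of 1.0f is the four bytes of
   0x3f800000 in byte order, which this prints: */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  unsigned char bytes[sizeof f];
  memcpy (bytes, &f, sizeof f);
  for (unsigned i = 0; i < sizeof f; i++)
    printf ("%02x ", bytes[i]);	/* "00 00 80 3f" on little-endian.  */
  printf ("\n");
  return 0;
}
#endif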
7693 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7694 specified by EXPR into the buffer PTR of length LEN bytes.
7695 Return the number of bytes placed in the buffer, or zero
7696 upon failure. */
7698 static int
7699 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7701 int rsize, isize;
7702 tree part;
7704 part = TREE_REALPART (expr);
7705 rsize = native_encode_expr (part, ptr, len, off);
7706 if (off == -1 && rsize == 0)
7707 return 0;
7708 part = TREE_IMAGPART (expr);
7709 if (off != -1)
7710 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7711 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7712 len - rsize, off);
7713 if (off == -1 && isize != rsize)
7714 return 0;
7715 return rsize + isize;
7719 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7720 specified by EXPR into the buffer PTR of length LEN bytes.
7721 Return the number of bytes placed in the buffer, or zero
7722 upon failure. */
7724 static int
7725 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7727 unsigned HOST_WIDE_INT i, count;
7728 int size, offset;
7729 tree itype, elem;
7731 offset = 0;
7732 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7733 return 0;
7734 itype = TREE_TYPE (TREE_TYPE (expr));
7735 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7736 for (i = 0; i < count; i++)
7738 if (off >= size)
7740 off -= size;
7741 continue;
7743 elem = VECTOR_CST_ELT (expr, i);
7744 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7745 len - offset, off);
7746 if ((off == -1 && res != size) || res == 0)
7747 return 0;
7748 offset += res;
7749 if (offset >= len)
7750 return (off == -1 && i < count - 1) ? 0 : offset;
7751 if (off != -1)
7752 off = 0;
7754 return offset;
7758 /* Subroutine of native_encode_expr. Encode the STRING_CST
7759 specified by EXPR into the buffer PTR of length LEN bytes.
7760 Return the number of bytes placed in the buffer, or zero
7761 upon failure. */
7763 static int
7764 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7766 tree type = TREE_TYPE (expr);
7768 /* Wide-char strings are encoded in target byte-order, so encoding
7769 them natively is trivial. */
7770 if (BITS_PER_UNIT != CHAR_BIT
7771 || TREE_CODE (type) != ARRAY_TYPE
7772 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7773 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7774 return 0;
7776 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7777 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7778 return 0;
7779 if (off == -1)
7780 off = 0;
7781 if (ptr == NULL)
7782 /* Dry run. */;
7783 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7785 int written = 0;
7786 if (off < TREE_STRING_LENGTH (expr))
7788 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7789 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7791 memset (ptr + written, 0,
7792 MIN (total_bytes - written, len - written));
7794 else
7795 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7796 return MIN (total_bytes - off, len);
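/* Illustrative sketch (an editorial example, not part of fold-const.c):
   as in the code above, when the array type is longer than the
   STRING_CST's representation the tail of the buffer is zero-filled,
   mirroring how `char s[8] = "abc"' is padded with NULs in C: */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  unsigned char buf[8];
  const char *str = "abc";
  size_t written = strlen (str) + 1;	/* Representation bytes, incl. NUL.  */
  memcpy (buf, str, written);
  memset (buf + written, 0, sizeof buf - written);
  assert (buf[2] == 'c' && buf[3] == 0 && buf[7] == 0);
  return 0;
}
#endif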
7800 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7801 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7802 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7803 anything, just do a dry run. If OFF is not -1 then start
7804 the encoding at byte offset OFF and encode at most LEN bytes.
7805 Return the number of bytes placed in the buffer, or zero upon failure. */
7807 int
7808 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7810 /* We don't support starting at negative offset and -1 is special. */
7811 if (off < -1)
7812 return 0;
7814 switch (TREE_CODE (expr))
7816 case INTEGER_CST:
7817 return native_encode_int (expr, ptr, len, off);
7819 case REAL_CST:
7820 return native_encode_real (expr, ptr, len, off);
7822 case FIXED_CST:
7823 return native_encode_fixed (expr, ptr, len, off);
7825 case COMPLEX_CST:
7826 return native_encode_complex (expr, ptr, len, off);
7828 case VECTOR_CST:
7829 return native_encode_vector (expr, ptr, len, off);
7831 case STRING_CST:
7832 return native_encode_string (expr, ptr, len, off);
7834 default:
7835 return 0;
7840 /* Subroutine of native_interpret_expr. Interpret the contents of
7841 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7842 If the buffer cannot be interpreted, return NULL_TREE. */
7844 static tree
7845 native_interpret_int (tree type, const unsigned char *ptr, int len)
7847 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7849 if (total_bytes > len
7850 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7851 return NULL_TREE;
7853 wide_int result = wi::from_buffer (ptr, total_bytes);
7855 return wide_int_to_tree (type, result);
7859 /* Subroutine of native_interpret_expr. Interpret the contents of
7860 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7861 If the buffer cannot be interpreted, return NULL_TREE. */
7863 static tree
7864 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7866 scalar_mode mode = SCALAR_TYPE_MODE (type);
7867 int total_bytes = GET_MODE_SIZE (mode);
7868 double_int result;
7869 FIXED_VALUE_TYPE fixed_value;
7871 if (total_bytes > len
7872 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7873 return NULL_TREE;
7875 result = double_int::from_buffer (ptr, total_bytes);
7876 fixed_value = fixed_from_double_int (result, mode);
7878 return build_fixed (type, fixed_value);
7882 /* Subroutine of native_interpret_expr. Interpret the contents of
7883 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7884 If the buffer cannot be interpreted, return NULL_TREE. */
7886 static tree
7887 native_interpret_real (tree type, const unsigned char *ptr, int len)
7889 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7890 int total_bytes = GET_MODE_SIZE (mode);
7891 unsigned char value;
7892 /* There are always 32 bits in each long, no matter the size of
7893 the host's long. We handle floating point representations with
7894 up to 192 bits. */
7895 REAL_VALUE_TYPE r;
7896 long tmp[6];
7898 if (total_bytes > len || total_bytes > 24)
7899 return NULL_TREE;
7900 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7902 memset (tmp, 0, sizeof (tmp));
7903 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7904 bitpos += BITS_PER_UNIT)
7906 /* Both OFFSET and BYTE index within a long;
7907 bitpos indexes the whole float. */
7908 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7909 if (UNITS_PER_WORD < 4)
7911 int word = byte / UNITS_PER_WORD;
7912 if (WORDS_BIG_ENDIAN)
7913 word = (words - 1) - word;
7914 offset = word * UNITS_PER_WORD;
7915 if (BYTES_BIG_ENDIAN)
7916 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7917 else
7918 offset += byte % UNITS_PER_WORD;
7920 else
7922 offset = byte;
7923 if (BYTES_BIG_ENDIAN)
7925 /* Reverse bytes within each long, or within the entire float
7926 if it's smaller than a long (for HFmode). */
7927 offset = MIN (3, total_bytes - 1) - offset;
7928 gcc_assert (offset >= 0);
7931 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7933 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7936 real_from_target (&r, tmp, mode);
7937 return build_real (type, r);
7941 /* Subroutine of native_interpret_expr. Interpret the contents of
7942 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7943 If the buffer cannot be interpreted, return NULL_TREE. */
7945 static tree
7946 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7948 tree etype, rpart, ipart;
7949 int size;
7951 etype = TREE_TYPE (type);
7952 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7953 if (size * 2 > len)
7954 return NULL_TREE;
7955 rpart = native_interpret_expr (etype, ptr, size);
7956 if (!rpart)
7957 return NULL_TREE;
7958 ipart = native_interpret_expr (etype, ptr+size, size);
7959 if (!ipart)
7960 return NULL_TREE;
7961 return build_complex (type, rpart, ipart);
7965 /* Subroutine of native_interpret_expr. Interpret the contents of
7966 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7967 If the buffer cannot be interpreted, return NULL_TREE. */
7969 static tree
7970 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7972 tree etype, elem;
7973 unsigned int i, size;
7974 unsigned HOST_WIDE_INT count;
7976 etype = TREE_TYPE (type);
7977 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7978 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7979 || size * count > len)
7980 return NULL_TREE;
7982 tree_vector_builder elements (type, count, 1);
7983 for (i = 0; i < count; ++i)
7985 elem = native_interpret_expr (etype, ptr+(i*size), size);
7986 if (!elem)
7987 return NULL_TREE;
7988 elements.quick_push (elem);
7990 return elements.build ();
7994 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7995 the buffer PTR of length LEN as a constant of type TYPE. For
7996 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7997 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7998 return NULL_TREE. */
8000 tree
8001 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8003 switch (TREE_CODE (type))
8005 case INTEGER_TYPE:
8006 case ENUMERAL_TYPE:
8007 case BOOLEAN_TYPE:
8008 case POINTER_TYPE:
8009 case REFERENCE_TYPE:
8010 return native_interpret_int (type, ptr, len);
8012 case REAL_TYPE:
8013 return native_interpret_real (type, ptr, len);
8015 case FIXED_POINT_TYPE:
8016 return native_interpret_fixed (type, ptr, len);
8018 case COMPLEX_TYPE:
8019 return native_interpret_complex (type, ptr, len);
8021 case VECTOR_TYPE:
8022 return native_interpret_vector (type, ptr, len);
8024 default:
8025 return NULL_TREE;
8029 /* Returns true if we can interpret the contents of a native encoding
8030 as TYPE. */
8032 static bool
8033 can_native_interpret_type_p (tree type)
8035 switch (TREE_CODE (type))
8037 case INTEGER_TYPE:
8038 case ENUMERAL_TYPE:
8039 case BOOLEAN_TYPE:
8040 case POINTER_TYPE:
8041 case REFERENCE_TYPE:
8042 case FIXED_POINT_TYPE:
8043 case REAL_TYPE:
8044 case COMPLEX_TYPE:
8045 case VECTOR_TYPE:
8046 return true;
8047 default:
8048 return false;
8053 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8054 TYPE at compile-time. If we're unable to perform the conversion
8055 return NULL_TREE. */
8057 static tree
8058 fold_view_convert_expr (tree type, tree expr)
8060 /* We support up to 512-bit values (for V8DFmode). */
8061 unsigned char buffer[64];
8062 int len;
8064 /* Check that the host and target are sane. */
8065 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8066 return NULL_TREE;
8068 len = native_encode_expr (expr, buffer, sizeof (buffer));
8069 if (len == 0)
8070 return NULL_TREE;
8072 return native_interpret_expr (type, buffer, len);
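/* Illustrative sketch (an editorial example, not part of fold-const.c):
   the encode/interpret round trip above is the compile-time analogue
   of a memcpy-based type pun.  On a host whose float is IEEE-754
   binary32, viewing the bits of 1.0f as a 32-bit integer yields
   0x3f800000: */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t u;
  memcpy (&u, &f, sizeof u);	/* Reinterpret the bytes, as the folder
				   does through its byte buffer.  */
  assert (u == 0x3f800000u);
  return 0;
}
#endif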
8075 /* Build an expression for the address of T. Folds away INDIRECT_REF
8076 to avoid confusing the gimplify process. */
8078 tree
8079 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8081 /* The size of the object is not relevant when talking about its address. */
8082 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8083 t = TREE_OPERAND (t, 0);
8085 if (TREE_CODE (t) == INDIRECT_REF)
8087 t = TREE_OPERAND (t, 0);
8089 if (TREE_TYPE (t) != ptrtype)
8090 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8092 else if (TREE_CODE (t) == MEM_REF
8093 && integer_zerop (TREE_OPERAND (t, 1)))
8094 return TREE_OPERAND (t, 0);
8095 else if (TREE_CODE (t) == MEM_REF
8096 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8097 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8098 TREE_OPERAND (t, 0),
8099 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8100 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8102 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8104 if (TREE_TYPE (t) != ptrtype)
8105 t = fold_convert_loc (loc, ptrtype, t);
8107 else
8108 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8110 return t;
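/* Illustrative sketch (an editorial example, not part of fold-const.c):
   the INDIRECT_REF folding above corresponds to the source-level
   identity &*p == p: */
#if 0
#include <assert.h>

int
main (void)
{
  int x = 0;
  int *p = &x;
  assert (&*p == p);	/* Taking the address of a dereference folds
			   back to the original pointer.  */
  return 0;
}
#endif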
8113 /* Build an expression for the address of T. */
8115 tree
8116 build_fold_addr_expr_loc (location_t loc, tree t)
8118 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8120 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8123 /* Fold a unary expression of code CODE and type TYPE with operand
8124 OP0. Return the folded expression if folding is successful.
8125 Otherwise, return NULL_TREE. */
8127 tree
8128 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8130 tree tem;
8131 tree arg0;
8132 enum tree_code_class kind = TREE_CODE_CLASS (code);
8134 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8135 && TREE_CODE_LENGTH (code) == 1);
8137 arg0 = op0;
8138 if (arg0)
8140 if (CONVERT_EXPR_CODE_P (code)
8141 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8143 /* Don't use STRIP_NOPS, because signedness of argument type
8144 matters. */
8145 STRIP_SIGN_NOPS (arg0);
8147 else
8149 /* Strip any conversions that don't change the mode. This
8150 is safe for every expression, except for a comparison
8151 expression because its signedness is derived from its
8152 operands.
8154 Note that this is done as an internal manipulation within
8155 the constant folder, in order to find the simplest
8156 representation of the arguments so that their form can be
8157 studied. In any case, the appropriate type conversions
8158 should be put back in the tree that will get out of the
8159 constant folder. */
8160 STRIP_NOPS (arg0);
8163 if (CONSTANT_CLASS_P (arg0))
8165 tree tem = const_unop (code, type, arg0);
8166 if (tem)
8168 if (TREE_TYPE (tem) != type)
8169 tem = fold_convert_loc (loc, type, tem);
8170 return tem;
8175 tem = generic_simplify (loc, code, type, op0);
8176 if (tem)
8177 return tem;
8179 if (TREE_CODE_CLASS (code) == tcc_unary)
8181 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8182 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8183 fold_build1_loc (loc, code, type,
8184 fold_convert_loc (loc, TREE_TYPE (op0),
8185 TREE_OPERAND (arg0, 1))));
8186 else if (TREE_CODE (arg0) == COND_EXPR)
8188 tree arg01 = TREE_OPERAND (arg0, 1);
8189 tree arg02 = TREE_OPERAND (arg0, 2);
8190 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8191 arg01 = fold_build1_loc (loc, code, type,
8192 fold_convert_loc (loc,
8193 TREE_TYPE (op0), arg01));
8194 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8195 arg02 = fold_build1_loc (loc, code, type,
8196 fold_convert_loc (loc,
8197 TREE_TYPE (op0), arg02));
8198 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8199 arg01, arg02);
8201 /* If this was a conversion, and all we did was to move it
8202 inside the COND_EXPR, bring it back out. But leave it if
8203 it is a conversion from integer to integer and the
8204 result precision is no wider than a word since such a
8205 conversion is cheap and may be optimized away by combine,
8206 while it couldn't if it were outside the COND_EXPR. Then return
8207 so we don't get into an infinite recursion loop taking the
8208 conversion out and then back in. */
8210 if ((CONVERT_EXPR_CODE_P (code)
8211 || code == NON_LVALUE_EXPR)
8212 && TREE_CODE (tem) == COND_EXPR
8213 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8214 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8215 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8216 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8217 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8218 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8219 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8220 && (INTEGRAL_TYPE_P
8221 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8222 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8223 || flag_syntax_only))
8224 tem = build1_loc (loc, code, type,
8225 build3 (COND_EXPR,
8226 TREE_TYPE (TREE_OPERAND
8227 (TREE_OPERAND (tem, 1), 0)),
8228 TREE_OPERAND (tem, 0),
8229 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8230 TREE_OPERAND (TREE_OPERAND (tem, 2),
8231 0)));
8232 return tem;
8236 switch (code)
8238 case NON_LVALUE_EXPR:
8239 if (!maybe_lvalue_p (op0))
8240 return fold_convert_loc (loc, type, op0);
8241 return NULL_TREE;
8243 CASE_CONVERT:
8244 case FLOAT_EXPR:
8245 case FIX_TRUNC_EXPR:
8246 if (COMPARISON_CLASS_P (op0))
8248 /* If we have (type) (a CMP b) and type is an integral type, return
8249 new expression involving the new type. Canonicalize
8250 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8251 non-integral type.
8252 Do not fold the result as that would not simplify further, also
8253 folding again results in recursions. */
8254 if (TREE_CODE (type) == BOOLEAN_TYPE)
8255 return build2_loc (loc, TREE_CODE (op0), type,
8256 TREE_OPERAND (op0, 0),
8257 TREE_OPERAND (op0, 1));
8258 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8259 && TREE_CODE (type) != VECTOR_TYPE)
8260 return build3_loc (loc, COND_EXPR, type, op0,
8261 constant_boolean_node (true, type),
8262 constant_boolean_node (false, type));
8265 /* Handle (T *)&A.B.C for A being of type T and B and C
8266 living at offset zero. This occurs frequently in
8267 C++ upcasting and then accessing the base. */
8268 if (TREE_CODE (op0) == ADDR_EXPR
8269 && POINTER_TYPE_P (type)
8270 && handled_component_p (TREE_OPERAND (op0, 0)))
8272 poly_int64 bitsize, bitpos;
8273 tree offset;
8274 machine_mode mode;
8275 int unsignedp, reversep, volatilep;
8276 tree base
8277 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8278 &offset, &mode, &unsignedp, &reversep,
8279 &volatilep);
8280 /* If the reference was to a (constant) zero offset, we can use
8281 the address of the base if it has the same base type
8282 as the result type and the pointer type is unqualified. */
8283 if (!offset
8284 && known_eq (bitpos, 0)
8285 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8286 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8287 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8288 return fold_convert_loc (loc, type,
8289 build_fold_addr_expr_loc (loc, base));
8292 if (TREE_CODE (op0) == MODIFY_EXPR
8293 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8294 /* Detect assigning a bitfield. */
8295 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8296 && DECL_BIT_FIELD
8297 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8299 /* Don't leave an assignment inside a conversion
8300 unless assigning a bitfield. */
8301 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8302 /* First do the assignment, then return converted constant. */
8303 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8304 TREE_NO_WARNING (tem) = 1;
8305 TREE_USED (tem) = 1;
8306 return tem;
8309 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8310 constant (if x has signed type, the sign bit cannot be set
8311 in c). This folds extension into the BIT_AND_EXPR.
8312 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8313 very likely don't have maximal range for their precision and this
8314 transformation effectively doesn't preserve non-maximal ranges. */
8315 if (TREE_CODE (type) == INTEGER_TYPE
8316 && TREE_CODE (op0) == BIT_AND_EXPR
8317 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8319 tree and_expr = op0;
8320 tree and0 = TREE_OPERAND (and_expr, 0);
8321 tree and1 = TREE_OPERAND (and_expr, 1);
8322 int change = 0;
8324 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8325 || (TYPE_PRECISION (type)
8326 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8327 change = 1;
8328 else if (TYPE_PRECISION (TREE_TYPE (and1))
8329 <= HOST_BITS_PER_WIDE_INT
8330 && tree_fits_uhwi_p (and1))
8332 unsigned HOST_WIDE_INT cst;
8334 cst = tree_to_uhwi (and1);
8335 cst &= HOST_WIDE_INT_M1U
8336 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8337 change = (cst == 0);
8338 if (change
8339 && !flag_syntax_only
8340 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8341 == ZERO_EXTEND))
8343 tree uns = unsigned_type_for (TREE_TYPE (and0));
8344 and0 = fold_convert_loc (loc, uns, and0);
8345 and1 = fold_convert_loc (loc, uns, and1);
8348 if (change)
8350 tem = force_fit_type (type, wi::to_widest (and1), 0,
8351 TREE_OVERFLOW (and1));
8352 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8353 fold_convert_loc (loc, type, and0), tem);
8357 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8358 cast (T1)X will fold away. We assume that this happens when X itself
8359 is a cast. */
8360 if (POINTER_TYPE_P (type)
8361 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8362 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8364 tree arg00 = TREE_OPERAND (arg0, 0);
8365 tree arg01 = TREE_OPERAND (arg0, 1);
8367 return fold_build_pointer_plus_loc
8368 (loc, fold_convert_loc (loc, type, arg00), arg01);
8371 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8372 of the same precision, and X is an integer type not narrower than
8373 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8374 if (INTEGRAL_TYPE_P (type)
8375 && TREE_CODE (op0) == BIT_NOT_EXPR
8376 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8377 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8378 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8380 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8381 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8382 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8383 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8384 fold_convert_loc (loc, type, tem));
8387 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8388 type of X and Y (integer types only). */
8389 if (INTEGRAL_TYPE_P (type)
8390 && TREE_CODE (op0) == MULT_EXPR
8391 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8392 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8394 /* Be careful not to introduce new overflows. */
8395 tree mult_type;
8396 if (TYPE_OVERFLOW_WRAPS (type))
8397 mult_type = type;
8398 else
8399 mult_type = unsigned_type_for (type);
8401 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8403 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8404 fold_convert_loc (loc, mult_type,
8405 TREE_OPERAND (op0, 0)),
8406 fold_convert_loc (loc, mult_type,
8407 TREE_OPERAND (op0, 1)));
8408 return fold_convert_loc (loc, type, tem);
8412 return NULL_TREE;
8414 case VIEW_CONVERT_EXPR:
8415 if (TREE_CODE (op0) == MEM_REF)
8417 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8418 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8419 tem = fold_build2_loc (loc, MEM_REF, type,
8420 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8421 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8422 return tem;
8425 return NULL_TREE;
8427 case NEGATE_EXPR:
8428 tem = fold_negate_expr (loc, arg0);
8429 if (tem)
8430 return fold_convert_loc (loc, type, tem);
8431 return NULL_TREE;
8433 case ABS_EXPR:
8434 /* Convert fabs((double)float) into (double)fabsf(float). */
8435 if (TREE_CODE (arg0) == NOP_EXPR
8436 && TREE_CODE (type) == REAL_TYPE)
8438 tree targ0 = strip_float_extensions (arg0);
8439 if (targ0 != arg0)
8440 return fold_convert_loc (loc, type,
8441 fold_build1_loc (loc, ABS_EXPR,
8442 TREE_TYPE (targ0),
8443 targ0));
8445 return NULL_TREE;
8447 case BIT_NOT_EXPR:
8448 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8449 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8450 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8451 fold_convert_loc (loc, type,
8452 TREE_OPERAND (arg0, 0)))))
8453 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8454 fold_convert_loc (loc, type,
8455 TREE_OPERAND (arg0, 1)));
8456 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8457 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8458 fold_convert_loc (loc, type,
8459 TREE_OPERAND (arg0, 1)))))
8460 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8461 fold_convert_loc (loc, type,
8462 TREE_OPERAND (arg0, 0)), tem);
8464 return NULL_TREE;
8466 case TRUTH_NOT_EXPR:
8467 /* Note that the operand of this must be an int
8468 and its values must be 0 or 1.
8469 ("true" is a fixed value perhaps depending on the language,
8470 but we don't handle values other than 1 correctly yet.) */
8471 tem = fold_truth_not_expr (loc, arg0);
8472 if (!tem)
8473 return NULL_TREE;
8474 return fold_convert_loc (loc, type, tem);
8476 case INDIRECT_REF:
8477 /* Fold *&X to X if X is an lvalue. */
8478 if (TREE_CODE (op0) == ADDR_EXPR)
8480 tree op00 = TREE_OPERAND (op0, 0);
8481 if ((VAR_P (op00)
8482 || TREE_CODE (op00) == PARM_DECL
8483 || TREE_CODE (op00) == RESULT_DECL)
8484 && !TREE_READONLY (op00))
8485 return op00;
8487 return NULL_TREE;
8489 default:
8490 return NULL_TREE;
8491 } /* switch (code) */
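/* Illustrative sketch (an editorial example, not part of fold-const.c):
   one of the unary foldings above, fabs ((double) f) -> (double)
   fabsf (f), is valid because widening float to double is exact, so
   the absolute value may be taken before the extension: */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  float samples[] = { -1.5f, -0.0f, 0.0f, 2.25f };
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    assert (fabs ((double) samples[i]) == (double) fabsf (samples[i]));
  return 0;
}
#endif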
8495 /* If the operation was a conversion, do _not_ mark a resulting constant
8496 with TREE_OVERFLOW if the original constant was not. These conversions
8497 have implementation defined behavior and retaining the TREE_OVERFLOW
8498 flag here would confuse later passes such as VRP. */
8499 tree
8500 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8501 tree type, tree op0)
8503 tree res = fold_unary_loc (loc, code, type, op0);
8504 if (res
8505 && TREE_CODE (res) == INTEGER_CST
8506 && TREE_CODE (op0) == INTEGER_CST
8507 && CONVERT_EXPR_CODE_P (code))
8508 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8510 return res;
8513 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8514 operands OP0 and OP1. LOC is the location of the resulting expression.
8515 ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped.
8516 Return the folded expression if folding is successful. Otherwise,
8517 return NULL_TREE. */
8518 static tree
8519 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8520 tree arg0, tree arg1, tree op0, tree op1)
8522 tree tem;
8524 /* We only do these simplifications if we are optimizing. */
8525 if (!optimize)
8526 return NULL_TREE;
8528 /* Check for things like (A || B) && (A || C). We can convert this
8529 to A || (B && C). Note that either operator can be any of the four
8530 truth and/or operations and the transformation will still be
8531 valid. Also note that we only care about order for the
8532 ANDIF and ORIF operators. If B contains side effects, this
8533 might change the truth-value of A. */
8534 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8535 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8536 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8537 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8538 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8539 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8541 tree a00 = TREE_OPERAND (arg0, 0);
8542 tree a01 = TREE_OPERAND (arg0, 1);
8543 tree a10 = TREE_OPERAND (arg1, 0);
8544 tree a11 = TREE_OPERAND (arg1, 1);
8545 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8546 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8547 && (code == TRUTH_AND_EXPR
8548 || code == TRUTH_OR_EXPR));
8550 if (operand_equal_p (a00, a10, 0))
8551 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8552 fold_build2_loc (loc, code, type, a01, a11));
8553 else if (commutative && operand_equal_p (a00, a11, 0))
8554 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8555 fold_build2_loc (loc, code, type, a01, a10));
8556 else if (commutative && operand_equal_p (a01, a10, 0))
8557 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8558 fold_build2_loc (loc, code, type, a00, a11));
8560 /* This case is tricky because we must either have commutative
8561 operators or else A10 must not have side-effects. */
8563 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8564 && operand_equal_p (a01, a11, 0))
8565 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8566 fold_build2_loc (loc, code, type, a00, a10),
8567 a01);
8570 /* See if we can build a range comparison. */
8571 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8572 return tem;
8574 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8575 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8577 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8578 if (tem)
8579 return fold_build2_loc (loc, code, type, tem, arg1);
8582 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8583 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8585 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8586 if (tem)
8587 return fold_build2_loc (loc, code, type, arg0, tem);
8590 /* Check for the possibility of merging component references. If our
8591 lhs is another similar operation, try to merge its rhs with our
8592 rhs. Then try to merge our lhs and rhs. */
8593 if (TREE_CODE (arg0) == code
8594 && (tem = fold_truth_andor_1 (loc, code, type,
8595 TREE_OPERAND (arg0, 1), arg1)) != 0)
8596 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8598 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8599 return tem;
8601 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
8602 if (param_logical_op_non_short_circuit != -1)
8603 logical_op_non_short_circuit
8604 = param_logical_op_non_short_circuit;
8605 if (logical_op_non_short_circuit
8606 && !flag_sanitize_coverage
8607 && (code == TRUTH_AND_EXPR
8608 || code == TRUTH_ANDIF_EXPR
8609 || code == TRUTH_OR_EXPR
8610 || code == TRUTH_ORIF_EXPR))
8612 enum tree_code ncode, icode;
8614 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8615 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8616 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8618 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8619 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8620 We don't want to pack more than two leaves to a non-IF AND/OR
8621 expression.
8622 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8623 equal to IF-CODE, then we don't want to add right-hand operand.
8624 If the inner right-hand side of left-hand operand has
8625 side-effects, or isn't simple, then we can't add to it,
8626 as otherwise we might destroy the if-sequence. */
8627 if (TREE_CODE (arg0) == icode
8628 && simple_operand_p_2 (arg1)
8629 /* Needed for sequence points to handle trappings, and
8630 side-effects. */
8631 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8633 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8634 arg1);
8635 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8636 tem);
8638 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8639 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8640 else if (TREE_CODE (arg1) == icode
8641 && simple_operand_p_2 (arg0)
8642 /* Needed for sequence points to handle trappings, and
8643 side-effects. */
8644 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8646 tem = fold_build2_loc (loc, ncode, type,
8647 arg0, TREE_OPERAND (arg1, 0));
8648 return fold_build2_loc (loc, icode, type, tem,
8649 TREE_OPERAND (arg1, 1));
8651 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8652 into (A OR B).
8653 For sequence point consistency, we need to check for trapping,
8654 and side-effects. */
8655 else if (code == icode && simple_operand_p_2 (arg0)
8656 && simple_operand_p_2 (arg1))
8657 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8660 return NULL_TREE;
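/* Illustrative sketch (an editorial example, not part of fold-const.c):
   the first transformation above, (A || B) && (A || C) -> A || (B && C),
   verified over all boolean assignments.  B and C must be free of side
   effects, as checked above, because the rewrite can change how often
   they are evaluated: */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
	assert (((a || b) && (a || c)) == (a || (b && c)));
  return 0;
}
#endif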
8663 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8664 by changing CODE to reduce the magnitude of constants involved in
8665 ARG0 of the comparison.
8666 Returns a canonicalized comparison tree if a simplification was
8667 possible, otherwise returns NULL_TREE.
8668 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8669 valid if signed overflow is undefined. */
8671 static tree
8672 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8673 tree arg0, tree arg1,
8674 bool *strict_overflow_p)
8676 enum tree_code code0 = TREE_CODE (arg0);
8677 tree t, cst0 = NULL_TREE;
8678 int sgn0;
8680 /* Match A +- CST code arg1. We can change this only if overflow
8681 is undefined. */
8682 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8683 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8684 /* In principle pointers also have undefined overflow behavior,
8685 but that causes problems elsewhere. */
8686 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8687 && (code0 == MINUS_EXPR
8688 || code0 == PLUS_EXPR)
8689 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8690 return NULL_TREE;
8692 /* Identify the constant in arg0 and its sign. */
8693 cst0 = TREE_OPERAND (arg0, 1);
8694 sgn0 = tree_int_cst_sgn (cst0);
8696 /* Overflowed constants and zero will cause problems. */
8697 if (integer_zerop (cst0)
8698 || TREE_OVERFLOW (cst0))
8699 return NULL_TREE;
8701 /* See if we can reduce the magnitude of the constant in
8702 arg0 by changing the comparison code. */
8703 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8704 if (code == LT_EXPR
8705 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8706 code = LE_EXPR;
8707 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8708 else if (code == GT_EXPR
8709 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8710 code = GE_EXPR;
8711 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8712 else if (code == LE_EXPR
8713 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8714 code = LT_EXPR;
8715 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8716 else if (code == GE_EXPR
8717 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8718 code = GT_EXPR;
8719 else
8720 return NULL_TREE;
8721 *strict_overflow_p = true;
8723 /* Now build the constant reduced in magnitude. But not if that
8724 would produce one outside of its type's range. */
8725 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8726 && ((sgn0 == 1
8727 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8728 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8729 || (sgn0 == -1
8730 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8731 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8732 return NULL_TREE;
8734 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8735 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8736 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8737 t = fold_convert (TREE_TYPE (arg1), t);
8739 return fold_build2_loc (loc, code, type, t, arg1);
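/* Illustrative sketch (an editorial example, not part of fold-const.c):
   one instance of the rewrites above is A + 2 > Y -> A + 1 >= Y, which
   shrinks the constant and is valid whenever A + 2 does not overflow
   (hence the requirement that signed overflow be undefined).  A
   host-side check away from the overflow boundary: */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -100; a < 100; a++)
    for (int y = -100; y < 100; y++)
      assert ((a + 2 > y) == (a + 1 >= y));
  return 0;
}
#endif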
8742 /* Further canonicalize the comparison ARG0 CODE ARG1, whose type TYPE
8743 has undefined overflow. Try to decrease the magnitude of constants involved
8744 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8745 and put sole constants at the second argument position.
8746 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8748 static tree
8749 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8750 tree arg0, tree arg1)
8752 tree t;
8753 bool strict_overflow_p;
8754 const char * const warnmsg = G_("assuming signed overflow does not occur "
8755 "when reducing constant in comparison");
8757 /* Try canonicalization by simplifying arg0. */
8758 strict_overflow_p = false;
8759 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8760 &strict_overflow_p);
8761 if (t)
8763 if (strict_overflow_p)
8764 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8765 return t;
8768 /* Try canonicalization by simplifying arg1 using the swapped
8769 comparison. */
8770 code = swap_tree_comparison (code);
8771 strict_overflow_p = false;
8772 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8773 &strict_overflow_p);
8774 if (t && strict_overflow_p)
8775 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8776 return t;
8779 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8780 space. This is used to avoid issuing overflow warnings for
8781 expressions like &p->x which cannot wrap. */
8783 static bool
8784 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8786 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8787 return true;
8789 if (maybe_lt (bitpos, 0))
8790 return true;
8792 poly_wide_int wi_offset;
8793 int precision = TYPE_PRECISION (TREE_TYPE (base));
8794 if (offset == NULL_TREE)
8795 wi_offset = wi::zero (precision);
8796 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8797 return true;
8798 else
8799 wi_offset = wi::to_poly_wide (offset);
8801 wi::overflow_type overflow;
8802 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8803 precision);
8804 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8805 if (overflow)
8806 return true;
8808 poly_uint64 total_hwi, size;
8809 if (!total.to_uhwi (&total_hwi)
8810 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8811 &size)
8812 || known_eq (size, 0U))
8813 return true;
8815 if (known_le (total_hwi, size))
8816 return false;
8818 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8819 array. */
8820 if (TREE_CODE (base) == ADDR_EXPR
8821 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8822 &size)
8823 && maybe_ne (size, 0U)
8824 && known_le (total_hwi, size))
8825 return false;
8827 return true;
8830 /* Return a positive integer when the symbol DECL is known to have
8831 a nonzero address, zero when it's known not to (e.g., it's a weak
8832 symbol), and a negative integer when the symbol is not yet in the
8833 symbol table and so whether or not its address is zero is unknown.
8834 For function-local objects, always return a positive integer. */
8835 static int
8836 maybe_nonzero_address (tree decl)
8838 if (DECL_P (decl) && decl_in_symtab_p (decl))
8839 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8840 return symbol->nonzero_address ();
8842 /* Function local objects are never NULL. */
8843 if (DECL_P (decl)
8844 && (DECL_CONTEXT (decl)
8845 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8846 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8847 return 1;
8849 return -1;
8852 /* Subroutine of fold_binary. This routine performs all of the
8853 transformations that are common to the equality/inequality
8854 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8855 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8856 fold_binary should call fold_binary, not this routine directly. Fold a comparison with
8857 tree code CODE and type TYPE with operands OP0 and OP1. Return
8858 the folded comparison or NULL_TREE. */
8860 static tree
8861 fold_comparison (location_t loc, enum tree_code code, tree type,
8862 tree op0, tree op1)
8864 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8865 tree arg0, arg1, tem;
8867 arg0 = op0;
8868 arg1 = op1;
8870 STRIP_SIGN_NOPS (arg0);
8871 STRIP_SIGN_NOPS (arg1);
8873 /* For comparisons of pointers we can decompose it to a compile time
8874 comparison of the base objects and the offsets into the object.
8875 This requires at least one operand being an ADDR_EXPR or a
8876 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8877 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8878 && (TREE_CODE (arg0) == ADDR_EXPR
8879 || TREE_CODE (arg1) == ADDR_EXPR
8880 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8881 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8883 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8884 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8885 machine_mode mode;
8886 int volatilep, reversep, unsignedp;
8887 bool indirect_base0 = false, indirect_base1 = false;
8889 /* Get base and offset for the access. Strip ADDR_EXPR for
8890 get_inner_reference, but put it back by stripping INDIRECT_REF
8891 off the base object if possible. indirect_baseN will be true
8892 if baseN is not an address but refers to the object itself. */
8893 base0 = arg0;
8894 if (TREE_CODE (arg0) == ADDR_EXPR)
8896 base0
8897 = get_inner_reference (TREE_OPERAND (arg0, 0),
8898 &bitsize, &bitpos0, &offset0, &mode,
8899 &unsignedp, &reversep, &volatilep);
8900 if (TREE_CODE (base0) == INDIRECT_REF)
8901 base0 = TREE_OPERAND (base0, 0);
8902 else
8903 indirect_base0 = true;
8905 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8907 base0 = TREE_OPERAND (arg0, 0);
8908 STRIP_SIGN_NOPS (base0);
8909 if (TREE_CODE (base0) == ADDR_EXPR)
8911 base0
8912 = get_inner_reference (TREE_OPERAND (base0, 0),
8913 &bitsize, &bitpos0, &offset0, &mode,
8914 &unsignedp, &reversep, &volatilep);
8915 if (TREE_CODE (base0) == INDIRECT_REF)
8916 base0 = TREE_OPERAND (base0, 0);
8917 else
8918 indirect_base0 = true;
8920 if (offset0 == NULL_TREE || integer_zerop (offset0))
8921 offset0 = TREE_OPERAND (arg0, 1);
8922 else
8923 offset0 = size_binop (PLUS_EXPR, offset0,
8924 TREE_OPERAND (arg0, 1));
8925 if (poly_int_tree_p (offset0))
8927 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8928 TYPE_PRECISION (sizetype));
8929 tem <<= LOG2_BITS_PER_UNIT;
8930 tem += bitpos0;
8931 if (tem.to_shwi (&bitpos0))
8932 offset0 = NULL_TREE;
8936 base1 = arg1;
8937 if (TREE_CODE (arg1) == ADDR_EXPR)
8939 base1
8940 = get_inner_reference (TREE_OPERAND (arg1, 0),
8941 &bitsize, &bitpos1, &offset1, &mode,
8942 &unsignedp, &reversep, &volatilep);
8943 if (TREE_CODE (base1) == INDIRECT_REF)
8944 base1 = TREE_OPERAND (base1, 0);
8945 else
8946 indirect_base1 = true;
8948 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8950 base1 = TREE_OPERAND (arg1, 0);
8951 STRIP_SIGN_NOPS (base1);
8952 if (TREE_CODE (base1) == ADDR_EXPR)
8954 base1
8955 = get_inner_reference (TREE_OPERAND (base1, 0),
8956 &bitsize, &bitpos1, &offset1, &mode,
8957 &unsignedp, &reversep, &volatilep);
8958 if (TREE_CODE (base1) == INDIRECT_REF)
8959 base1 = TREE_OPERAND (base1, 0);
8960 else
8961 indirect_base1 = true;
8963 if (offset1 == NULL_TREE || integer_zerop (offset1))
8964 offset1 = TREE_OPERAND (arg1, 1);
8965 else
8966 offset1 = size_binop (PLUS_EXPR, offset1,
8967 TREE_OPERAND (arg1, 1));
8968 if (poly_int_tree_p (offset1))
8970 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8971 TYPE_PRECISION (sizetype));
8972 tem <<= LOG2_BITS_PER_UNIT;
8973 tem += bitpos1;
8974 if (tem.to_shwi (&bitpos1))
8975 offset1 = NULL_TREE;
8979 /* If we have equivalent bases we might be able to simplify. */
8980 if (indirect_base0 == indirect_base1
8981 && operand_equal_p (base0, base1,
8982 indirect_base0 ? OEP_ADDRESS_OF : 0))
8984 /* We can fold this expression to a constant if the non-constant
8985 offset parts are equal. */
8986 if ((offset0 == offset1
8987 || (offset0 && offset1
8988 && operand_equal_p (offset0, offset1, 0)))
8989 && (equality_code
8990 || (indirect_base0
8991 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8992 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8994 if (!equality_code
8995 && maybe_ne (bitpos0, bitpos1)
8996 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8997 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8998 fold_overflow_warning (("assuming pointer wraparound does not "
8999 "occur when comparing P +- C1 with "
9000 "P +- C2"),
9001 WARN_STRICT_OVERFLOW_CONDITIONAL);
9003 switch (code)
9005 case EQ_EXPR:
9006 if (known_eq (bitpos0, bitpos1))
9007 return constant_boolean_node (true, type);
9008 if (known_ne (bitpos0, bitpos1))
9009 return constant_boolean_node (false, type);
9010 break;
9011 case NE_EXPR:
9012 if (known_ne (bitpos0, bitpos1))
9013 return constant_boolean_node (true, type);
9014 if (known_eq (bitpos0, bitpos1))
9015 return constant_boolean_node (false, type);
9016 break;
9017 case LT_EXPR:
9018 if (known_lt (bitpos0, bitpos1))
9019 return constant_boolean_node (true, type);
9020 if (known_ge (bitpos0, bitpos1))
9021 return constant_boolean_node (false, type);
9022 break;
9023 case LE_EXPR:
9024 if (known_le (bitpos0, bitpos1))
9025 return constant_boolean_node (true, type);
9026 if (known_gt (bitpos0, bitpos1))
9027 return constant_boolean_node (false, type);
9028 break;
9029 case GE_EXPR:
9030 if (known_ge (bitpos0, bitpos1))
9031 return constant_boolean_node (true, type);
9032 if (known_lt (bitpos0, bitpos1))
9033 return constant_boolean_node (false, type);
9034 break;
9035 case GT_EXPR:
9036 if (known_gt (bitpos0, bitpos1))
9037 return constant_boolean_node (true, type);
9038 if (known_le (bitpos0, bitpos1))
9039 return constant_boolean_node (false, type);
9040 break;
9041 default:;
9044 /* We can simplify the comparison to a comparison of the variable
9045 offset parts if the constant offset parts are equal.
9046 Be careful to use signed sizetype here because otherwise we
9047 mess with array offsets in the wrong way. This is possible
9048 because pointer arithmetic is restricted to remain within an
9049 object and overflow on pointer differences is undefined as of
9050 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9051 else if (known_eq (bitpos0, bitpos1)
9052 && (equality_code
9053 || (indirect_base0
9054 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9055 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9057 /* By converting to signed sizetype we cover middle-end pointer
9058 arithmetic which operates on unsigned pointer types of size
9059 type size and ARRAY_REF offsets which are properly sign or
9060 zero extended from their type in case it is narrower than
9061 sizetype. */
9062 if (offset0 == NULL_TREE)
9063 offset0 = build_int_cst (ssizetype, 0);
9064 else
9065 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9066 if (offset1 == NULL_TREE)
9067 offset1 = build_int_cst (ssizetype, 0);
9068 else
9069 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9071 if (!equality_code
9072 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9073 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9074 fold_overflow_warning (("assuming pointer wraparound does not "
9075 "occur when comparing P +- C1 with "
9076 "P +- C2"),
9077 WARN_STRICT_OVERFLOW_COMPARISON);
9079 return fold_build2_loc (loc, code, type, offset0, offset1);
9082 /* For equal offsets we can simplify to a comparison of the
9083 base addresses. */
9084 else if (known_eq (bitpos0, bitpos1)
9085 && (indirect_base0
9086 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9087 && (indirect_base1
9088 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9089 && ((offset0 == offset1)
9090 || (offset0 && offset1
9091 && operand_equal_p (offset0, offset1, 0))))
9093 if (indirect_base0)
9094 base0 = build_fold_addr_expr_loc (loc, base0);
9095 if (indirect_base1)
9096 base1 = build_fold_addr_expr_loc (loc, base1);
9097 return fold_build2_loc (loc, code, type, base0, base1);
9099 /* Comparison between an ordinary (non-weak) symbol and a null
9100 pointer can be eliminated since such symbols must have a
9101 non-null address. In C, relational expressions between pointers
9102 to objects and null pointers are undefined. The results
9103 below follow the C++ rules with the additional property that
9104 every object pointer compares greater than a null pointer. */
9106 else if (((DECL_P (base0)
9107 && maybe_nonzero_address (base0) > 0
9108 /* Avoid folding references to struct members at offset 0 to
9109 prevent tests like '&ptr->firstmember == 0' from getting
9110 eliminated. When ptr is null, although the -> expression
9111 is strictly speaking invalid, GCC retains it as a matter
9112 of QoI. See PR c/44555. */
9113 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9114 || CONSTANT_CLASS_P (base0))
9115 && indirect_base0
9116 /* The caller guarantees that when one of the arguments is
9117 constant (i.e., null in this case) it is second. */
9118 && integer_zerop (arg1))
9120 switch (code)
9122 case EQ_EXPR:
9123 case LE_EXPR:
9124 case LT_EXPR:
9125 return constant_boolean_node (false, type);
9126 case GE_EXPR:
9127 case GT_EXPR:
9128 case NE_EXPR:
9129 return constant_boolean_node (true, type);
9130 default:
9131 gcc_unreachable ();
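/* A small illustration of the rule above (hypothetical C input, not
   part of this source): for a non-weak 'struct { int a, b; } s;',
     &s.b == (int *) 0  folds to 0,
     &s.b != (int *) 0  folds to 1,
     &s.b >  (int *) 0  folds to 1,
   since the address of an ordinary symbol is known to be non-null
   and, by the C++-style rules described above, compares greater than
   a null pointer.  */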
9136 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9137 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9138 the resulting offset is smaller in absolute value than the
9139 original one and has the same sign. */
9140 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9141 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9142 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9143 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9144 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9145 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9146 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9147 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9149 tree const1 = TREE_OPERAND (arg0, 1);
9150 tree const2 = TREE_OPERAND (arg1, 1);
9151 tree variable1 = TREE_OPERAND (arg0, 0);
9152 tree variable2 = TREE_OPERAND (arg1, 0);
9153 tree cst;
9154 const char * const warnmsg = G_("assuming signed overflow does not "
9155 "occur when combining constants around "
9156 "a comparison");
9158 /* Put the constant on the side where it doesn't overflow and is
9159 of lower absolute value than before and of the same sign. */
9160 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9161 ? MINUS_EXPR : PLUS_EXPR,
9162 const2, const1);
9163 if (!TREE_OVERFLOW (cst)
9164 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9165 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9167 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9168 return fold_build2_loc (loc, code, type,
9169 variable1,
9170 fold_build2_loc (loc, TREE_CODE (arg1),
9171 TREE_TYPE (arg1),
9172 variable2, cst));
9175 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9176 ? MINUS_EXPR : PLUS_EXPR,
9177 const1, const2);
9178 if (!TREE_OVERFLOW (cst)
9179 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9180 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9182 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9183 return fold_build2_loc (loc, code, type,
9184 fold_build2_loc (loc, TREE_CODE (arg0),
9185 TREE_TYPE (arg0),
9186 variable1, cst),
9187 variable2);
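/* For illustration (hypothetical input): with signed ints x and y,
     x + 4 < y + 6  becomes  x < y + 2,
   because the combined constant 2 has the same sign as 6 and a
   smaller absolute value, so the rewrite introduces no new
   overflow.  */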
9191 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9192 if (tem)
9193 return tem;
9195 /* If we are comparing an expression that just has comparisons
9196 of two integer values, arithmetic expressions of those comparisons,
9197 and constants, we can simplify it. There are only three cases
9198 to check: the two values can either be equal, the first can be
9199 greater, or the second can be greater. Fold the expression for
9200 those three values. Since each value must be 0 or 1, we have
9201 eight possibilities, each of which corresponds to the constant 0
9202 or 1 or one of the six possible comparisons.
9204 This handles common cases like (a > b) == 0 but also handles
9205 expressions like ((x > y) - (y > x)) > 0, which supposedly
9206 occur in macroized code. */
9208 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9210 tree cval1 = 0, cval2 = 0;
9212 if (twoval_comparison_p (arg0, &cval1, &cval2)
9213 /* Don't handle degenerate cases here; they should already
9214 have been handled anyway. */
9215 && cval1 != 0 && cval2 != 0
9216 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9217 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9218 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9219 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9220 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9221 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9222 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9224 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9225 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9227 /* We can't just pass T to eval_subst in case cval1 or cval2
9228 was the same as ARG1. */
9230 tree high_result
9231 = fold_build2_loc (loc, code, type,
9232 eval_subst (loc, arg0, cval1, maxval,
9233 cval2, minval),
9234 arg1);
9235 tree equal_result
9236 = fold_build2_loc (loc, code, type,
9237 eval_subst (loc, arg0, cval1, maxval,
9238 cval2, maxval),
9239 arg1);
9240 tree low_result
9241 = fold_build2_loc (loc, code, type,
9242 eval_subst (loc, arg0, cval1, minval,
9243 cval2, maxval),
9244 arg1);
9246 /* All three of these results should be 0 or 1. Confirm they are.
9247 Then use those values to select the proper code to use. */
9249 if (TREE_CODE (high_result) == INTEGER_CST
9250 && TREE_CODE (equal_result) == INTEGER_CST
9251 && TREE_CODE (low_result) == INTEGER_CST)
9253 /* Make a 3-bit mask with the high-order bit being the
9254 value for `>', the next for '=', and the low for '<'. */
9255 switch ((integer_onep (high_result) * 4)
9256 + (integer_onep (equal_result) * 2)
9257 + integer_onep (low_result))
9259 case 0:
9260 /* Always false. */
9261 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9262 case 1:
9263 code = LT_EXPR;
9264 break;
9265 case 2:
9266 code = EQ_EXPR;
9267 break;
9268 case 3:
9269 code = LE_EXPR;
9270 break;
9271 case 4:
9272 code = GT_EXPR;
9273 break;
9274 case 5:
9275 code = NE_EXPR;
9276 break;
9277 case 6:
9278 code = GE_EXPR;
9279 break;
9280 case 7:
9281 /* Always true. */
9282 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9285 return fold_build2_loc (loc, code, type, cval1, cval2);
9290 return NULL_TREE;
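/* An example of the three-value analysis above (hypothetical input):
   for (a > b) == 0 the trial evaluations give high_result = 0,
   equal_result = 1 and low_result = 1, i.e. mask value 3, so the
   whole expression folds to a <= b.  */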
9294 /* Subroutine of fold_binary. Optimize complex multiplications of the
9295 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9296 argument EXPR represents the expression "z" of type TYPE. */
9298 static tree
9299 fold_mult_zconjz (location_t loc, tree type, tree expr)
9301 tree itype = TREE_TYPE (type);
9302 tree rpart, ipart, tem;
9304 if (TREE_CODE (expr) == COMPLEX_EXPR)
9306 rpart = TREE_OPERAND (expr, 0);
9307 ipart = TREE_OPERAND (expr, 1);
9309 else if (TREE_CODE (expr) == COMPLEX_CST)
9311 rpart = TREE_REALPART (expr);
9312 ipart = TREE_IMAGPART (expr);
9314 else
9316 expr = save_expr (expr);
9317 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9318 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9321 rpart = save_expr (rpart);
9322 ipart = save_expr (ipart);
9323 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9324 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9325 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9326 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9327 build_zero_cst (itype));
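/* For reference, with z = a + b*i the identity used above is
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, whose imaginary
   part is zero, which is exactly the COMPLEX_EXPR built here.  */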
9331 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9332 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9333 true if successful. */
9335 static bool
9336 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9338 unsigned HOST_WIDE_INT i, nunits;
9340 if (TREE_CODE (arg) == VECTOR_CST
9341 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9343 for (i = 0; i < nunits; ++i)
9344 elts[i] = VECTOR_CST_ELT (arg, i);
9346 else if (TREE_CODE (arg) == CONSTRUCTOR)
9348 constructor_elt *elt;
9350 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9351 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9352 return false;
9353 else
9354 elts[i] = elt->value;
9356 else
9357 return false;
9358 for (; i < nelts; i++)
9359 elts[i]
9360 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9361 return true;
9364 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9365 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9366 NULL_TREE otherwise. */
9368 tree
9369 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9371 unsigned int i;
9372 unsigned HOST_WIDE_INT nelts;
9373 bool need_ctor = false;
9375 if (!sel.length ().is_constant (&nelts))
9376 return NULL_TREE;
9377 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9378 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9379 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9380 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9381 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9382 return NULL_TREE;
9384 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9385 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9386 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9387 return NULL_TREE;
9389 tree_vector_builder out_elts (type, nelts, 1);
9390 for (i = 0; i < nelts; i++)
9392 HOST_WIDE_INT index;
9393 if (!sel[i].is_constant (&index))
9394 return NULL_TREE;
9395 if (!CONSTANT_CLASS_P (in_elts[index]))
9396 need_ctor = true;
9397 out_elts.quick_push (unshare_expr (in_elts[index]));
9400 if (need_ctor)
9402 vec<constructor_elt, va_gc> *v;
9403 vec_alloc (v, nelts);
9404 for (i = 0; i < nelts; i++)
9405 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9406 return build_constructor (type, v);
9408 else
9409 return out_elts.build ();
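/* A worked example (hypothetical operands): with arg0 = { a, b, c, d },
   arg1 = { e, f, g, h } and sel = { 0, 5, 2, 7 }, the concatenated
   element array is { a, b, c, d, e, f, g, h } and the result is
   { a, f, c, h }; a CONSTRUCTOR rather than a VECTOR_CST is built
   whenever some selected element is not itself a constant.  */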
9412 /* Try to fold a pointer difference of type TYPE between two address
9413 expressions of array references AREF0 and AREF1 using location LOC. Return a
9414 simplified expression for the difference or NULL_TREE. */
9416 static tree
9417 fold_addr_of_array_ref_difference (location_t loc, tree type,
9418 tree aref0, tree aref1,
9419 bool use_pointer_diff)
9421 tree base0 = TREE_OPERAND (aref0, 0);
9422 tree base1 = TREE_OPERAND (aref1, 0);
9423 tree base_offset = build_int_cst (type, 0);
9425 /* If the bases are array references as well, recurse. If the bases
9426 are pointer indirections compute the difference of the pointers.
9427 If the bases are equal, we are set. */
9428 if ((TREE_CODE (base0) == ARRAY_REF
9429 && TREE_CODE (base1) == ARRAY_REF
9430 && (base_offset
9431 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9432 use_pointer_diff)))
9433 || (INDIRECT_REF_P (base0)
9434 && INDIRECT_REF_P (base1)
9435 && (base_offset
9436 = use_pointer_diff
9437 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9438 TREE_OPERAND (base0, 0),
9439 TREE_OPERAND (base1, 0))
9440 : fold_binary_loc (loc, MINUS_EXPR, type,
9441 fold_convert (type,
9442 TREE_OPERAND (base0, 0)),
9443 fold_convert (type,
9444 TREE_OPERAND (base1, 0)))))
9445 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9447 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9448 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9449 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9450 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9451 return fold_build2_loc (loc, PLUS_EXPR, type,
9452 base_offset,
9453 fold_build2_loc (loc, MULT_EXPR, type,
9454 diff, esz));
9456 return NULL_TREE;
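/* For illustration (hypothetical C input, assuming sizeof (int) == 4):
   for 'int a[10];' the byte difference &a[i] - &a[j] folds here to
   (i - j) * 4; the front end's subsequent exact division by the
   element size then yields i - j for the C-level pointer
   subtraction.  */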
9459 /* If the real or vector real constant CST of type TYPE has an exact
9460 inverse, return it, else return NULL. */
9462 tree
9463 exact_inverse (tree type, tree cst)
9465 REAL_VALUE_TYPE r;
9466 tree unit_type;
9467 machine_mode mode;
9469 switch (TREE_CODE (cst))
9471 case REAL_CST:
9472 r = TREE_REAL_CST (cst);
9474 if (exact_real_inverse (TYPE_MODE (type), &r))
9475 return build_real (type, r);
9477 return NULL_TREE;
9479 case VECTOR_CST:
9481 unit_type = TREE_TYPE (type);
9482 mode = TYPE_MODE (unit_type);
9484 tree_vector_builder elts;
9485 if (!elts.new_unary_operation (type, cst, false))
9486 return NULL_TREE;
9487 unsigned int count = elts.encoded_nelts ();
9488 for (unsigned int i = 0; i < count; ++i)
9490 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9491 if (!exact_real_inverse (mode, &r))
9492 return NULL_TREE;
9493 elts.quick_push (build_real (unit_type, r));
9496 return elts.build ();
9499 default:
9500 return NULL_TREE;
9504 /* Mask out the tz least significant bits of X of type TYPE where
9505 tz is the number of trailing zeroes in Y. */
9506 static wide_int
9507 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9509 int tz = wi::ctz (y);
9510 if (tz > 0)
9511 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9512 return x;
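/* Example: if Y is 24 (binary 11000, three trailing zeros), the
   result is X & ~7, i.e. X with its three least significant bits
   cleared.  */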
9515 /* Return true when T is an address and is known to be nonzero.
9516 For floating point we further ensure that T is not denormal.
9517 Similar logic is present in nonzero_address in rtlanal.h.
9519 If the return value is based on the assumption that signed overflow
9520 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9521 change *STRICT_OVERFLOW_P. */
9523 static bool
9524 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9526 tree type = TREE_TYPE (t);
9527 enum tree_code code;
9529 /* Doing something useful for floating point would need more work. */
9530 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9531 return false;
9533 code = TREE_CODE (t);
9534 switch (TREE_CODE_CLASS (code))
9536 case tcc_unary:
9537 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9538 strict_overflow_p);
9539 case tcc_binary:
9540 case tcc_comparison:
9541 return tree_binary_nonzero_warnv_p (code, type,
9542 TREE_OPERAND (t, 0),
9543 TREE_OPERAND (t, 1),
9544 strict_overflow_p);
9545 case tcc_constant:
9546 case tcc_declaration:
9547 case tcc_reference:
9548 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9550 default:
9551 break;
9554 switch (code)
9556 case TRUTH_NOT_EXPR:
9557 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9558 strict_overflow_p);
9560 case TRUTH_AND_EXPR:
9561 case TRUTH_OR_EXPR:
9562 case TRUTH_XOR_EXPR:
9563 return tree_binary_nonzero_warnv_p (code, type,
9564 TREE_OPERAND (t, 0),
9565 TREE_OPERAND (t, 1),
9566 strict_overflow_p);
9568 case COND_EXPR:
9569 case CONSTRUCTOR:
9570 case OBJ_TYPE_REF:
9571 case ASSERT_EXPR:
9572 case ADDR_EXPR:
9573 case WITH_SIZE_EXPR:
9574 case SSA_NAME:
9575 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9577 case COMPOUND_EXPR:
9578 case MODIFY_EXPR:
9579 case BIND_EXPR:
9580 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9581 strict_overflow_p);
9583 case SAVE_EXPR:
9584 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9585 strict_overflow_p);
9587 case CALL_EXPR:
9589 tree fndecl = get_callee_fndecl (t);
9590 if (!fndecl) return false;
9591 if (flag_delete_null_pointer_checks && !flag_check_new
9592 && DECL_IS_OPERATOR_NEW_P (fndecl)
9593 && !TREE_NOTHROW (fndecl))
9594 return true;
9595 if (flag_delete_null_pointer_checks
9596 && lookup_attribute ("returns_nonnull",
9597 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9598 return true;
9599 return alloca_call_p (t);
9602 default:
9603 break;
9605 return false;
9608 /* Return true when T is an address and is known to be nonzero.
9609 Handle warnings about undefined signed overflow. */
9611 bool
9612 tree_expr_nonzero_p (tree t)
9614 bool ret, strict_overflow_p;
9616 strict_overflow_p = false;
9617 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9618 if (strict_overflow_p)
9619 fold_overflow_warning (("assuming signed overflow does not occur when "
9620 "determining that expression is always "
9621 "non-zero"),
9622 WARN_STRICT_OVERFLOW_MISC);
9623 return ret;
9626 /* Return true if T is known not to be equal to an integer W. */
9628 bool
9629 expr_not_equal_to (tree t, const wide_int &w)
9631 wide_int min, max, nz;
9632 value_range_kind rtype;
9633 switch (TREE_CODE (t))
9635 case INTEGER_CST:
9636 return wi::to_wide (t) != w;
9638 case SSA_NAME:
9639 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9640 return false;
9641 rtype = get_range_info (t, &min, &max);
9642 if (rtype == VR_RANGE)
9644 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9645 return true;
9646 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9647 return true;
9649 else if (rtype == VR_ANTI_RANGE
9650 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9651 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9652 return true;
9653 /* If T has some known zero bits and W has any of those bits set,
9654 then T is known not to be equal to W. */
9655 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9656 TYPE_PRECISION (TREE_TYPE (t))), 0))
9657 return true;
9658 return false;
9660 default:
9661 return false;
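/* Example of the tests above (hypothetical SSA name): if value range
   information proves T is in [0, 41], expr_not_equal_to (T, 42)
   returns true; likewise, if get_nonzero_bits shows that only the low
   byte of T may be set, T cannot equal 0x100.  */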
9665 /* Fold a binary expression of code CODE and type TYPE with operands
9666 OP0 and OP1. LOC is the location of the resulting expression.
9667 Return the folded expression if folding is successful. Otherwise,
9668 return NULL_TREE. */
9670 tree
9671 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9672 tree op0, tree op1)
9674 enum tree_code_class kind = TREE_CODE_CLASS (code);
9675 tree arg0, arg1, tem;
9676 tree t1 = NULL_TREE;
9677 bool strict_overflow_p;
9678 unsigned int prec;
9680 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9681 && TREE_CODE_LENGTH (code) == 2
9682 && op0 != NULL_TREE
9683 && op1 != NULL_TREE);
9685 arg0 = op0;
9686 arg1 = op1;
9688 /* Strip any conversions that don't change the mode. This is
9689 safe for every expression, except for a comparison expression
9690 because its signedness is derived from its operands. So, in
9691 the latter case, only strip conversions that don't change the
9692 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9693 preserved.
9695 Note that this is done as an internal manipulation within the
9696 constant folder, in order to find the simplest representation
9697 of the arguments so that their form can be studied. In any
9698 case, the appropriate type conversions should be put back in
9699 the tree that will get out of the constant folder. */
9701 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9703 STRIP_SIGN_NOPS (arg0);
9704 STRIP_SIGN_NOPS (arg1);
9706 else
9708 STRIP_NOPS (arg0);
9709 STRIP_NOPS (arg1);
9712 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9713 constant but we can't do arithmetic on them. */
9714 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9716 tem = const_binop (code, type, arg0, arg1);
9717 if (tem != NULL_TREE)
9719 if (TREE_TYPE (tem) != type)
9720 tem = fold_convert_loc (loc, type, tem);
9721 return tem;
9725 /* If this is a commutative operation, and ARG0 is a constant, move it
9726 to ARG1 to reduce the number of tests below. */
9727 if (commutative_tree_code (code)
9728 && tree_swap_operands_p (arg0, arg1))
9729 return fold_build2_loc (loc, code, type, op1, op0);
9731 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9732 to ARG1 to reduce the number of tests below. */
9733 if (kind == tcc_comparison
9734 && tree_swap_operands_p (arg0, arg1))
9735 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9737 tem = generic_simplify (loc, code, type, op0, op1);
9738 if (tem)
9739 return tem;
9741 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9743 First check for cases where an arithmetic operation is applied to a
9744 compound, conditional, or comparison operation. Push the arithmetic
9745 operation inside the compound or conditional to see if any folding
9746 can then be done. Convert comparison to conditional for this purpose.
9747 This also optimizes non-constant cases that used to be done in
9748 expand_expr.
9750 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9751 where one of the operands is a comparison and the other is a comparison, a
9752 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9753 code below would make the expression more complex. Change it to a
9754 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9755 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9757 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9758 || code == EQ_EXPR || code == NE_EXPR)
9759 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9760 && ((truth_value_p (TREE_CODE (arg0))
9761 && (truth_value_p (TREE_CODE (arg1))
9762 || (TREE_CODE (arg1) == BIT_AND_EXPR
9763 && integer_onep (TREE_OPERAND (arg1, 1)))))
9764 || (truth_value_p (TREE_CODE (arg1))
9765 && (truth_value_p (TREE_CODE (arg0))
9766 || (TREE_CODE (arg0) == BIT_AND_EXPR
9767 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9769 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9770 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9771 : TRUTH_XOR_EXPR,
9772 boolean_type_node,
9773 fold_convert_loc (loc, boolean_type_node, arg0),
9774 fold_convert_loc (loc, boolean_type_node, arg1));
9776 if (code == EQ_EXPR)
9777 tem = invert_truthvalue_loc (loc, tem);
9779 return fold_convert_loc (loc, type, tem);
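  /* For illustration (hypothetical input): with comparisons as
     operands, (a < b) & (c < d) is rewritten as the TRUTH_AND_EXPR
     (a < b) && (c < d), and (a < b) == (c < d) becomes the inversion
     of the corresponding TRUTH_XOR_EXPR.  */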
9782 if (TREE_CODE_CLASS (code) == tcc_binary
9783 || TREE_CODE_CLASS (code) == tcc_comparison)
9785 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9787 tem = fold_build2_loc (loc, code, type,
9788 fold_convert_loc (loc, TREE_TYPE (op0),
9789 TREE_OPERAND (arg0, 1)), op1);
9790 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9791 tem);
9793 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9795 tem = fold_build2_loc (loc, code, type, op0,
9796 fold_convert_loc (loc, TREE_TYPE (op1),
9797 TREE_OPERAND (arg1, 1)));
9798 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9799 tem);
9802 if (TREE_CODE (arg0) == COND_EXPR
9803 || TREE_CODE (arg0) == VEC_COND_EXPR
9804 || COMPARISON_CLASS_P (arg0))
9806 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9807 arg0, arg1,
9808 /*cond_first_p=*/1);
9809 if (tem != NULL_TREE)
9810 return tem;
9813 if (TREE_CODE (arg1) == COND_EXPR
9814 || TREE_CODE (arg1) == VEC_COND_EXPR
9815 || COMPARISON_CLASS_P (arg1))
9817 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9818 arg1, arg0,
9819 /*cond_first_p=*/0);
9820 if (tem != NULL_TREE)
9821 return tem;
9825 switch (code)
9827 case MEM_REF:
9828 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9829 if (TREE_CODE (arg0) == ADDR_EXPR
9830 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9832 tree iref = TREE_OPERAND (arg0, 0);
9833 return fold_build2 (MEM_REF, type,
9834 TREE_OPERAND (iref, 0),
9835 int_const_binop (PLUS_EXPR, arg1,
9836 TREE_OPERAND (iref, 1)));
9839 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9840 if (TREE_CODE (arg0) == ADDR_EXPR
9841 && handled_component_p (TREE_OPERAND (arg0, 0)))
9843 tree base;
9844 poly_int64 coffset;
9845 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9846 &coffset);
9847 if (!base)
9848 return NULL_TREE;
9849 return fold_build2 (MEM_REF, type,
9850 build_fold_addr_expr (base),
9851 int_const_binop (PLUS_EXPR, arg1,
9852 size_int (coffset)));
9855 return NULL_TREE;
9857 case POINTER_PLUS_EXPR:
9858 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9859 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9860 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9861 return fold_convert_loc (loc, type,
9862 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9863 fold_convert_loc (loc, sizetype,
9864 arg1),
9865 fold_convert_loc (loc, sizetype,
9866 arg0)));
9868 return NULL_TREE;
9870 case PLUS_EXPR:
9871 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9873 /* X + (X / CST) * -CST is X % CST. */
9874 if (TREE_CODE (arg1) == MULT_EXPR
9875 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9876 && operand_equal_p (arg0,
9877 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9879 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9880 tree cst1 = TREE_OPERAND (arg1, 1);
9881 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9882 cst1, cst0);
9883 if (sum && integer_zerop (sum))
9884 return fold_convert_loc (loc, type,
9885 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9886 TREE_TYPE (arg0), arg0,
9887 cst0));
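	  /* Example: x + (x / 16) * -16 folds to x % 16, by the truncating
	     division identity x == (x / 16) * 16 + x % 16.  */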
9891 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9892 one. Make sure the type is not saturating and has the signedness of
9893 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9894 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9895 if ((TREE_CODE (arg0) == MULT_EXPR
9896 || TREE_CODE (arg1) == MULT_EXPR)
9897 && !TYPE_SATURATING (type)
9898 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9899 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9900 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9902 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9903 if (tem)
9904 return tem;
9907 if (! FLOAT_TYPE_P (type))
9909 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9910 (plus (plus (mult) (mult)) (foo)) so that we can
9911 take advantage of the factoring cases below. */
9912 if (ANY_INTEGRAL_TYPE_P (type)
9913 && TYPE_OVERFLOW_WRAPS (type)
9914 && (((TREE_CODE (arg0) == PLUS_EXPR
9915 || TREE_CODE (arg0) == MINUS_EXPR)
9916 && TREE_CODE (arg1) == MULT_EXPR)
9917 || ((TREE_CODE (arg1) == PLUS_EXPR
9918 || TREE_CODE (arg1) == MINUS_EXPR)
9919 && TREE_CODE (arg0) == MULT_EXPR)))
9921 tree parg0, parg1, parg, marg;
9922 enum tree_code pcode;
9924 if (TREE_CODE (arg1) == MULT_EXPR)
9925 parg = arg0, marg = arg1;
9926 else
9927 parg = arg1, marg = arg0;
9928 pcode = TREE_CODE (parg);
9929 parg0 = TREE_OPERAND (parg, 0);
9930 parg1 = TREE_OPERAND (parg, 1);
9931 STRIP_NOPS (parg0);
9932 STRIP_NOPS (parg1);
9934 if (TREE_CODE (parg0) == MULT_EXPR
9935 && TREE_CODE (parg1) != MULT_EXPR)
9936 return fold_build2_loc (loc, pcode, type,
9937 fold_build2_loc (loc, PLUS_EXPR, type,
9938 fold_convert_loc (loc, type,
9939 parg0),
9940 fold_convert_loc (loc, type,
9941 marg)),
9942 fold_convert_loc (loc, type, parg1));
9943 if (TREE_CODE (parg0) != MULT_EXPR
9944 && TREE_CODE (parg1) == MULT_EXPR)
9945 return
9946 fold_build2_loc (loc, PLUS_EXPR, type,
9947 fold_convert_loc (loc, type, parg0),
9948 fold_build2_loc (loc, pcode, type,
9949 fold_convert_loc (loc, type, marg),
9950 fold_convert_loc (loc, type,
9951 parg1)));
9954 else
9956 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9957 to __complex__ ( x, y ). This is not the same for SNaNs or
9958 if signed zeros are involved. */
9959 if (!HONOR_SNANS (element_mode (arg0))
9960 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9961 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9963 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9964 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9965 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9966 bool arg0rz = false, arg0iz = false;
9967 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9968 || (arg0i && (arg0iz = real_zerop (arg0i))))
9970 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9971 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9972 if (arg0rz && arg1i && real_zerop (arg1i))
9974 tree rp = arg1r ? arg1r
9975 : build1 (REALPART_EXPR, rtype, arg1);
9976 tree ip = arg0i ? arg0i
9977 : build1 (IMAGPART_EXPR, rtype, arg0);
9978 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9980 else if (arg0iz && arg1r && real_zerop (arg1r))
9982 tree rp = arg0r ? arg0r
9983 : build1 (REALPART_EXPR, rtype, arg0);
9984 tree ip = arg1i ? arg1i
9985 : build1 (IMAGPART_EXPR, rtype, arg1);
9986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9991 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9992 We associate floats only if the user has specified
9993 -fassociative-math. */
9994 if (flag_associative_math
9995 && TREE_CODE (arg1) == PLUS_EXPR
9996 && TREE_CODE (arg0) != MULT_EXPR)
9998 tree tree10 = TREE_OPERAND (arg1, 0);
9999 tree tree11 = TREE_OPERAND (arg1, 1);
10000 if (TREE_CODE (tree11) == MULT_EXPR
10001 && TREE_CODE (tree10) == MULT_EXPR)
10003 tree tree0;
10004 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10005 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10008 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10009 We associate floats only if the user has specified
10010 -fassociative-math. */
10011 if (flag_associative_math
10012 && TREE_CODE (arg0) == PLUS_EXPR
10013 && TREE_CODE (arg1) != MULT_EXPR)
10015 tree tree00 = TREE_OPERAND (arg0, 0);
10016 tree tree01 = TREE_OPERAND (arg0, 1);
10017 if (TREE_CODE (tree01) == MULT_EXPR
10018 && TREE_CODE (tree00) == MULT_EXPR)
10020 tree tree0;
10021 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10022 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10027 bit_rotate:
10028 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10029 is a rotate of A by C1 bits. */
10030 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10031 is a rotate of A by B bits.
10032 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10033 though in this case CODE must be | and not + or ^, otherwise
10034 it doesn't return A when B is 0. */
10036 enum tree_code code0, code1;
10037 tree rtype;
10038 code0 = TREE_CODE (arg0);
10039 code1 = TREE_CODE (arg1);
10040 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10041 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10042 && operand_equal_p (TREE_OPERAND (arg0, 0),
10043 TREE_OPERAND (arg1, 0), 0)
10044 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10045 TYPE_UNSIGNED (rtype))
10046 /* Only create rotates in complete modes. Other cases are not
10047 expanded properly. */
10048 && (element_precision (rtype)
10049 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10051 tree tree01, tree11;
10052 tree orig_tree01, orig_tree11;
10053 enum tree_code code01, code11;
10055 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10056 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10057 STRIP_NOPS (tree01);
10058 STRIP_NOPS (tree11);
10059 code01 = TREE_CODE (tree01);
10060 code11 = TREE_CODE (tree11);
10061 if (code11 != MINUS_EXPR
10062 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10064 std::swap (code0, code1);
10065 std::swap (code01, code11);
10066 std::swap (tree01, tree11);
10067 std::swap (orig_tree01, orig_tree11);
10069 if (code01 == INTEGER_CST
10070 && code11 == INTEGER_CST
10071 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10072 == element_precision (rtype)))
10074 tem = build2_loc (loc, LROTATE_EXPR,
10075 rtype, TREE_OPERAND (arg0, 0),
10076 code0 == LSHIFT_EXPR
10077 ? orig_tree01 : orig_tree11);
10078 return fold_convert_loc (loc, type, tem);
10080 else if (code11 == MINUS_EXPR)
10082 tree tree110, tree111;
10083 tree110 = TREE_OPERAND (tree11, 0);
10084 tree111 = TREE_OPERAND (tree11, 1);
10085 STRIP_NOPS (tree110);
10086 STRIP_NOPS (tree111);
10087 if (TREE_CODE (tree110) == INTEGER_CST
10088 && compare_tree_int (tree110,
10089 element_precision (rtype)) == 0
10090 && operand_equal_p (tree01, tree111, 0))
10092 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10093 ? LROTATE_EXPR : RROTATE_EXPR),
10094 rtype, TREE_OPERAND (arg0, 0),
10095 orig_tree01);
10096 return fold_convert_loc (loc, type, tem);
10099 else if (code == BIT_IOR_EXPR
10100 && code11 == BIT_AND_EXPR
10101 && pow2p_hwi (element_precision (rtype)))
10103 tree tree110, tree111;
10104 tree110 = TREE_OPERAND (tree11, 0);
10105 tree111 = TREE_OPERAND (tree11, 1);
10106 STRIP_NOPS (tree110);
10107 STRIP_NOPS (tree111);
10108 if (TREE_CODE (tree110) == NEGATE_EXPR
10109 && TREE_CODE (tree111) == INTEGER_CST
10110 && compare_tree_int (tree111,
10111 element_precision (rtype) - 1) == 0
10112 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10114 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10115 ? LROTATE_EXPR : RROTATE_EXPR),
10116 rtype, TREE_OPERAND (arg0, 0),
10117 orig_tree01);
10118 return fold_convert_loc (loc, type, tem);
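	/* For illustration (hypothetical input): for a 32-bit unsigned x,
	   (x << 3) + (x >> 29) is recognized here as a left rotate by 3,
	   and (x << n) | (x >> (-n & 31)) as a rotate by n; the latter
	   form needs | rather than + or ^ so that n == 0 still yields x.  */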
10124 associate:
10125 /* In most languages, we can't associate operations on floats through
10126 parentheses. Rather than remember where the parentheses were, we
10127 don't associate floats at all, unless the user has specified
10128 -fassociative-math.
10129 And, we need to make sure type is not saturating. */
10131 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10132 && !TYPE_SATURATING (type))
10134 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10135 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10136 tree atype = type;
10137 bool ok = true;
10139 /* Split both trees into variables, constants, and literals. Then
10140 associate each group together, the constants with literals,
10141 then the result with variables. This increases the chances of
10142 literals being recombined later and of generating relocatable
10143 expressions for the sum of a constant and literal. */
10144 var0 = split_tree (arg0, type, code,
10145 &minus_var0, &con0, &minus_con0,
10146 &lit0, &minus_lit0, 0);
10147 var1 = split_tree (arg1, type, code,
10148 &minus_var1, &con1, &minus_con1,
10149 &lit1, &minus_lit1, code == MINUS_EXPR);
10151 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10152 if (code == MINUS_EXPR)
10153 code = PLUS_EXPR;
10155 /* With undefined overflow prefer doing association in a type
10156 which wraps on overflow, if that is one of the operand types. */
10157 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10158 && !TYPE_OVERFLOW_WRAPS (type))
10160 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10161 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10162 atype = TREE_TYPE (arg0);
10163 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10164 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10165 atype = TREE_TYPE (arg1);
10166 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10169 /* With undefined overflow we can only associate constants with one
10170 variable, and constants whose association doesn't overflow. */
10171 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10172 && !TYPE_OVERFLOW_WRAPS (atype))
10174 if ((var0 && var1) || (minus_var0 && minus_var1))
10176 /* ??? If split_tree would handle NEGATE_EXPR we could
10177 simply reject these cases and the allowed cases would
10178 be the var0/minus_var1 ones. */
10179 tree tmp0 = var0 ? var0 : minus_var0;
10180 tree tmp1 = var1 ? var1 : minus_var1;
10181 bool one_neg = false;
10183 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10185 tmp0 = TREE_OPERAND (tmp0, 0);
10186 one_neg = !one_neg;
10188 if (CONVERT_EXPR_P (tmp0)
10189 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10190 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10191 <= TYPE_PRECISION (atype)))
10192 tmp0 = TREE_OPERAND (tmp0, 0);
10193 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10195 tmp1 = TREE_OPERAND (tmp1, 0);
10196 one_neg = !one_neg;
10198 if (CONVERT_EXPR_P (tmp1)
10199 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10200 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10201 <= TYPE_PRECISION (atype)))
10202 tmp1 = TREE_OPERAND (tmp1, 0);
10203 /* The only case we can still associate with two variables
10204 is if they cancel out. */
10205 if (!one_neg
10206 || !operand_equal_p (tmp0, tmp1, 0))
10207 ok = false;
10209 else if ((var0 && minus_var1
10210 && ! operand_equal_p (var0, minus_var1, 0))
10211 || (minus_var0 && var1
10212 && ! operand_equal_p (minus_var0, var1, 0)))
10213 ok = false;
10216 /* Only do something if we found more than two objects. Otherwise,
10217 nothing has changed and we risk infinite recursion. */
10218 if (ok
10219 && ((var0 != 0) + (var1 != 0)
10220 + (minus_var0 != 0) + (minus_var1 != 0)
10221 + (con0 != 0) + (con1 != 0)
10222 + (minus_con0 != 0) + (minus_con1 != 0)
10223 + (lit0 != 0) + (lit1 != 0)
10224 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10226 var0 = associate_trees (loc, var0, var1, code, atype);
10227 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10228 code, atype);
10229 con0 = associate_trees (loc, con0, con1, code, atype);
10230 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10231 code, atype);
10232 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10233 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10234 code, atype);
10236 if (minus_var0 && var0)
10238 var0 = associate_trees (loc, var0, minus_var0,
10239 MINUS_EXPR, atype);
10240 minus_var0 = 0;
10242 if (minus_con0 && con0)
10244 con0 = associate_trees (loc, con0, minus_con0,
10245 MINUS_EXPR, atype);
10246 minus_con0 = 0;
10249 /* Preserve the MINUS_EXPR if the negative part of the literal is
10250 greater than the positive part. Otherwise, the multiplicative
10251 folding code (i.e. extract_muldiv) may be fooled in case
10252 unsigned constants are subtracted, like in the following
10253 example: ((X*2 + 4) - 8U)/2. */
10254 if (minus_lit0 && lit0)
10256 if (TREE_CODE (lit0) == INTEGER_CST
10257 && TREE_CODE (minus_lit0) == INTEGER_CST
10258 && tree_int_cst_lt (lit0, minus_lit0)
10259 /* But avoid ending up with only negated parts. */
10260 && (var0 || con0))
10262 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10263 MINUS_EXPR, atype);
10264 lit0 = 0;
10266 else
10268 lit0 = associate_trees (loc, lit0, minus_lit0,
10269 MINUS_EXPR, atype);
10270 minus_lit0 = 0;
10274 /* Don't introduce overflows through reassociation. */
10275 if ((lit0 && TREE_OVERFLOW_P (lit0))
10276 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10277 return NULL_TREE;
10279 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10280 con0 = associate_trees (loc, con0, lit0, code, atype);
10281 lit0 = 0;
10282 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10283 code, atype);
10284 minus_lit0 = 0;
10286 /* Eliminate minus_con0. */
10287 if (minus_con0)
10289 if (con0)
10290 con0 = associate_trees (loc, con0, minus_con0,
10291 MINUS_EXPR, atype);
10292 else if (var0)
10293 var0 = associate_trees (loc, var0, minus_con0,
10294 MINUS_EXPR, atype);
10295 else
10296 gcc_unreachable ();
10297 minus_con0 = 0;
10300 /* Eliminate minus_var0. */
10301 if (minus_var0)
10303 if (con0)
10304 con0 = associate_trees (loc, con0, minus_var0,
10305 MINUS_EXPR, atype);
10306 else
10307 gcc_unreachable ();
10308 minus_var0 = 0;
10311 return
10312 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10313 code, atype));
10317 return NULL_TREE;
10319 case POINTER_DIFF_EXPR:
10320 case MINUS_EXPR:
10321 /* Fold &a[i] - &a[j] to i-j. */
10322 if (TREE_CODE (arg0) == ADDR_EXPR
10323 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10324 && TREE_CODE (arg1) == ADDR_EXPR
10325 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10327 tree tem = fold_addr_of_array_ref_difference (loc, type,
10328 TREE_OPERAND (arg0, 0),
10329 TREE_OPERAND (arg1, 0),
10330 code
10331 == POINTER_DIFF_EXPR);
10332 if (tem)
10333 return tem;
10336 /* Further transformations are not for pointers. */
10337 if (code == POINTER_DIFF_EXPR)
10338 return NULL_TREE;
10340 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10341 if (TREE_CODE (arg0) == NEGATE_EXPR
10342 && negate_expr_p (op1)
10343 /* If arg0 is e.g. unsigned int and type is int, then this could
10344 introduce UB, because if A is INT_MIN at runtime, the original
10345 expression can be well defined while the latter is not.
10346 See PR83269. */
10347 && !(ANY_INTEGRAL_TYPE_P (type)
10348 && TYPE_OVERFLOW_UNDEFINED (type)
10349 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10350 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10351 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10352 fold_convert_loc (loc, type,
10353 TREE_OPERAND (arg0, 0)));
10355 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10356 __complex__ ( x, -y ). This is not the same for SNaNs or if
10357 signed zeros are involved. */
10358 if (!HONOR_SNANS (element_mode (arg0))
10359 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10360 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10362 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10363 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10364 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10365 bool arg0rz = false, arg0iz = false;
10366 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10367 || (arg0i && (arg0iz = real_zerop (arg0i))))
10369 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10370 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10371 if (arg0rz && arg1i && real_zerop (arg1i))
10373 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10374 arg1r ? arg1r
10375 : build1 (REALPART_EXPR, rtype, arg1));
10376 tree ip = arg0i ? arg0i
10377 : build1 (IMAGPART_EXPR, rtype, arg0);
10378 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10380 else if (arg0iz && arg1r && real_zerop (arg1r))
10382 tree rp = arg0r ? arg0r
10383 : build1 (REALPART_EXPR, rtype, arg0);
10384 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10385 arg1i ? arg1i
10386 : build1 (IMAGPART_EXPR, rtype, arg1));
10387 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10392 /* A - B -> A + (-B) if B is easily negatable. */
10393 if (negate_expr_p (op1)
10394 && ! TYPE_OVERFLOW_SANITIZED (type)
10395 && ((FLOAT_TYPE_P (type)
10396 /* Avoid this transformation if B is a positive REAL_CST. */
10397 && (TREE_CODE (op1) != REAL_CST
10398 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10399 || INTEGRAL_TYPE_P (type)))
10400 return fold_build2_loc (loc, PLUS_EXPR, type,
10401 fold_convert_loc (loc, type, arg0),
10402 negate_expr (op1));
10404 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10405 one. Make sure the type is not saturating and has the signedness of
10406 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10407 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10408 if ((TREE_CODE (arg0) == MULT_EXPR
10409 || TREE_CODE (arg1) == MULT_EXPR)
10410 && !TYPE_SATURATING (type)
10411 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10412 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10413 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10415 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10416 if (tem)
10417 return tem;
10420 goto associate;
10422 case MULT_EXPR:
10423 if (! FLOAT_TYPE_P (type))
10425 /* Transform x * -C into -x * C if x is easily negatable. */
10426 if (TREE_CODE (op1) == INTEGER_CST
10427 && tree_int_cst_sgn (op1) == -1
10428 && negate_expr_p (op0)
10429 && negate_expr_p (op1)
10430 && (tem = negate_expr (op1)) != op1
10431 && ! TREE_OVERFLOW (tem))
10432 return fold_build2_loc (loc, MULT_EXPR, type,
10433 fold_convert_loc (loc, type,
10434 negate_expr (op0)), tem);
10436 strict_overflow_p = false;
10437 if (TREE_CODE (arg1) == INTEGER_CST
10438 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10439 &strict_overflow_p)) != 0)
10441 if (strict_overflow_p)
10442 fold_overflow_warning (("assuming signed overflow does not "
10443 "occur when simplifying "
10444 "multiplication"),
10445 WARN_STRICT_OVERFLOW_MISC);
10446 return fold_convert_loc (loc, type, tem);
10449 /* Optimize z * conj(z) for integer complex numbers. */
10450 if (TREE_CODE (arg0) == CONJ_EXPR
10451 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10452 return fold_mult_zconjz (loc, type, arg1);
10453 if (TREE_CODE (arg1) == CONJ_EXPR
10454 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10455 return fold_mult_zconjz (loc, type, arg0);
10457 else
10459 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10460 This is not the same for NaNs or if signed zeros are
10461 involved. */
10462 if (!HONOR_NANS (arg0)
10463 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10464 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10465 && TREE_CODE (arg1) == COMPLEX_CST
10466 && real_zerop (TREE_REALPART (arg1)))
10468 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10469 if (real_onep (TREE_IMAGPART (arg1)))
10470 return
10471 fold_build2_loc (loc, COMPLEX_EXPR, type,
10472 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10473 rtype, arg0)),
10474 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10475 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10476 return
10477 fold_build2_loc (loc, COMPLEX_EXPR, type,
10478 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10479 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10480 rtype, arg0)));
10483 /* Optimize z * conj(z) for floating point complex numbers.
10484 Guarded by flag_unsafe_math_optimizations as non-finite
10485 imaginary components don't produce scalar results. */
10486 if (flag_unsafe_math_optimizations
10487 && TREE_CODE (arg0) == CONJ_EXPR
10488 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10489 return fold_mult_zconjz (loc, type, arg1);
10490 if (flag_unsafe_math_optimizations
10491 && TREE_CODE (arg1) == CONJ_EXPR
10492 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10493 return fold_mult_zconjz (loc, type, arg0);
10495 goto associate;
10497 case BIT_IOR_EXPR:
10498 /* Canonicalize (X & C1) | C2. */
10499 if (TREE_CODE (arg0) == BIT_AND_EXPR
10500 && TREE_CODE (arg1) == INTEGER_CST
10501 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10503 int width = TYPE_PRECISION (type), w;
10504 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10505 wide_int c2 = wi::to_wide (arg1);
10507 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10508 if ((c1 & c2) == c1)
10509 return omit_one_operand_loc (loc, type, arg1,
10510 TREE_OPERAND (arg0, 0));
10512 wide_int msk = wi::mask (width, false,
10513 TYPE_PRECISION (TREE_TYPE (arg1)));
10515 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10516 if (wi::bit_and_not (msk, c1 | c2) == 0)
10518 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10519 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10522 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10523 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10524 mode which allows further optimizations. */
10525 c1 &= msk;
10526 c2 &= msk;
10527 wide_int c3 = wi::bit_and_not (c1, c2);
10528 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10530 wide_int mask = wi::mask (w, false,
10531 TYPE_PRECISION (type));
10532 if (((c1 | c2) & mask) == mask
10533 && wi::bit_and_not (c1, mask) == 0)
10535 c3 = mask;
10536 break;
10540 if (c3 != c1)
10542 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10543 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10544 wide_int_to_tree (type, c3));
10545 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
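      /* Examples: (x & 0x0f) | 0xff folds to 0xff by the first rule above,
	 since 0x0f & 0xff == 0x0f; and (x & 0x3f) | 0x0c becomes
	 (x & 0x33) | 0x0c, dropping from the mask the bits already forced
	 to 1 by the constant.  */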
10549 /* See if this can be simplified into a rotate first. If that
10550 is unsuccessful continue in the association code. */
10551 goto bit_rotate;
10553 case BIT_XOR_EXPR:
10554 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10555 if (TREE_CODE (arg0) == BIT_AND_EXPR
10556 && INTEGRAL_TYPE_P (type)
10557 && integer_onep (TREE_OPERAND (arg0, 1))
10558 && integer_onep (arg1))
10559 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10560 build_zero_cst (TREE_TYPE (arg0)));
10562 /* See if this can be simplified into a rotate first. If that
10563 is unsuccessful continue in the association code. */
10564 goto bit_rotate;
10566 case BIT_AND_EXPR:
10567 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10568 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10569 && INTEGRAL_TYPE_P (type)
10570 && integer_onep (TREE_OPERAND (arg0, 1))
10571 && integer_onep (arg1))
10573 tree tem2;
10574 tem = TREE_OPERAND (arg0, 0);
10575 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10576 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10577 tem, tem2);
10578 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10579 build_zero_cst (TREE_TYPE (tem)));
10581 /* Fold ~X & 1 as (X & 1) == 0. */
10582 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10583 && INTEGRAL_TYPE_P (type)
10584 && integer_onep (arg1))
10586 tree tem2;
10587 tem = TREE_OPERAND (arg0, 0);
10588 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10589 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10590 tem, tem2);
10591 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10592 build_zero_cst (TREE_TYPE (tem)));
10594 /* Fold !X & 1 as X == 0. */
10595 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10596 && integer_onep (arg1))
10598 tem = TREE_OPERAND (arg0, 0);
10599 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10600 build_zero_cst (TREE_TYPE (tem)));
10603 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10604 multiple of 1 << CST. */
10605 if (TREE_CODE (arg1) == INTEGER_CST)
10607 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10608 wide_int ncst1 = -cst1;
10609 if ((cst1 & ncst1) == ncst1
10610 && multiple_of_p (type, arg0,
10611 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10612 return fold_convert_loc (loc, type, arg0);
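      /* Example: (x * 12) & -4 folds to x * 12, because -4 is -(1 << 2)
	 and x * 12 is always a multiple of 4, so the mask clears bits
	 that are already zero.  */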
10615 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10616 bits from CST2. */
10617 if (TREE_CODE (arg1) == INTEGER_CST
10618 && TREE_CODE (arg0) == MULT_EXPR
10619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10621 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10622 wide_int masked
10623 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10625 if (masked == 0)
10626 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10627 arg0, arg1);
10628 else if (masked != warg1)
10630 /* Avoid the transform if arg1 is a mask of some
10631 mode which allows further optimizations. */
10632 int pop = wi::popcount (warg1);
10633 if (!(pop >= BITS_PER_UNIT
10634 && pow2p_hwi (pop)
10635 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10636 return fold_build2_loc (loc, code, type, op0,
10637 wide_int_to_tree (type, masked));
10641 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10642 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10643 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10645 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10647 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10648 if (mask == -1)
10649 return
10650 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
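      /* Example: for 'unsigned char c', ((int) c & 0377) folds to (int) c,
	 since the mask covers the full precision of the unsigned source
	 type.  */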
10653 goto associate;
10655 case RDIV_EXPR:
10656 /* Don't touch a floating-point divide by zero unless the mode
10657 of the constant can represent infinity. */
10658 if (TREE_CODE (arg1) == REAL_CST
10659 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10660 && real_zerop (arg1))
10661 return NULL_TREE;
10663 /* (-A) / (-B) -> A / B */
10664 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10665 return fold_build2_loc (loc, RDIV_EXPR, type,
10666 TREE_OPERAND (arg0, 0),
10667 negate_expr (arg1));
10668 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10669 return fold_build2_loc (loc, RDIV_EXPR, type,
10670 negate_expr (arg0),
10671 TREE_OPERAND (arg1, 0));
10672 return NULL_TREE;
10674 case TRUNC_DIV_EXPR:
10675 /* Fall through */
10677 case FLOOR_DIV_EXPR:
10678 /* Simplify A / (B << N) where A and B are positive and B is
10679 a power of 2, to A >> (N + log2(B)). */
10680 strict_overflow_p = false;
10681 if (TREE_CODE (arg1) == LSHIFT_EXPR
10682 && (TYPE_UNSIGNED (type)
10683 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10685 tree sval = TREE_OPERAND (arg1, 0);
10686 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10688 tree sh_cnt = TREE_OPERAND (arg1, 1);
10689 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10690 wi::exact_log2 (wi::to_wide (sval)));
10692 if (strict_overflow_p)
10693 fold_overflow_warning (("assuming signed overflow does not "
10694 "occur when simplifying A / (B << N)"),
10695 WARN_STRICT_OVERFLOW_MISC);
10697 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10698 sh_cnt, pow2);
10699 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10700 fold_convert_loc (loc, type, arg0), sh_cnt);
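	  /* Example (unsigned x): x / (4 << n) folds to x >> (n + 2),
	     since 4 << n == 1 << (n + 2).  */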
10704 /* Fall through */
10706 case ROUND_DIV_EXPR:
10707 case CEIL_DIV_EXPR:
10708 case EXACT_DIV_EXPR:
10709 if (integer_zerop (arg1))
10710 return NULL_TREE;
10712 /* Convert -A / -B to A / B when the type is signed and overflow is
10713 undefined. */
10714 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10715 && TREE_CODE (op0) == NEGATE_EXPR
10716 && negate_expr_p (op1))
10718 if (INTEGRAL_TYPE_P (type))
10719 fold_overflow_warning (("assuming signed overflow does not occur "
10720 "when distributing negation across "
10721 "division"),
10722 WARN_STRICT_OVERFLOW_MISC);
10723 return fold_build2_loc (loc, code, type,
10724 fold_convert_loc (loc, type,
10725 TREE_OPERAND (arg0, 0)),
10726 negate_expr (op1));
10728 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10729 && TREE_CODE (arg1) == NEGATE_EXPR
10730 && negate_expr_p (op0))
10732 if (INTEGRAL_TYPE_P (type))
10733 fold_overflow_warning (("assuming signed overflow does not occur "
10734 "when distributing negation across "
10735 "division"),
10736 WARN_STRICT_OVERFLOW_MISC);
10737 return fold_build2_loc (loc, code, type,
10738 negate_expr (op0),
10739 fold_convert_loc (loc, type,
10740 TREE_OPERAND (arg1, 0)));
10743 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10744 operation, EXACT_DIV_EXPR.
10746 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10747 At one time others generated faster code; it's not clear if they do
10748 after the last round of changes to the DIV code in expmed.c. */
10749 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10750 && multiple_of_p (type, arg0, arg1))
10751 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10752 fold_convert (type, arg0),
10753 fold_convert (type, arg1));
10755 strict_overflow_p = false;
10756 if (TREE_CODE (arg1) == INTEGER_CST
10757 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10758 &strict_overflow_p)) != 0)
10760 if (strict_overflow_p)
10761 fold_overflow_warning (("assuming signed overflow does not occur "
10762 "when simplifying division"),
10763 WARN_STRICT_OVERFLOW_MISC);
10764 return fold_convert_loc (loc, type, tem);
10767 return NULL_TREE;
10769 case CEIL_MOD_EXPR:
10770 case FLOOR_MOD_EXPR:
10771 case ROUND_MOD_EXPR:
10772 case TRUNC_MOD_EXPR:
10773 strict_overflow_p = false;
10774 if (TREE_CODE (arg1) == INTEGER_CST
10775 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10776 &strict_overflow_p)) != 0)
10778 if (strict_overflow_p)
10779 fold_overflow_warning (("assuming signed overflow does not occur "
10780 "when simplifying modulus"),
10781 WARN_STRICT_OVERFLOW_MISC);
10782 return fold_convert_loc (loc, type, tem);
10785 return NULL_TREE;
10787 case LROTATE_EXPR:
10788 case RROTATE_EXPR:
10789 case RSHIFT_EXPR:
10790 case LSHIFT_EXPR:
10791 /* Since negative shift count is not well-defined,
10792 don't try to compute it in the compiler. */
10793 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10794 return NULL_TREE;
10796 prec = element_precision (type);
10798 /* If we have a rotate of a bit operation with the rotate count and
10799 the second operand of the bit operation both constant,
10800 permute the two operations. */
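	 Concretely, in a 32-bit type (X & 0xff00) r>> 8 becomes
	 (X r>> 8) & (0xff00 r>> 8), i.e. (X r>> 8) & 0xff.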
10801 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10802 && (TREE_CODE (arg0) == BIT_AND_EXPR
10803 || TREE_CODE (arg0) == BIT_IOR_EXPR
10804 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10805 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10807 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10808 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10809 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10810 fold_build2_loc (loc, code, type,
10811 arg00, arg1),
10812 fold_build2_loc (loc, code, type,
10813 arg01, arg1));
 10816	 /* Two consecutive rotates adding up to some integer
10817 multiple of the precision of the type can be ignored. */
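	 For instance, in a 32-bit type (X r>> 8) r>> 24 rotates by
	 8 + 24 == 32 bits in total, a whole multiple of the precision,
	 and therefore folds to plain X.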
10818 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10819 && TREE_CODE (arg0) == RROTATE_EXPR
10820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10821 && wi::umod_trunc (wi::to_wide (arg1)
10822 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10823 prec) == 0)
10824 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10826 return NULL_TREE;
10828 case MIN_EXPR:
10829 case MAX_EXPR:
10830 goto associate;
10832 case TRUTH_ANDIF_EXPR:
10833 /* Note that the operands of this must be ints
10834 and their values must be 0 or 1.
10835 ("true" is a fixed value perhaps depending on the language.) */
10836 /* If first arg is constant zero, return it. */
10837 if (integer_zerop (arg0))
10838 return fold_convert_loc (loc, type, arg0);
10839 /* FALLTHRU */
10840 case TRUTH_AND_EXPR:
10841 /* If either arg is constant true, drop it. */
10842 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10843 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10844 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10845 /* Preserve sequence points. */
10846 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10847 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10848 /* If second arg is constant zero, result is zero, but first arg
10849 must be evaluated. */
10850 if (integer_zerop (arg1))
10851 return omit_one_operand_loc (loc, type, arg1, arg0);
10852 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10853 case will be handled here. */
10854 if (integer_zerop (arg0))
10855 return omit_one_operand_loc (loc, type, arg0, arg1);
10857 /* !X && X is always false. */
10858 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10859 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10860 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10861 /* X && !X is always false. */
10862 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10864 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10866 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10867 means A >= Y && A != MAX, but in this case we know that
10868 A < X <= MAX. */
10870 if (!TREE_SIDE_EFFECTS (arg0)
10871 && !TREE_SIDE_EFFECTS (arg1))
10873 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10874 if (tem && !operand_equal_p (tem, arg0, 0))
10875 return fold_build2_loc (loc, code, type, tem, arg1);
10877 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10878 if (tem && !operand_equal_p (tem, arg1, 0))
10879 return fold_build2_loc (loc, code, type, arg0, tem);
10882 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10883 != NULL_TREE)
10884 return tem;
10886 return NULL_TREE;
10888 case TRUTH_ORIF_EXPR:
10889 /* Note that the operands of this must be ints
10890 and their values must be 0 or true.
10891 ("true" is a fixed value perhaps depending on the language.) */
10892 /* If first arg is constant true, return it. */
10893 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10894 return fold_convert_loc (loc, type, arg0);
10895 /* FALLTHRU */
10896 case TRUTH_OR_EXPR:
10897 /* If either arg is constant zero, drop it. */
10898 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10899 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10900 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10901 /* Preserve sequence points. */
10902 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10903 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10904 /* If second arg is constant true, result is true, but we must
10905 evaluate first arg. */
10906 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10907 return omit_one_operand_loc (loc, type, arg1, arg0);
10908 /* Likewise for first arg, but note this only occurs here for
10909 TRUTH_OR_EXPR. */
10910 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10911 return omit_one_operand_loc (loc, type, arg0, arg1);
10913 /* !X || X is always true. */
10914 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10915 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10916 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10917 /* X || !X is always true. */
10918 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10919 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10920 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10922 /* (X && !Y) || (!X && Y) is X ^ Y */
10923 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10924 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10926 tree a0, a1, l0, l1, n0, n1;
10928 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10929 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10931 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10932 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10934 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10935 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10937 if ((operand_equal_p (n0, a0, 0)
10938 && operand_equal_p (n1, a1, 0))
10939 || (operand_equal_p (n0, a1, 0)
10940 && operand_equal_p (n1, a0, 0)))
10941 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10944 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10945 != NULL_TREE)
10946 return tem;
10948 return NULL_TREE;
10950 case TRUTH_XOR_EXPR:
10951 /* If the second arg is constant zero, drop it. */
10952 if (integer_zerop (arg1))
10953 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10954 /* If the second arg is constant true, this is a logical inversion. */
10955 if (integer_onep (arg1))
10957 tem = invert_truthvalue_loc (loc, arg0);
10958 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10960 /* Identical arguments cancel to zero. */
10961 if (operand_equal_p (arg0, arg1, 0))
10962 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10964 /* !X ^ X is always true. */
10965 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10966 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10967 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10969 /* X ^ !X is always true. */
10970 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10971 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10972 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10974 return NULL_TREE;
10976 case EQ_EXPR:
10977 case NE_EXPR:
10978 STRIP_NOPS (arg0);
10979 STRIP_NOPS (arg1);
10981 tem = fold_comparison (loc, code, type, op0, op1);
10982 if (tem != NULL_TREE)
10983 return tem;
10985 /* bool_var != 1 becomes !bool_var. */
10986 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10987 && code == NE_EXPR)
10988 return fold_convert_loc (loc, type,
10989 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10990 TREE_TYPE (arg0), arg0));
10992 /* bool_var == 0 becomes !bool_var. */
10993 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10994 && code == EQ_EXPR)
10995 return fold_convert_loc (loc, type,
10996 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10997 TREE_TYPE (arg0), arg0));
10999 /* !exp != 0 becomes !exp */
11000 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11001 && code == NE_EXPR)
11002 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11004 /* If this is an EQ or NE comparison with zero and ARG0 is
11005 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11006 two operations, but the latter can be done in one less insn
11007 on machines that have only two-operand insns or on which a
11008 constant cannot be the first operand. */
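	 E.g. ((1 << n) & x) != 0 becomes ((x >> n) & 1) != 0,
	 replacing the shifted constant by a constant mask of 1.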
11009 if (TREE_CODE (arg0) == BIT_AND_EXPR
11010 && integer_zerop (arg1))
11012 tree arg00 = TREE_OPERAND (arg0, 0);
11013 tree arg01 = TREE_OPERAND (arg0, 1);
11014 if (TREE_CODE (arg00) == LSHIFT_EXPR
11015 && integer_onep (TREE_OPERAND (arg00, 0)))
11017 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11018 arg01, TREE_OPERAND (arg00, 1));
11019 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11020 build_int_cst (TREE_TYPE (arg0), 1));
11021 return fold_build2_loc (loc, code, type,
11022 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11023 arg1);
11025 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11026 && integer_onep (TREE_OPERAND (arg01, 0)))
11028 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11029 arg00, TREE_OPERAND (arg01, 1));
11030 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11031 build_int_cst (TREE_TYPE (arg0), 1));
11032 return fold_build2_loc (loc, code, type,
11033 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11034 arg1);
11038 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11039 C1 is a valid shift constant, and C2 is a power of two, i.e.
11040 a single bit. */
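	 E.g. ((x >> 3) & 4) != 0 becomes (x & (4 << 3)) != 0,
	 i.e. (x & 32) != 0, provided the shifted mask does not
	 overflow the precision.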
11041 if (TREE_CODE (arg0) == BIT_AND_EXPR
11042 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11043 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11044 == INTEGER_CST
11045 && integer_pow2p (TREE_OPERAND (arg0, 1))
11046 && integer_zerop (arg1))
11048 tree itype = TREE_TYPE (arg0);
11049 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11050 prec = TYPE_PRECISION (itype);
11052 /* Check for a valid shift count. */
11053 if (wi::ltu_p (wi::to_wide (arg001), prec))
11055 tree arg01 = TREE_OPERAND (arg0, 1);
11056 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11057 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11058 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11059 can be rewritten as (X & (C2 << C1)) != 0. */
11060 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11062 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11063 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11064 return fold_build2_loc (loc, code, type, tem,
11065 fold_convert_loc (loc, itype, arg1));
11067 /* Otherwise, for signed (arithmetic) shifts,
11068 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11069 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11070 else if (!TYPE_UNSIGNED (itype))
11071 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11072 arg000, build_int_cst (itype, 0));
 11073	 /* Otherwise, for unsigned (logical) shifts,
11074 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11075 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11076 else
11077 return omit_one_operand_loc (loc, type,
11078 code == EQ_EXPR ? integer_one_node
11079 : integer_zero_node,
11080 arg000);
11084 /* If this is a comparison of a field, we may be able to simplify it. */
11085 if ((TREE_CODE (arg0) == COMPONENT_REF
11086 || TREE_CODE (arg0) == BIT_FIELD_REF)
11087 /* Handle the constant case even without -O
11088 to make sure the warnings are given. */
11089 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11091 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11092 if (t1)
11093 return t1;
11096 /* Optimize comparisons of strlen vs zero to a compare of the
11097 first character of the string vs zero. To wit,
11098 strlen(ptr) == 0 => *ptr == 0
11099 strlen(ptr) != 0 => *ptr != 0
11100 Other cases should reduce to one of these two (or a constant)
11101 due to the return value of strlen being unsigned. */
11102 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11104 tree fndecl = get_callee_fndecl (arg0);
11106 if (fndecl
11107 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11108 && call_expr_nargs (arg0) == 1
11109 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11110 == POINTER_TYPE))
11112 tree ptrtype
11113 = build_pointer_type (build_qualified_type (char_type_node,
11114 TYPE_QUAL_CONST));
11115 tree ptr = fold_convert_loc (loc, ptrtype,
11116 CALL_EXPR_ARG (arg0, 0));
11117 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11118 return fold_build2_loc (loc, code, type, iref,
11119 build_int_cst (TREE_TYPE (iref), 0));
11123 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11124 of X. Similarly fold (X >> C) == 0 into X >= 0. */
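	 Concretely, for a 32-bit int x, (x >> 31) != 0 becomes x < 0
	 and (x >> 31) == 0 becomes x >= 0.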
11125 if (TREE_CODE (arg0) == RSHIFT_EXPR
11126 && integer_zerop (arg1)
11127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11129 tree arg00 = TREE_OPERAND (arg0, 0);
11130 tree arg01 = TREE_OPERAND (arg0, 1);
11131 tree itype = TREE_TYPE (arg00);
11132 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11134 if (TYPE_UNSIGNED (itype))
11136 itype = signed_type_for (itype);
11137 arg00 = fold_convert_loc (loc, itype, arg00);
11139 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11140 type, arg00, build_zero_cst (itype));
11144 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11145 (X & C) == 0 when C is a single bit. */
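	 E.g. (~x & 8) == 0 becomes (x & 8) != 0, since bit 3 is
	 clear in ~x exactly when it is set in x.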
11146 if (TREE_CODE (arg0) == BIT_AND_EXPR
11147 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11148 && integer_zerop (arg1)
11149 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11151 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11152 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11153 TREE_OPERAND (arg0, 1));
11154 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11155 type, tem,
11156 fold_convert_loc (loc, TREE_TYPE (arg0),
11157 arg1));
11160 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11161 constant C is a power of two, i.e. a single bit. */
11162 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11163 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11164 && integer_zerop (arg1)
11165 && integer_pow2p (TREE_OPERAND (arg0, 1))
11166 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11167 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11169 tree arg00 = TREE_OPERAND (arg0, 0);
11170 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11171 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11174 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 11175	 when C is a power of two, i.e. a single bit. */
11176 if (TREE_CODE (arg0) == BIT_AND_EXPR
11177 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11178 && integer_zerop (arg1)
11179 && integer_pow2p (TREE_OPERAND (arg0, 1))
11180 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11181 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11183 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11184 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11185 arg000, TREE_OPERAND (arg0, 1));
11186 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11187 tem, build_int_cst (TREE_TYPE (tem), 0));
11190 if (integer_zerop (arg1)
11191 && tree_expr_nonzero_p (arg0))
 11193	 tree res = constant_boolean_node (code == NE_EXPR, type);
11194 return omit_one_operand_loc (loc, type, res, arg0);
 11197	 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
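	 For instance, (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0:
	 the masked bits agree exactly when their XOR masks to zero.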
11198 if (TREE_CODE (arg0) == BIT_AND_EXPR
11199 && TREE_CODE (arg1) == BIT_AND_EXPR)
11201 tree arg00 = TREE_OPERAND (arg0, 0);
11202 tree arg01 = TREE_OPERAND (arg0, 1);
11203 tree arg10 = TREE_OPERAND (arg1, 0);
11204 tree arg11 = TREE_OPERAND (arg1, 1);
11205 tree itype = TREE_TYPE (arg0);
11207 if (operand_equal_p (arg01, arg11, 0))
11209 tem = fold_convert_loc (loc, itype, arg10);
11210 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11211 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11212 return fold_build2_loc (loc, code, type, tem,
11213 build_zero_cst (itype));
11215 if (operand_equal_p (arg01, arg10, 0))
11217 tem = fold_convert_loc (loc, itype, arg11);
11218 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11219 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11220 return fold_build2_loc (loc, code, type, tem,
11221 build_zero_cst (itype));
11223 if (operand_equal_p (arg00, arg11, 0))
11225 tem = fold_convert_loc (loc, itype, arg10);
11226 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11227 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11228 return fold_build2_loc (loc, code, type, tem,
11229 build_zero_cst (itype));
11231 if (operand_equal_p (arg00, arg10, 0))
11233 tem = fold_convert_loc (loc, itype, arg11);
11234 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11235 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11236 return fold_build2_loc (loc, code, type, tem,
11237 build_zero_cst (itype));
11241 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11242 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11244 tree arg00 = TREE_OPERAND (arg0, 0);
11245 tree arg01 = TREE_OPERAND (arg0, 1);
11246 tree arg10 = TREE_OPERAND (arg1, 0);
11247 tree arg11 = TREE_OPERAND (arg1, 1);
11248 tree itype = TREE_TYPE (arg0);
11250 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11251 operand_equal_p guarantees no side-effects so we don't need
11252 to use omit_one_operand on Z. */
11253 if (operand_equal_p (arg01, arg11, 0))
11254 return fold_build2_loc (loc, code, type, arg00,
11255 fold_convert_loc (loc, TREE_TYPE (arg00),
11256 arg10));
11257 if (operand_equal_p (arg01, arg10, 0))
11258 return fold_build2_loc (loc, code, type, arg00,
11259 fold_convert_loc (loc, TREE_TYPE (arg00),
11260 arg11));
11261 if (operand_equal_p (arg00, arg11, 0))
11262 return fold_build2_loc (loc, code, type, arg01,
11263 fold_convert_loc (loc, TREE_TYPE (arg01),
11264 arg10));
11265 if (operand_equal_p (arg00, arg10, 0))
11266 return fold_build2_loc (loc, code, type, arg01,
11267 fold_convert_loc (loc, TREE_TYPE (arg01),
11268 arg11));
11270 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
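	 E.g. (x ^ 5) == (y ^ 3) becomes (x ^ (5 ^ 3)) == y,
	 i.e. (x ^ 6) == y.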
11271 if (TREE_CODE (arg01) == INTEGER_CST
11272 && TREE_CODE (arg11) == INTEGER_CST)
11274 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11275 fold_convert_loc (loc, itype, arg11));
11276 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11277 return fold_build2_loc (loc, code, type, tem,
11278 fold_convert_loc (loc, itype, arg10));
11282 /* Attempt to simplify equality/inequality comparisons of complex
11283 values. Only lower the comparison if the result is known or
11284 can be simplified to a single scalar comparison. */
11285 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11286 || TREE_CODE (arg0) == COMPLEX_CST)
11287 && (TREE_CODE (arg1) == COMPLEX_EXPR
11288 || TREE_CODE (arg1) == COMPLEX_CST))
11290 tree real0, imag0, real1, imag1;
11291 tree rcond, icond;
11293 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11295 real0 = TREE_OPERAND (arg0, 0);
11296 imag0 = TREE_OPERAND (arg0, 1);
11298 else
11300 real0 = TREE_REALPART (arg0);
11301 imag0 = TREE_IMAGPART (arg0);
11304 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11306 real1 = TREE_OPERAND (arg1, 0);
11307 imag1 = TREE_OPERAND (arg1, 1);
11309 else
11311 real1 = TREE_REALPART (arg1);
11312 imag1 = TREE_IMAGPART (arg1);
11315 rcond = fold_binary_loc (loc, code, type, real0, real1);
11316 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11318 if (integer_zerop (rcond))
11320 if (code == EQ_EXPR)
11321 return omit_two_operands_loc (loc, type, boolean_false_node,
11322 imag0, imag1);
11323 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11325 else
11327 if (code == NE_EXPR)
11328 return omit_two_operands_loc (loc, type, boolean_true_node,
11329 imag0, imag1);
11330 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11334 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11335 if (icond && TREE_CODE (icond) == INTEGER_CST)
11337 if (integer_zerop (icond))
11339 if (code == EQ_EXPR)
11340 return omit_two_operands_loc (loc, type, boolean_false_node,
11341 real0, real1);
11342 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11344 else
11346 if (code == NE_EXPR)
11347 return omit_two_operands_loc (loc, type, boolean_true_node,
11348 real0, real1);
11349 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11354 return NULL_TREE;
11356 case LT_EXPR:
11357 case GT_EXPR:
11358 case LE_EXPR:
11359 case GE_EXPR:
11360 tem = fold_comparison (loc, code, type, op0, op1);
11361 if (tem != NULL_TREE)
11362 return tem;
11364 /* Transform comparisons of the form X +- C CMP X. */
11365 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11366 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11367 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11368 && !HONOR_SNANS (arg0))
11370 tree arg01 = TREE_OPERAND (arg0, 1);
11371 enum tree_code code0 = TREE_CODE (arg0);
11372 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11374 /* (X - c) > X becomes false. */
11375 if (code == GT_EXPR
11376 && ((code0 == MINUS_EXPR && is_positive >= 0)
11377 || (code0 == PLUS_EXPR && is_positive <= 0)))
11378 return constant_boolean_node (0, type);
11380 /* Likewise (X + c) < X becomes false. */
11381 if (code == LT_EXPR
11382 && ((code0 == PLUS_EXPR && is_positive >= 0)
11383 || (code0 == MINUS_EXPR && is_positive <= 0)))
11384 return constant_boolean_node (0, type);
11386 /* Convert (X - c) <= X to true. */
11387 if (!HONOR_NANS (arg1)
11388 && code == LE_EXPR
11389 && ((code0 == MINUS_EXPR && is_positive >= 0)
11390 || (code0 == PLUS_EXPR && is_positive <= 0)))
11391 return constant_boolean_node (1, type);
11393 /* Convert (X + c) >= X to true. */
11394 if (!HONOR_NANS (arg1)
11395 && code == GE_EXPR
11396 && ((code0 == PLUS_EXPR && is_positive >= 0)
11397 || (code0 == MINUS_EXPR && is_positive <= 0)))
11398 return constant_boolean_node (1, type);
11401 /* If we are comparing an ABS_EXPR with a constant, we can
11402 convert all the cases into explicit comparisons, but they may
11403 well not be faster than doing the ABS and one comparison.
11404 But ABS (X) <= C is a range comparison, which becomes a subtraction
11405 and a comparison, and is probably faster. */
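	 E.g. ABS (x) <= 5 becomes x >= -5 && x <= 5, provided
	 negating the constant does not overflow.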
11406 if (code == LE_EXPR
11407 && TREE_CODE (arg1) == INTEGER_CST
11408 && TREE_CODE (arg0) == ABS_EXPR
11409 && ! TREE_SIDE_EFFECTS (arg0)
11410 && (tem = negate_expr (arg1)) != 0
11411 && TREE_CODE (tem) == INTEGER_CST
11412 && !TREE_OVERFLOW (tem))
11413 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11414 build2 (GE_EXPR, type,
11415 TREE_OPERAND (arg0, 0), tem),
11416 build2 (LE_EXPR, type,
11417 TREE_OPERAND (arg0, 0), arg1));
11419 /* Convert ABS_EXPR<x> >= 0 to true. */
11420 strict_overflow_p = false;
11421 if (code == GE_EXPR
11422 && (integer_zerop (arg1)
11423 || (! HONOR_NANS (arg0)
11424 && real_zerop (arg1)))
11425 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11427 if (strict_overflow_p)
11428 fold_overflow_warning (("assuming signed overflow does not occur "
11429 "when simplifying comparison of "
11430 "absolute value and zero"),
11431 WARN_STRICT_OVERFLOW_CONDITIONAL);
11432 return omit_one_operand_loc (loc, type,
11433 constant_boolean_node (true, type),
11434 arg0);
11437 /* Convert ABS_EXPR<x> < 0 to false. */
11438 strict_overflow_p = false;
11439 if (code == LT_EXPR
11440 && (integer_zerop (arg1) || real_zerop (arg1))
11441 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11443 if (strict_overflow_p)
11444 fold_overflow_warning (("assuming signed overflow does not occur "
11445 "when simplifying comparison of "
11446 "absolute value and zero"),
11447 WARN_STRICT_OVERFLOW_CONDITIONAL);
11448 return omit_one_operand_loc (loc, type,
11449 constant_boolean_node (false, type),
11450 arg0);
11453 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11454 and similarly for >= into !=. */
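	 Concretely, for unsigned x, x < (1 << y) becomes
	 (x >> y) == 0 and x >= (1 << y) becomes (x >> y) != 0.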
11455 if ((code == LT_EXPR || code == GE_EXPR)
11456 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11457 && TREE_CODE (arg1) == LSHIFT_EXPR
11458 && integer_onep (TREE_OPERAND (arg1, 0)))
11459 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11460 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11461 TREE_OPERAND (arg1, 1)),
11462 build_zero_cst (TREE_TYPE (arg0)));
11464 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11465 otherwise Y might be >= # of bits in X's type and thus e.g.
 11466	 (unsigned char) (1 << Y) for Y == 15 might be 0.
11467 If the cast is widening, then 1 << Y should have unsigned type,
11468 otherwise if Y is number of bits in the signed shift type minus 1,
11469 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
 11470	 == 31 might be 0xffffffff80000000. */
11471 if ((code == LT_EXPR || code == GE_EXPR)
11472 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11473 && CONVERT_EXPR_P (arg1)
11474 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11475 && (element_precision (TREE_TYPE (arg1))
11476 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11477 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11478 || (element_precision (TREE_TYPE (arg1))
11479 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11480 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11482 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11483 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11484 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11485 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11486 build_zero_cst (TREE_TYPE (arg0)));
11489 return NULL_TREE;
11491 case UNORDERED_EXPR:
11492 case ORDERED_EXPR:
11493 case UNLT_EXPR:
11494 case UNLE_EXPR:
11495 case UNGT_EXPR:
11496 case UNGE_EXPR:
11497 case UNEQ_EXPR:
11498 case LTGT_EXPR:
11499 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11501 tree targ0 = strip_float_extensions (arg0);
11502 tree targ1 = strip_float_extensions (arg1);
11503 tree newtype = TREE_TYPE (targ0);
11505 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11506 newtype = TREE_TYPE (targ1);
11508 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11509 return fold_build2_loc (loc, code, type,
11510 fold_convert_loc (loc, newtype, targ0),
11511 fold_convert_loc (loc, newtype, targ1));
11514 return NULL_TREE;
11516 case COMPOUND_EXPR:
11517 /* When pedantic, a compound expression can be neither an lvalue
11518 nor an integer constant expression. */
11519 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11520 return NULL_TREE;
 11521	 /* Don't let (0, 0) be a null pointer constant. */
11522 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11523 : fold_convert_loc (loc, type, arg1);
11524 return pedantic_non_lvalue_loc (loc, tem);
11526 case ASSERT_EXPR:
11527 /* An ASSERT_EXPR should never be passed to fold_binary. */
11528 gcc_unreachable ();
11530 default:
11531 return NULL_TREE;
11532 } /* switch (code) */
11535 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11536 ((A & N) + B) & M -> (A + B) & M
11537 Similarly if (N & M) == 0,
11538 ((A | N) + B) & M -> (A + B) & M
11539 and for - instead of + (or unary - instead of +)
11540 and/or ^ instead of |.
11541 If B is constant and (B & M) == 0, fold into A & M.
 11543	 This function is a helper for match.pd patterns. It returns the
 11544	 non-NULL type in which the simplified operation should be performed,
 11545	 but only if some optimization is actually possible.
11547 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11548 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11549 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11550 +/-. */
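/* For instance, with M == 7 (so cst == 3) and N == 0x1f we have
   (N & M) == M, so ((A & 0x1f) + B) & 7 simplifies to (A + B) & 7:
   carries in the addition only propagate upward, so the low three bits
   of the sum do not depend on bits of A above bit 2. */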
11551 tree
11552 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11553 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11554 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11555 tree *pmop)
11557 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11558 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11559 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11560 if (~cst1 == 0
11561 || (cst1 & (cst1 + 1)) != 0
11562 || !INTEGRAL_TYPE_P (type)
11563 || (!TYPE_OVERFLOW_WRAPS (type)
11564 && TREE_CODE (type) != INTEGER_TYPE)
11565 || (wi::max_value (type) & cst1) != cst1)
11566 return NULL_TREE;
11568 enum tree_code codes[2] = { code00, code01 };
11569 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11570 int which = 0;
11571 wide_int cst0;
11573 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11574 arg1 (M) is == (1LL << cst) - 1.
11575 Store C into PMOP[0] and D into PMOP[1]. */
11576 pmop[0] = arg00;
11577 pmop[1] = arg01;
11578 which = code != NEGATE_EXPR;
11580 for (; which >= 0; which--)
11581 switch (codes[which])
11583 case BIT_AND_EXPR:
11584 case BIT_IOR_EXPR:
11585 case BIT_XOR_EXPR:
11586 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11587 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11588 if (codes[which] == BIT_AND_EXPR)
11590 if (cst0 != cst1)
11591 break;
11593 else if (cst0 != 0)
11594 break;
11595 /* If C or D is of the form (A & N) where
11596 (N & M) == M, or of the form (A | N) or
11597 (A ^ N) where (N & M) == 0, replace it with A. */
11598 pmop[which] = arg0xx[2 * which];
11599 break;
11600 case ERROR_MARK:
11601 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11602 break;
11603 /* If C or D is a N where (N & M) == 0, it can be
11604 omitted (replaced with 0). */
11605 if ((code == PLUS_EXPR
11606 || (code == MINUS_EXPR && which == 0))
11607 && (cst1 & wi::to_wide (pmop[which])) == 0)
11608 pmop[which] = build_int_cst (type, 0);
11609 /* Similarly, with C - N where (-N & M) == 0. */
11610 if (code == MINUS_EXPR
11611 && which == 1
11612 && (cst1 & -wi::to_wide (pmop[which])) == 0)
11613 pmop[which] = build_int_cst (type, 0);
11614 break;
11615 default:
11616 gcc_unreachable ();
11619 /* Only build anything new if we optimized one or both arguments above. */
11620 if (pmop[0] == arg00 && pmop[1] == arg01)
11621 return NULL_TREE;
11623 if (TYPE_OVERFLOW_WRAPS (type))
11624 return type;
11625 else
11626 return unsigned_type_for (type);
 11629	 /* Used by contains_label_p and contains_label_1. */
11631 struct contains_label_data
11633 hash_set<tree> *pset;
11634 bool inside_switch_p;
11637 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11638 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11639 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
11641 static tree
11642 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11644 contains_label_data *d = (contains_label_data *) data;
11645 switch (TREE_CODE (*tp))
11647 case LABEL_EXPR:
11648 return *tp;
11650 case CASE_LABEL_EXPR:
11651 if (!d->inside_switch_p)
11652 return *tp;
11653 return NULL_TREE;
11655 case SWITCH_EXPR:
11656 if (!d->inside_switch_p)
11658 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11659 return *tp;
11660 d->inside_switch_p = true;
11661 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11662 return *tp;
11663 d->inside_switch_p = false;
11664 *walk_subtrees = 0;
11666 return NULL_TREE;
11668 case GOTO_EXPR:
11669 *walk_subtrees = 0;
11670 return NULL_TREE;
11672 default:
11673 return NULL_TREE;
11677 /* Return whether the sub-tree ST contains a label which is accessible from
11678 outside the sub-tree. */
11680 static bool
11681 contains_label_p (tree st)
11683 hash_set<tree> pset;
11684 contains_label_data data = { &pset, false };
11685 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11688 /* Fold a ternary expression of code CODE and type TYPE with operands
11689 OP0, OP1, and OP2. Return the folded expression if folding is
11690 successful. Otherwise, return NULL_TREE. */
11692 tree
11693 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11694 tree op0, tree op1, tree op2)
11696 tree tem;
11697 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11698 enum tree_code_class kind = TREE_CODE_CLASS (code);
11700 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11701 && TREE_CODE_LENGTH (code) == 3);
11703 /* If this is a commutative operation, and OP0 is a constant, move it
11704 to OP1 to reduce the number of tests below. */
11705 if (commutative_ternary_tree_code (code)
11706 && tree_swap_operands_p (op0, op1))
11707 return fold_build3_loc (loc, code, type, op1, op0, op2);
11709 tem = generic_simplify (loc, code, type, op0, op1, op2);
11710 if (tem)
11711 return tem;
11713 /* Strip any conversions that don't change the mode. This is safe
11714 for every expression, except for a comparison expression because
11715 its signedness is derived from its operands. So, in the latter
11716 case, only strip conversions that don't change the signedness.
11718 Note that this is done as an internal manipulation within the
11719 constant folder, in order to find the simplest representation of
11720 the arguments so that their form can be studied. In any cases,
11721 the appropriate type conversions should be put back in the tree
11722 that will get out of the constant folder. */
11723 if (op0)
11725 arg0 = op0;
11726 STRIP_NOPS (arg0);
11729 if (op1)
11731 arg1 = op1;
11732 STRIP_NOPS (arg1);
11735 if (op2)
11737 arg2 = op2;
11738 STRIP_NOPS (arg2);
11741 switch (code)
11743 case COMPONENT_REF:
11744 if (TREE_CODE (arg0) == CONSTRUCTOR
11745 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11747 unsigned HOST_WIDE_INT idx;
11748 tree field, value;
11749 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11750 if (field == arg1)
11751 return value;
11753 return NULL_TREE;
11755 case COND_EXPR:
11756 case VEC_COND_EXPR:
11757 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11758 so all simple results must be passed through pedantic_non_lvalue. */
11759 if (TREE_CODE (arg0) == INTEGER_CST)
11761 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11762 tem = integer_zerop (arg0) ? op2 : op1;
11763 /* Only optimize constant conditions when the selected branch
11764 has the same type as the COND_EXPR. This avoids optimizing
11765 away "c ? x : throw", where the throw has a void type.
 11766	 Avoid throwing away an operand that contains a label. */
11767 if ((!TREE_SIDE_EFFECTS (unused_op)
11768 || !contains_label_p (unused_op))
11769 && (! VOID_TYPE_P (TREE_TYPE (tem))
11770 || VOID_TYPE_P (type)))
11771 return pedantic_non_lvalue_loc (loc, tem);
11772 return NULL_TREE;
11774 else if (TREE_CODE (arg0) == VECTOR_CST)
11776 unsigned HOST_WIDE_INT nelts;
11777 if ((TREE_CODE (arg1) == VECTOR_CST
11778 || TREE_CODE (arg1) == CONSTRUCTOR)
11779 && (TREE_CODE (arg2) == VECTOR_CST
11780 || TREE_CODE (arg2) == CONSTRUCTOR)
11781 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11783 vec_perm_builder sel (nelts, nelts, 1);
11784 for (unsigned int i = 0; i < nelts; i++)
11786 tree val = VECTOR_CST_ELT (arg0, i);
11787 if (integer_all_onesp (val))
11788 sel.quick_push (i);
11789 else if (integer_zerop (val))
11790 sel.quick_push (nelts + i);
11791 else /* Currently unreachable. */
11792 return NULL_TREE;
11794 vec_perm_indices indices (sel, 2, nelts);
11795 tree t = fold_vec_perm (type, arg1, arg2, indices);
11796 if (t != NULL_TREE)
11797 return t;
11801 /* If we have A op B ? A : C, we may be able to convert this to a
11802 simpler expression, depending on the operation and the values
11803 of B and C. Signed zeros prevent all of these transformations,
11804 for reasons given above each one.
11806 Also try swapping the arguments and inverting the conditional. */
11807 if (COMPARISON_CLASS_P (arg0)
11808 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11809 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11811 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11812 if (tem)
11813 return tem;
11816 if (COMPARISON_CLASS_P (arg0)
11817 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11818 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11820 location_t loc0 = expr_location_or (arg0, loc);
11821 tem = fold_invert_truthvalue (loc0, arg0);
11822 if (tem && COMPARISON_CLASS_P (tem))
11824 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11825 if (tem)
11826 return tem;
11830 /* If the second operand is simpler than the third, swap them
11831 since that produces better jump optimization results. */
11832 if (truth_value_p (TREE_CODE (arg0))
11833 && tree_swap_operands_p (op1, op2))
11835 location_t loc0 = expr_location_or (arg0, loc);
11836 /* See if this can be inverted. If it can't, possibly because
11837 it was a floating-point inequality comparison, don't do
11838 anything. */
11839 tem = fold_invert_truthvalue (loc0, arg0);
11840 if (tem)
11841 return fold_build3_loc (loc, code, type, tem, op2, op1);
11844 /* Convert A ? 1 : 0 to simply A. */
11845 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11846 : (integer_onep (op1)
11847 && !VECTOR_TYPE_P (type)))
11848 && integer_zerop (op2)
11849 /* If we try to convert OP0 to our type, the
11850 call to fold will try to move the conversion inside
11851 a COND, which will recurse. In that case, the COND_EXPR
11852 is probably the best choice, so leave it alone. */
11853 && type == TREE_TYPE (arg0))
11854 return pedantic_non_lvalue_loc (loc, arg0);
11856 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11857 over COND_EXPR in cases such as floating point comparisons. */
11858 if (integer_zerop (op1)
11859 && code == COND_EXPR
11860 && integer_onep (op2)
11861 && !VECTOR_TYPE_P (type)
11862 && truth_value_p (TREE_CODE (arg0)))
11863 return pedantic_non_lvalue_loc (loc,
11864 fold_convert_loc (loc, type,
11865 invert_truthvalue_loc (loc,
11866 arg0)));
11868 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
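	 Concretely, for a 32-bit int x this turns
	 x < 0 ? INT_MIN : 0 into x & INT_MIN.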
11869 if (TREE_CODE (arg0) == LT_EXPR
11870 && integer_zerop (TREE_OPERAND (arg0, 1))
11871 && integer_zerop (op2)
11872 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11874 /* sign_bit_p looks through both zero and sign extensions,
11875 but for this optimization only sign extensions are
11876 usable. */
11877 tree tem2 = TREE_OPERAND (arg0, 0);
11878 while (tem != tem2)
11880 if (TREE_CODE (tem2) != NOP_EXPR
11881 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11883 tem = NULL_TREE;
11884 break;
11886 tem2 = TREE_OPERAND (tem2, 0);
11888 /* sign_bit_p only checks ARG1 bits within A's precision.
11889 If <sign bit of A> has wider type than A, bits outside
11890 of A's precision in <sign bit of A> need to be checked.
11891 If they are all 0, this optimization needs to be done
11892 in unsigned A's type, if they are all 1 in signed A's type,
11893 otherwise this can't be done. */
11894 if (tem
11895 && TYPE_PRECISION (TREE_TYPE (tem))
11896 < TYPE_PRECISION (TREE_TYPE (arg1))
11897 && TYPE_PRECISION (TREE_TYPE (tem))
11898 < TYPE_PRECISION (type))
11900 int inner_width, outer_width;
11901 tree tem_type;
11903 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11904 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11905 if (outer_width > TYPE_PRECISION (type))
11906 outer_width = TYPE_PRECISION (type);
11908 wide_int mask = wi::shifted_mask
11909 (inner_width, outer_width - inner_width, false,
11910 TYPE_PRECISION (TREE_TYPE (arg1)));
11912 wide_int common = mask & wi::to_wide (arg1);
11913 if (common == mask)
11915 tem_type = signed_type_for (TREE_TYPE (tem));
11916 tem = fold_convert_loc (loc, tem_type, tem);
11918 else if (common == 0)
11920 tem_type = unsigned_type_for (TREE_TYPE (tem));
11921 tem = fold_convert_loc (loc, tem_type, tem);
11923 else
11924 tem = NULL;
11927 if (tem)
11928 return
11929 fold_convert_loc (loc, type,
11930 fold_build2_loc (loc, BIT_AND_EXPR,
11931 TREE_TYPE (tem), tem,
11932 fold_convert_loc (loc,
11933 TREE_TYPE (tem),
11934 arg1)));
11937 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11938 already handled above. */
11939 if (TREE_CODE (arg0) == BIT_AND_EXPR
11940 && integer_onep (TREE_OPERAND (arg0, 1))
11941 && integer_zerop (op2)
11942 && integer_pow2p (arg1))
11944 tree tem = TREE_OPERAND (arg0, 0);
11945 STRIP_NOPS (tem);
11946 if (TREE_CODE (tem) == RSHIFT_EXPR
11947 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11948 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11949 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11950 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11951 fold_convert_loc (loc, type,
11952 TREE_OPERAND (tem, 0)),
11953 op1);
11956 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11957 is probably obsolete because the first operand should be a
11958 truth value (that's why we have the two cases above), but let's
11959 leave it in until we can confirm this for all front-ends. */
11960 if (integer_zerop (op2)
11961 && TREE_CODE (arg0) == NE_EXPR
11962 && integer_zerop (TREE_OPERAND (arg0, 1))
11963 && integer_pow2p (arg1)
11964 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11965 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11966 arg1, OEP_ONLY_CONST)
11967 /* operand_equal_p compares just value, not precision, so e.g.
11968 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
11969 second operand 32-bit -128, which is not a power of two (or vice
 11970	 versa). */
11971 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11972 return pedantic_non_lvalue_loc (loc,
11973 fold_convert_loc (loc, type,
11974 TREE_OPERAND (arg0,
11975 0)));
11977 /* Disable the transformations below for vectors, since
11978 fold_binary_op_with_conditional_arg may undo them immediately,
11979 yielding an infinite loop. */
11980 if (code == VEC_COND_EXPR)
11981 return NULL_TREE;
11983 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11984 if (integer_zerop (op2)
11985 && truth_value_p (TREE_CODE (arg0))
11986 && truth_value_p (TREE_CODE (arg1))
11987 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11988 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11989 : TRUTH_ANDIF_EXPR,
11990 type, fold_convert_loc (loc, type, arg0), op1);
11992 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11993 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11994 && truth_value_p (TREE_CODE (arg0))
11995 && truth_value_p (TREE_CODE (arg1))
11996 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11998 location_t loc0 = expr_location_or (arg0, loc);
11999 /* Only perform transformation if ARG0 is easily inverted. */
12000 tem = fold_invert_truthvalue (loc0, arg0);
12001 if (tem)
12002 return fold_build2_loc (loc, code == VEC_COND_EXPR
12003 ? BIT_IOR_EXPR
12004 : TRUTH_ORIF_EXPR,
12005 type, fold_convert_loc (loc, type, tem),
12006 op1);
12009 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12010 if (integer_zerop (arg1)
12011 && truth_value_p (TREE_CODE (arg0))
12012 && truth_value_p (TREE_CODE (op2))
12013 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12015 location_t loc0 = expr_location_or (arg0, loc);
12016 /* Only perform transformation if ARG0 is easily inverted. */
12017 tem = fold_invert_truthvalue (loc0, arg0);
12018 if (tem)
12019 return fold_build2_loc (loc, code == VEC_COND_EXPR
12020 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12021 type, fold_convert_loc (loc, type, tem),
12022 op2);
12025 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12026 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12027 && truth_value_p (TREE_CODE (arg0))
12028 && truth_value_p (TREE_CODE (op2))
12029 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12030 return fold_build2_loc (loc, code == VEC_COND_EXPR
12031 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12032 type, fold_convert_loc (loc, type, arg0), op2);
12034 return NULL_TREE;
12036 case CALL_EXPR:
12037 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12038 of fold_ternary on them. */
12039 gcc_unreachable ();
12041 case BIT_FIELD_REF:
12042 if (TREE_CODE (arg0) == VECTOR_CST
12043 && (type == TREE_TYPE (TREE_TYPE (arg0))
12044 || (VECTOR_TYPE_P (type)
12045 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12046 && tree_fits_uhwi_p (op1)
12047 && tree_fits_uhwi_p (op2))
12049 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12050 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12051 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12052 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12054 if (n != 0
12055 && (idx % width) == 0
12056 && (n % width) == 0
12057 && known_le ((idx + n) / width,
12058 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12060 idx = idx / width;
12061 n = n / width;
12063 if (TREE_CODE (arg0) == VECTOR_CST)
12065 if (n == 1)
12067 tem = VECTOR_CST_ELT (arg0, idx);
12068 if (VECTOR_TYPE_P (type))
12069 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12070 return tem;
12073 tree_vector_builder vals (type, n, 1);
12074 for (unsigned i = 0; i < n; ++i)
12075 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12076 return vals.build ();
12081 /* On constants we can use native encode/interpret to constant
12082 fold (nearly) all BIT_FIELD_REFs. */
12083 if (CONSTANT_CLASS_P (arg0)
12084 && can_native_interpret_type_p (type)
12085 && BITS_PER_UNIT == 8
12086 && tree_fits_uhwi_p (op1)
12087 && tree_fits_uhwi_p (op2))
12089 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12090 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12091 /* Limit us to a reasonable amount of work. To relax the
12092 other limitations we need bit-shifting of the buffer
12093 and rounding up the size. */
12094 if (bitpos % BITS_PER_UNIT == 0
12095 && bitsize % BITS_PER_UNIT == 0
12096 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12098 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12099 unsigned HOST_WIDE_INT len
12100 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12101 bitpos / BITS_PER_UNIT);
12102 if (len > 0
12103 && len * BITS_PER_UNIT >= bitsize)
12105 tree v = native_interpret_expr (type, b,
12106 bitsize / BITS_PER_UNIT);
12107 if (v)
12108 return v;
12113 return NULL_TREE;
12115 case VEC_PERM_EXPR:
 12116	 /* Perform constant folding of VEC_PERM_EXPR. */
12117 if (TREE_CODE (arg2) == VECTOR_CST
12118 && TREE_CODE (op0) == VECTOR_CST
12119 && TREE_CODE (op1) == VECTOR_CST)
12121 /* Build a vector of integers from the tree mask. */
12122 vec_perm_builder builder;
12123 if (!tree_to_vec_perm_builder (&builder, arg2))
12124 return NULL_TREE;
12126 /* Create a vec_perm_indices for the integer vector. */
12127 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12128 bool single_arg = (op0 == op1);
12129 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12130 return fold_vec_perm (type, op0, op1, sel);
12132 return NULL_TREE;
12134 case BIT_INSERT_EXPR:
12135 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12136 if (TREE_CODE (arg0) == INTEGER_CST
12137 && TREE_CODE (arg1) == INTEGER_CST)
12139 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12140 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12141 wide_int tem = (wi::to_wide (arg0)
12142 & wi::shifted_mask (bitpos, bitsize, true,
12143 TYPE_PRECISION (type)));
12144 wide_int tem2
12145 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12146 bitsize), bitpos);
12147 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12149 else if (TREE_CODE (arg0) == VECTOR_CST
12150 && CONSTANT_CLASS_P (arg1)
12151 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12152 TREE_TYPE (arg1)))
12154 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12155 unsigned HOST_WIDE_INT elsize
12156 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12157 if (bitpos % elsize == 0)
12159 unsigned k = bitpos / elsize;
12160 unsigned HOST_WIDE_INT nelts;
12161 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12162 return arg0;
12163 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12165 tree_vector_builder elts (type, nelts, 1);
12166 elts.quick_grow (nelts);
12167 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12168 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12169 return elts.build ();
12173 return NULL_TREE;
12175 default:
12176 return NULL_TREE;
12177 } /* switch (code) */
 12180	 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
 12181	 of an array (or vector). If CTOR_IDX is non-NULL, it is updated with the
12182 constructor element index of the value returned. If the element is
12183 not found NULL_TREE is returned and *CTOR_IDX is updated to
12184 the index of the element after the ACCESS_INDEX position (which
12185 may be outside of the CTOR array). */
12187 tree
12188 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12189 unsigned *ctor_idx)
12191 tree index_type = NULL_TREE;
12192 signop index_sgn = UNSIGNED;
12193 offset_int low_bound = 0;
12195 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12197 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12198 if (domain_type && TYPE_MIN_VALUE (domain_type))
 12200	 /* Static constructors for variably sized objects make no sense. */
12201 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12202 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12203 /* ??? When it is obvious that the range is signed, treat it so. */
12204 if (TYPE_UNSIGNED (index_type)
12205 && TYPE_MAX_VALUE (domain_type)
12206 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12207 TYPE_MIN_VALUE (domain_type)))
12209 index_sgn = SIGNED;
12210 low_bound
12211 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12212 SIGNED);
12214 else
12216 index_sgn = TYPE_SIGN (index_type);
12217 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12222 if (index_type)
12223 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12224 index_sgn);
12226 offset_int index = low_bound;
12227 if (index_type)
12228 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12230 offset_int max_index = index;
12231 unsigned cnt;
12232 tree cfield, cval;
12233 bool first_p = true;
12235 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12237 /* Array constructor might explicitly set index, or specify a range,
 12238	 or leave the index NULL, meaning that it is the next index after
 12239	 the previous one. */
12240 if (cfield)
12242 if (TREE_CODE (cfield) == INTEGER_CST)
12243 max_index = index
12244 = offset_int::from (wi::to_wide (cfield), index_sgn);
12245 else
12247 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12248 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12249 index_sgn);
12250 max_index
12251 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12252 index_sgn);
12253 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12256 else if (!first_p)
12258 index = max_index + 1;
12259 if (index_type)
12260 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12261 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12262 max_index = index;
12264 else
12265 first_p = false;
12267 /* Do we have match? */
12268 if (wi::cmp (access_index, index, index_sgn) >= 0)
12270 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12272 if (ctor_idx)
12273 *ctor_idx = cnt;
12274 return cval;
12277 else if (in_gimple_form)
 12278	 /* We're past the element we're searching for. Note that during parsing
12279 the elements might not be sorted.
12280 ??? We should use a binary search and a flag on the
12281 CONSTRUCTOR as to whether elements are sorted in declaration
12282 order. */
12283 break;
12285 if (ctor_idx)
12286 *ctor_idx = cnt;
12287 return NULL_TREE;
12290 /* Perform constant folding and related simplification of EXPR.
12291 The related simplifications include x*1 => x, x*0 => 0, etc.,
12292 and application of the associative law.
12293 NOP_EXPR conversions may be removed freely (as long as we
12294 are careful not to change the type of the overall expression).
12295 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12296 but we can constant-fold them if they have constant operands. */
12298 #ifdef ENABLE_FOLD_CHECKING
12299 # define fold(x) fold_1 (x)
12300 static tree fold_1 (tree);
12301 static
12302 #endif
12303 tree
12304 fold (tree expr)
12306 const tree t = expr;
12307 enum tree_code code = TREE_CODE (t);
12308 enum tree_code_class kind = TREE_CODE_CLASS (code);
12309 tree tem;
12310 location_t loc = EXPR_LOCATION (expr);
12312 /* Return right away if a constant. */
12313 if (kind == tcc_constant)
12314 return t;
12316 /* CALL_EXPR-like objects with variable numbers of operands are
12317 treated specially. */
12318 if (kind == tcc_vl_exp)
12320 if (code == CALL_EXPR)
12322 tem = fold_call_expr (loc, expr, false);
12323 return tem ? tem : expr;
12325 return expr;
12328 if (IS_EXPR_CODE_CLASS (kind))
12330 tree type = TREE_TYPE (t);
12331 tree op0, op1, op2;
12333 switch (TREE_CODE_LENGTH (code))
12335 case 1:
12336 op0 = TREE_OPERAND (t, 0);
12337 tem = fold_unary_loc (loc, code, type, op0);
12338 return tem ? tem : expr;
12339 case 2:
12340 op0 = TREE_OPERAND (t, 0);
12341 op1 = TREE_OPERAND (t, 1);
12342 tem = fold_binary_loc (loc, code, type, op0, op1);
12343 return tem ? tem : expr;
12344 case 3:
12345 op0 = TREE_OPERAND (t, 0);
12346 op1 = TREE_OPERAND (t, 1);
12347 op2 = TREE_OPERAND (t, 2);
12348 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12349 return tem ? tem : expr;
12350 default:
12351 break;
12355 switch (code)
12357 case ARRAY_REF:
12359 tree op0 = TREE_OPERAND (t, 0);
12360 tree op1 = TREE_OPERAND (t, 1);
12362 if (TREE_CODE (op1) == INTEGER_CST
12363 && TREE_CODE (op0) == CONSTRUCTOR
12364 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12366 tree val = get_array_ctor_element_at_index (op0,
12367 wi::to_offset (op1));
12368 if (val)
12369 return val;
12372 return t;
12375 /* Return a VECTOR_CST if possible. */
12376 case CONSTRUCTOR:
12378 tree type = TREE_TYPE (t);
12379 if (TREE_CODE (type) != VECTOR_TYPE)
12380 return t;
12382 unsigned i;
12383 tree val;
12384 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12385 if (! CONSTANT_CLASS_P (val))
12386 return t;
12388 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12391 case CONST_DECL:
12392 return fold (DECL_INITIAL (t));
12394 default:
12395 return t;
12396 } /* switch (code) */
12399 #ifdef ENABLE_FOLD_CHECKING
12400 #undef fold
12402 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12403 hash_table<nofree_ptr_hash<const tree_node> > *);
12404 static void fold_check_failed (const_tree, const_tree);
12405 void print_fold_checksum (const_tree);
12407 /* When --enable-checking=fold, compute a digest of expr before
 12408	 and after the actual fold call to verify that fold did not accidentally
 12409	 change the original expr. */
12411 tree
12412 fold (tree expr)
12414 tree ret;
12415 struct md5_ctx ctx;
12416 unsigned char checksum_before[16], checksum_after[16];
12417 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12419 md5_init_ctx (&ctx);
12420 fold_checksum_tree (expr, &ctx, &ht);
12421 md5_finish_ctx (&ctx, checksum_before);
12422 ht.empty ();
12424 ret = fold_1 (expr);
12426 md5_init_ctx (&ctx);
12427 fold_checksum_tree (expr, &ctx, &ht);
12428 md5_finish_ctx (&ctx, checksum_after);
12430 if (memcmp (checksum_before, checksum_after, 16))
12431 fold_check_failed (expr, ret);
12433 return ret;
12436 void
12437 print_fold_checksum (const_tree expr)
12439 struct md5_ctx ctx;
12440 unsigned char checksum[16], cnt;
12441 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12443 md5_init_ctx (&ctx);
12444 fold_checksum_tree (expr, &ctx, &ht);
12445 md5_finish_ctx (&ctx, checksum);
12446 for (cnt = 0; cnt < 16; ++cnt)
12447 fprintf (stderr, "%02x", checksum[cnt]);
12448 putc ('\n', stderr);
12451 static void
12452 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12454 internal_error ("fold check: original tree changed by fold");
12457 static void
12458 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12459 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12461 const tree_node **slot;
12462 enum tree_code code;
12463 union tree_node *buf;
12464 int i, len;
12466 recursive_label:
12467 if (expr == NULL)
12468 return;
12469 slot = ht->find_slot (expr, INSERT);
12470 if (*slot != NULL)
12471 return;
12472 *slot = expr;
12473 code = TREE_CODE (expr);
12474 if (TREE_CODE_CLASS (code) == tcc_declaration
12475 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12477 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12478 size_t sz = tree_size (expr);
12479 buf = XALLOCAVAR (union tree_node, sz);
12480 memcpy ((char *) buf, expr, sz);
12481 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12482 buf->decl_with_vis.symtab_node = NULL;
12483 buf->base.nowarning_flag = 0;
12484 expr = (tree) buf;
12486 else if (TREE_CODE_CLASS (code) == tcc_type
12487 && (TYPE_POINTER_TO (expr)
12488 || TYPE_REFERENCE_TO (expr)
12489 || TYPE_CACHED_VALUES_P (expr)
12490 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12491 || TYPE_NEXT_VARIANT (expr)
12492 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12494 /* Allow these fields to be modified. */
12495 tree tmp;
12496 size_t sz = tree_size (expr);
12497 buf = XALLOCAVAR (union tree_node, sz);
12498 memcpy ((char *) buf, expr, sz);
12499 expr = tmp = (tree) buf;
12500 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12501 TYPE_POINTER_TO (tmp) = NULL;
12502 TYPE_REFERENCE_TO (tmp) = NULL;
12503 TYPE_NEXT_VARIANT (tmp) = NULL;
12504 TYPE_ALIAS_SET (tmp) = -1;
12505 if (TYPE_CACHED_VALUES_P (tmp))
12507 TYPE_CACHED_VALUES_P (tmp) = 0;
12508 TYPE_CACHED_VALUES (tmp) = NULL;
12511 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12513 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
12514 and change builtins.c etc. instead - see PR89543. */
12515 size_t sz = tree_size (expr);
12516 buf = XALLOCAVAR (union tree_node, sz);
12517 memcpy ((char *) buf, expr, sz);
12518 buf->base.nowarning_flag = 0;
12519 expr = (tree) buf;
12521 md5_process_bytes (expr, tree_size (expr), ctx);
12522 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12523 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12524 if (TREE_CODE_CLASS (code) != tcc_type
12525 && TREE_CODE_CLASS (code) != tcc_declaration
12526 && code != TREE_LIST
12527 && code != SSA_NAME
12528 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12529 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12530 switch (TREE_CODE_CLASS (code))
12532 case tcc_constant:
12533 switch (code)
12535 case STRING_CST:
12536 md5_process_bytes (TREE_STRING_POINTER (expr),
12537 TREE_STRING_LENGTH (expr), ctx);
12538 break;
12539 case COMPLEX_CST:
12540 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12541 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12542 break;
12543 case VECTOR_CST:
12544 len = vector_cst_encoded_nelts (expr);
12545 for (i = 0; i < len; ++i)
12546 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12547 break;
12548 default:
12549 break;
12551 break;
12552 case tcc_exceptional:
12553 switch (code)
12555 case TREE_LIST:
12556 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12557 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12558 expr = TREE_CHAIN (expr);
12559 goto recursive_label;
12560 break;
12561 case TREE_VEC:
12562 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12563 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12564 break;
12565 default:
12566 break;
12568 break;
12569 case tcc_expression:
12570 case tcc_reference:
12571 case tcc_comparison:
12572 case tcc_unary:
12573 case tcc_binary:
12574 case tcc_statement:
12575 case tcc_vl_exp:
12576 len = TREE_OPERAND_LENGTH (expr);
12577 for (i = 0; i < len; ++i)
12578 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12579 break;
12580 case tcc_declaration:
12581 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12582 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12583 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12585 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12586 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12587 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12588 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12589 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12592 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12594 if (TREE_CODE (expr) == FUNCTION_DECL)
12596 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12597 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12599 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12601 break;
12602 case tcc_type:
12603 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12604 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12605 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12606 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12607 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12608 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12609 if (INTEGRAL_TYPE_P (expr)
12610 || SCALAR_FLOAT_TYPE_P (expr))
12612 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12613 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12615 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12616 if (TREE_CODE (expr) == RECORD_TYPE
12617 || TREE_CODE (expr) == UNION_TYPE
12618 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12619 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12620 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12621 break;
12622 default:
12623 break;
12627 /* Helper function for outputting the checksum of a tree T. When
12628 debugging with gdb, you can "define mynext" to be "next" followed
12629 by "call debug_fold_checksum (op0)", then just trace down till the
12630 outputs differ. */
12632 DEBUG_FUNCTION void
12633 debug_fold_checksum (const_tree t)
12635 int i;
12636 unsigned char checksum[16];
12637 struct md5_ctx ctx;
12638 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12640 md5_init_ctx (&ctx);
12641 fold_checksum_tree (t, &ctx, &ht);
12642 md5_finish_ctx (&ctx, checksum);
12643 ht.empty ();
12645 for (i = 0; i < 16; i++)
12646 fprintf (stderr, "%d ", checksum[i]);
12648 fprintf (stderr, "\n");
12651 #endif
12653 /* Fold a unary tree expression with code CODE of type TYPE with an
12654 operand OP0. LOC is the location of the resulting expression.
12655 Return a folded expression if successful. Otherwise, return a tree
12656 expression with code CODE of type TYPE with an operand OP0. */
12658 tree
12659 fold_build1_loc (location_t loc,
12660 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12662 tree tem;
12663 #ifdef ENABLE_FOLD_CHECKING
12664 unsigned char checksum_before[16], checksum_after[16];
12665 struct md5_ctx ctx;
12666 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12668 md5_init_ctx (&ctx);
12669 fold_checksum_tree (op0, &ctx, &ht);
12670 md5_finish_ctx (&ctx, checksum_before);
12671 ht.empty ();
12672 #endif
12674 tem = fold_unary_loc (loc, code, type, op0);
12675 if (!tem)
12676 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12678 #ifdef ENABLE_FOLD_CHECKING
12679 md5_init_ctx (&ctx);
12680 fold_checksum_tree (op0, &ctx, &ht);
12681 md5_finish_ctx (&ctx, checksum_after);
12683 if (memcmp (checksum_before, checksum_after, 16))
12684 fold_check_failed (op0, tem);
12685 #endif
12686 return tem;
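/* A minimal usage sketch for fold_build1_loc, assuming LOC, TYPE and
   EXPR are in scope:

     tree neg = fold_build1_loc (loc, NEGATE_EXPR, type, expr);

   If EXPR folds (e.g. it is an INTEGER_CST), NEG is the simplified
   tree; otherwise NEG is a fresh NEGATE_EXPR built at LOC.  */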
12689 /* Fold a binary tree expression with code CODE of type TYPE with
12690 operands OP0 and OP1. LOC is the location of the resulting
12691 expression. Return a folded expression if successful. Otherwise,
12692 return a tree expression with code CODE of type TYPE with operands
12693 OP0 and OP1. */
12695 tree
12696 fold_build2_loc (location_t loc,
12697 enum tree_code code, tree type, tree op0, tree op1
12698 MEM_STAT_DECL)
12700 tree tem;
12701 #ifdef ENABLE_FOLD_CHECKING
12702 unsigned char checksum_before_op0[16],
12703 checksum_before_op1[16],
12704 checksum_after_op0[16],
12705 checksum_after_op1[16];
12706 struct md5_ctx ctx;
12707 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12709 md5_init_ctx (&ctx);
12710 fold_checksum_tree (op0, &ctx, &ht);
12711 md5_finish_ctx (&ctx, checksum_before_op0);
12712 ht.empty ();
12714 md5_init_ctx (&ctx);
12715 fold_checksum_tree (op1, &ctx, &ht);
12716 md5_finish_ctx (&ctx, checksum_before_op1);
12717 ht.empty ();
12718 #endif
12720 tem = fold_binary_loc (loc, code, type, op0, op1);
12721 if (!tem)
12722 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12724 #ifdef ENABLE_FOLD_CHECKING
12725 md5_init_ctx (&ctx);
12726 fold_checksum_tree (op0, &ctx, &ht);
12727 md5_finish_ctx (&ctx, checksum_after_op0);
12728 ht.empty ();
12730 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12731 fold_check_failed (op0, tem);
12733 md5_init_ctx (&ctx);
12734 fold_checksum_tree (op1, &ctx, &ht);
12735 md5_finish_ctx (&ctx, checksum_after_op1);
12737 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12738 fold_check_failed (op1, tem);
12739 #endif
12740 return tem;
12743 /* Fold a ternary tree expression with code CODE of type TYPE with
12744 operands OP0, OP1, and OP2. Return a folded expression if
12745 successful. Otherwise, return a tree expression with code CODE of
12746 type TYPE with operands OP0, OP1, and OP2. */
12748 tree
12749 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12750 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12752 tree tem;
12753 #ifdef ENABLE_FOLD_CHECKING
12754 unsigned char checksum_before_op0[16],
12755 checksum_before_op1[16],
12756 checksum_before_op2[16],
12757 checksum_after_op0[16],
12758 checksum_after_op1[16],
12759 checksum_after_op2[16];
12760 struct md5_ctx ctx;
12761 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12763 md5_init_ctx (&ctx);
12764 fold_checksum_tree (op0, &ctx, &ht);
12765 md5_finish_ctx (&ctx, checksum_before_op0);
12766 ht.empty ();
12768 md5_init_ctx (&ctx);
12769 fold_checksum_tree (op1, &ctx, &ht);
12770 md5_finish_ctx (&ctx, checksum_before_op1);
12771 ht.empty ();
12773 md5_init_ctx (&ctx);
12774 fold_checksum_tree (op2, &ctx, &ht);
12775 md5_finish_ctx (&ctx, checksum_before_op2);
12776 ht.empty ();
12777 #endif
12779 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12780 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12781 if (!tem)
12782 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12784 #ifdef ENABLE_FOLD_CHECKING
12785 md5_init_ctx (&ctx);
12786 fold_checksum_tree (op0, &ctx, &ht);
12787 md5_finish_ctx (&ctx, checksum_after_op0);
12788 ht.empty ();
12790 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12791 fold_check_failed (op0, tem);
12793 md5_init_ctx (&ctx);
12794 fold_checksum_tree (op1, &ctx, &ht);
12795 md5_finish_ctx (&ctx, checksum_after_op1);
12796 ht.empty ();
12798 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12799 fold_check_failed (op1, tem);
12801 md5_init_ctx (&ctx);
12802 fold_checksum_tree (op2, &ctx, &ht);
12803 md5_finish_ctx (&ctx, checksum_after_op2);
12805 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12806 fold_check_failed (op2, tem);
12807 #endif
12808 return tem;
12811 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
12812 arguments in ARGARRAY, and a null static chain.
12813 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12814 of type TYPE from the given operands as constructed by build_call_array. */
12816 tree
12817 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12818 int nargs, tree *argarray)
12820 tree tem;
12821 #ifdef ENABLE_FOLD_CHECKING
12822 unsigned char checksum_before_fn[16],
12823 checksum_before_arglist[16],
12824 checksum_after_fn[16],
12825 checksum_after_arglist[16];
12826 struct md5_ctx ctx;
12827 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12828 int i;
12830 md5_init_ctx (&ctx);
12831 fold_checksum_tree (fn, &ctx, &ht);
12832 md5_finish_ctx (&ctx, checksum_before_fn);
12833 ht.empty ();
12835 md5_init_ctx (&ctx);
12836 for (i = 0; i < nargs; i++)
12837 fold_checksum_tree (argarray[i], &ctx, &ht);
12838 md5_finish_ctx (&ctx, checksum_before_arglist);
12839 ht.empty ();
12840 #endif
12842 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12843 if (!tem)
12844 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12846 #ifdef ENABLE_FOLD_CHECKING
12847 md5_init_ctx (&ctx);
12848 fold_checksum_tree (fn, &ctx, &ht);
12849 md5_finish_ctx (&ctx, checksum_after_fn);
12850 ht.empty ();
12852 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12853 fold_check_failed (fn, tem);
12855 md5_init_ctx (&ctx);
12856 for (i = 0; i < nargs; i++)
12857 fold_checksum_tree (argarray[i], &ctx, &ht);
12858 md5_finish_ctx (&ctx, checksum_after_arglist);
12860 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12861 fold_check_failed (NULL_TREE, tem);
12862 #endif
12863 return tem;
12866 /* Perform constant folding and related simplification of initializer
12867 expression EXPR. The functions below behave identically to "fold_buildN"
12868 but ignore potential run-time traps and exceptions that fold must preserve. */
12870 #define START_FOLD_INIT \
12871 int saved_signaling_nans = flag_signaling_nans;\
12872 int saved_trapping_math = flag_trapping_math;\
12873 int saved_rounding_math = flag_rounding_math;\
12874 int saved_trapv = flag_trapv;\
12875 int saved_folding_initializer = folding_initializer;\
12876 flag_signaling_nans = 0;\
12877 flag_trapping_math = 0;\
12878 flag_rounding_math = 0;\
12879 flag_trapv = 0;\
12880 folding_initializer = 1;
12882 #define END_FOLD_INIT \
12883 flag_signaling_nans = saved_signaling_nans;\
12884 flag_trapping_math = saved_trapping_math;\
12885 flag_rounding_math = saved_rounding_math;\
12886 flag_trapv = saved_trapv;\
12887 folding_initializer = saved_folding_initializer;
12889 tree
12890 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12891 tree type, tree op)
12893 tree result;
12894 START_FOLD_INIT;
12896 result = fold_build1_loc (loc, code, type, op);
12898 END_FOLD_INIT;
12899 return result;
12902 tree
12903 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12904 tree type, tree op0, tree op1)
12906 tree result;
12907 START_FOLD_INIT;
12909 result = fold_build2_loc (loc, code, type, op0, op1);
12911 END_FOLD_INIT;
12912 return result;
12915 tree
12916 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12917 int nargs, tree *argarray)
12919 tree result;
12920 START_FOLD_INIT;
12922 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12924 END_FOLD_INIT;
12925 return result;
12928 #undef START_FOLD_INIT
12929 #undef END_FOLD_INIT
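/* A minimal usage sketch for the initializer variants, assuming LOC,
   OP0 and OP1 are in scope: folding a static initializer such as
   "1.0 / 3.0" that -frounding-math or -ftrapping-math would otherwise
   keep fold from simplifying:

     tree t = fold_build2_initializer_loc (loc, RDIV_EXPR,
                                           double_type_node, op0, op1);

   START_FOLD_INIT clears the flags around the fold and END_FOLD_INIT
   restores them.  */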
12931 /* Determine whether the first argument is a multiple of the second argument.
12932 Return 0 if it is not, or if we cannot easily determine it to be.
12934 An example of the sort of thing we care about (at this point; this routine
12935 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12936 fold cases do now) is discovering that
12938 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12940 is a multiple of
12942 SAVE_EXPR (J * 8)
12944 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12946 This code also handles discovering that
12948 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12950 is a multiple of 8 so we don't have to worry about dealing with a
12951 possible remainder.
12953 Note that we *look* inside a SAVE_EXPR only to determine how it was
12954 calculated; it is not safe for fold to do much of anything else with the
12955 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12956 at run time. For example, the latter example above *cannot* be implemented
12957 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12958 evaluation time of the original SAVE_EXPR is not necessarily the same at
12959 the time the new expression is evaluated. The only optimization of this
12960 sort that would be valid is changing
12962 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12964 divided by 8 to
12966 SAVE_EXPR (I) * SAVE_EXPR (J)
12968 (where the same SAVE_EXPR (J) is used in the original and the
12969 transformed version). */
12971 int
12972 multiple_of_p (tree type, const_tree top, const_tree bottom)
12974 gimple *stmt;
12975 tree t1, op1, op2;
12977 if (operand_equal_p (top, bottom, 0))
12978 return 1;
12980 if (TREE_CODE (type) != INTEGER_TYPE)
12981 return 0;
12983 switch (TREE_CODE (top))
12985 case BIT_AND_EXPR:
12986 /* Bitwise and provides a power of two multiple. If the mask is
12987 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12988 if (!integer_pow2p (bottom))
12989 return 0;
12990 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12991 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12993 case MULT_EXPR:
12994 if (TREE_CODE (bottom) == INTEGER_CST)
12996 op1 = TREE_OPERAND (top, 0);
12997 op2 = TREE_OPERAND (top, 1);
12998 if (TREE_CODE (op1) == INTEGER_CST)
12999 std::swap (op1, op2);
13000 if (TREE_CODE (op2) == INTEGER_CST)
13002 if (multiple_of_p (type, op2, bottom))
13003 return 1;
13004 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13005 if (multiple_of_p (type, bottom, op2))
13007 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13008 wi::to_widest (op2));
13009 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13011 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13012 return multiple_of_p (type, op1, op2);
13015 return multiple_of_p (type, op1, bottom);
13018 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13019 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13021 case MINUS_EXPR:
13022 /* It is impossible to prove precisely whether op0 - op1 is a multiple
13023 of bottom, so be conservative here and check whether both op0 and op1
13024 are multiples of bottom. Note we check the second operand first
13025 since it's usually simpler. */
13026 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13027 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13029 case PLUS_EXPR:
13030 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13031 as op0 - 3 if the expression has unsigned type. For example,
13032 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
13033 op1 = TREE_OPERAND (top, 1);
13034 if (TYPE_UNSIGNED (type)
13035 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13036 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13037 return (multiple_of_p (type, op1, bottom)
13038 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13040 case LSHIFT_EXPR:
13041 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13043 op1 = TREE_OPERAND (top, 1);
13044 /* const_binop may not detect overflow correctly,
13045 so check for it explicitly here. */
13046 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13047 wi::to_wide (op1))
13048 && (t1 = fold_convert (type,
13049 const_binop (LSHIFT_EXPR, size_one_node,
13050 op1))) != 0
13051 && !TREE_OVERFLOW (t1))
13052 return multiple_of_p (type, t1, bottom);
13054 return 0;
13056 case NOP_EXPR:
13057 /* Can't handle conversions from non-integral or wider integral type. */
13058 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13059 || (TYPE_PRECISION (type)
13060 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13061 return 0;
13063 /* fall through */
13065 case SAVE_EXPR:
13066 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13068 case COND_EXPR:
13069 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13070 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13072 case INTEGER_CST:
13073 if (TREE_CODE (bottom) != INTEGER_CST
13074 || integer_zerop (bottom)
13075 || (TYPE_UNSIGNED (type)
13076 && (tree_int_cst_sgn (top) < 0
13077 || tree_int_cst_sgn (bottom) < 0)))
13078 return 0;
13079 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13080 SIGNED);
13082 case SSA_NAME:
13083 if (TREE_CODE (bottom) == INTEGER_CST
13084 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13085 && gimple_code (stmt) == GIMPLE_ASSIGN)
13087 enum tree_code code = gimple_assign_rhs_code (stmt);
13089 /* Check for special cases to see if top is defined as a multiple
13090 of bottom:
13092 top = (X & ~(bottom - 1)) ; bottom is a power of 2
13094 or
13096 Y = X % bottom
13097 top = X - Y. */
13098 if (code == BIT_AND_EXPR
13099 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13100 && TREE_CODE (op2) == INTEGER_CST
13101 && integer_pow2p (bottom)
13102 && wi::multiple_of_p (wi::to_widest (op2),
13103 wi::to_widest (bottom), UNSIGNED))
13104 return 1;
13106 op1 = gimple_assign_rhs1 (stmt);
13107 if (code == MINUS_EXPR
13108 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13109 && TREE_CODE (op2) == SSA_NAME
13110 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13111 && gimple_code (stmt) == GIMPLE_ASSIGN
13112 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13113 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13114 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13115 return 1;
13118 /* fall through */
13120 default:
13121 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13122 return multiple_p (wi::to_poly_widest (top),
13123 wi::to_poly_widest (bottom));
13125 return 0;
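/* A minimal usage sketch, assuming TOP and BOTTOM are trees of the
   integer type TYPE:

     if (multiple_of_p (type, top, bottom))
       ...  TOP is provably a multiple of BOTTOM  ...

   A zero result only means the property could not be shown cheaply,
   not that TOP is known not to be a multiple of BOTTOM.  */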
13129 #define tree_expr_nonnegative_warnv_p(X, Y) \
13130 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13132 #define RECURSE(X) \
13133 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
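/* The #define above deliberately turns any direct call to
   tree_expr_nonnegative_warnv_p in the rest of this region into a
   compile-time error; RECURSE names the function in parentheses,
   which suppresses the macro expansion, so every recursive query
   threads STRICT_OVERFLOW_P and an incremented DEPTH:

     RECURSE (op0)
       ==> (tree_expr_nonnegative_warnv_p) (op0, strict_overflow_p,
                                            depth + 1)  */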
13135 /* Return true if CODE or TYPE is known to be non-negative. */
13137 static bool
13138 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13140 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13141 && truth_value_p (code))
13142 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13143 have a signed:1 type (where the values are -1 and 0). */
13144 return true;
13145 return false;
13148 /* Return true if (CODE OP0) is known to be non-negative. If the return
13149 value is based on the assumption that signed overflow is undefined,
13150 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13151 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13153 bool
13154 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13155 bool *strict_overflow_p, int depth)
13157 if (TYPE_UNSIGNED (type))
13158 return true;
13160 switch (code)
13162 case ABS_EXPR:
13163 /* We can't return 1 if flag_wrapv is set because
13164 ABS_EXPR<INT_MIN> = INT_MIN. */
13165 if (!ANY_INTEGRAL_TYPE_P (type))
13166 return true;
13167 if (TYPE_OVERFLOW_UNDEFINED (type))
13169 *strict_overflow_p = true;
13170 return true;
13172 break;
13174 case NON_LVALUE_EXPR:
13175 case FLOAT_EXPR:
13176 case FIX_TRUNC_EXPR:
13177 return RECURSE (op0);
13179 CASE_CONVERT:
13181 tree inner_type = TREE_TYPE (op0);
13182 tree outer_type = type;
13184 if (TREE_CODE (outer_type) == REAL_TYPE)
13186 if (TREE_CODE (inner_type) == REAL_TYPE)
13187 return RECURSE (op0);
13188 if (INTEGRAL_TYPE_P (inner_type))
13190 if (TYPE_UNSIGNED (inner_type))
13191 return true;
13192 return RECURSE (op0);
13195 else if (INTEGRAL_TYPE_P (outer_type))
13197 if (TREE_CODE (inner_type) == REAL_TYPE)
13198 return RECURSE (op0);
13199 if (INTEGRAL_TYPE_P (inner_type))
13200 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13201 && TYPE_UNSIGNED (inner_type);
13204 break;
13206 default:
13207 return tree_simple_nonnegative_warnv_p (code, type);
13210 /* We don't know the sign of `t', so be conservative and return false. */
13211 return false;
13214 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13215 value is based on the assumption that signed overflow is undefined,
13216 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13217 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13219 bool
13220 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13221 tree op1, bool *strict_overflow_p,
13222 int depth)
13224 if (TYPE_UNSIGNED (type))
13225 return true;
13227 switch (code)
13229 case POINTER_PLUS_EXPR:
13230 case PLUS_EXPR:
13231 if (FLOAT_TYPE_P (type))
13232 return RECURSE (op0) && RECURSE (op1);
13234 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13235 both unsigned and at least 2 bits shorter than the result. */
13236 if (TREE_CODE (type) == INTEGER_TYPE
13237 && TREE_CODE (op0) == NOP_EXPR
13238 && TREE_CODE (op1) == NOP_EXPR)
13240 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13241 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13242 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13243 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13245 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13246 TYPE_PRECISION (inner2)) + 1;
13247 return prec < TYPE_PRECISION (type);
13250 break;
13252 case MULT_EXPR:
13253 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13255 /* x * x is always non-negative for floating point x, or for
13256 integers when signed overflow is undefined. */
13257 if (operand_equal_p (op0, op1, 0)
13258 || (RECURSE (op0) && RECURSE (op1)))
13260 if (ANY_INTEGRAL_TYPE_P (type)
13261 && TYPE_OVERFLOW_UNDEFINED (type))
13262 *strict_overflow_p = true;
13263 return true;
13267 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
13268 unsigned and the sum of their precisions is less than that of the result. */
13269 if (TREE_CODE (type) == INTEGER_TYPE
13270 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13271 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13273 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13274 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13275 : TREE_TYPE (op0);
13276 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13277 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13278 : TREE_TYPE (op1);
13280 bool unsigned0 = TYPE_UNSIGNED (inner0);
13281 bool unsigned1 = TYPE_UNSIGNED (inner1);
13283 if (TREE_CODE (op0) == INTEGER_CST)
13284 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13286 if (TREE_CODE (op1) == INTEGER_CST)
13287 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13289 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13290 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13292 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13293 ? tree_int_cst_min_precision (op0, UNSIGNED)
13294 : TYPE_PRECISION (inner0);
13296 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13297 ? tree_int_cst_min_precision (op1, UNSIGNED)
13298 : TYPE_PRECISION (inner1);
13300 return precision0 + precision1 < TYPE_PRECISION (type);
13303 return false;
13305 case BIT_AND_EXPR:
13306 case MAX_EXPR:
13307 return RECURSE (op0) || RECURSE (op1);
13309 case BIT_IOR_EXPR:
13310 case BIT_XOR_EXPR:
13311 case MIN_EXPR:
13312 case RDIV_EXPR:
13313 case TRUNC_DIV_EXPR:
13314 case CEIL_DIV_EXPR:
13315 case FLOOR_DIV_EXPR:
13316 case ROUND_DIV_EXPR:
13317 return RECURSE (op0) && RECURSE (op1);
13319 case TRUNC_MOD_EXPR:
13320 return RECURSE (op0);
13322 case FLOOR_MOD_EXPR:
13323 return RECURSE (op1);
13325 case CEIL_MOD_EXPR:
13326 case ROUND_MOD_EXPR:
13327 default:
13328 return tree_simple_nonnegative_warnv_p (code, type);
13331 /* We don't know the sign of `t', so be conservative and return false. */
13332 return false;
13335 /* Return true if T is known to be non-negative. If the return
13336 value is based on the assumption that signed overflow is undefined,
13337 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13338 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13340 bool
13341 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13343 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13344 return true;
13346 switch (TREE_CODE (t))
13348 case INTEGER_CST:
13349 return tree_int_cst_sgn (t) >= 0;
13351 case REAL_CST:
13352 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13354 case FIXED_CST:
13355 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13357 case COND_EXPR:
13358 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13360 case SSA_NAME:
13361 /* Limit the depth of recursion to avoid quadratic behavior.
13362 This is expected to catch almost all occurrences in practice.
13363 If this code misses important cases that unbounded recursion
13364 would not, passes that need this information could be revised
13365 to provide it through dataflow propagation. */
13366 return (!name_registered_for_update_p (t)
13367 && depth < param_max_ssa_name_query_depth
13368 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13369 strict_overflow_p, depth));
13371 default:
13372 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13376 /* Return true if T is known to be non-negative. If the return
13377 value is based on the assumption that signed overflow is undefined,
13378 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13379 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13381 bool
13382 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13383 bool *strict_overflow_p, int depth)
13385 switch (fn)
13387 CASE_CFN_ACOS:
13388 CASE_CFN_ACOSH:
13389 CASE_CFN_CABS:
13390 CASE_CFN_COSH:
13391 CASE_CFN_ERFC:
13392 CASE_CFN_EXP:
13393 CASE_CFN_EXP10:
13394 CASE_CFN_EXP2:
13395 CASE_CFN_FABS:
13396 CASE_CFN_FDIM:
13397 CASE_CFN_HYPOT:
13398 CASE_CFN_POW10:
13399 CASE_CFN_FFS:
13400 CASE_CFN_PARITY:
13401 CASE_CFN_POPCOUNT:
13402 CASE_CFN_CLZ:
13403 CASE_CFN_CLRSB:
13404 case CFN_BUILT_IN_BSWAP32:
13405 case CFN_BUILT_IN_BSWAP64:
13406 /* Always true. */
13407 return true;
13409 CASE_CFN_SQRT:
13410 CASE_CFN_SQRT_FN:
13411 /* sqrt(-0.0) is -0.0. */
13412 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13413 return true;
13414 return RECURSE (arg0);
13416 CASE_CFN_ASINH:
13417 CASE_CFN_ATAN:
13418 CASE_CFN_ATANH:
13419 CASE_CFN_CBRT:
13420 CASE_CFN_CEIL:
13421 CASE_CFN_CEIL_FN:
13422 CASE_CFN_ERF:
13423 CASE_CFN_EXPM1:
13424 CASE_CFN_FLOOR:
13425 CASE_CFN_FLOOR_FN:
13426 CASE_CFN_FMOD:
13427 CASE_CFN_FREXP:
13428 CASE_CFN_ICEIL:
13429 CASE_CFN_IFLOOR:
13430 CASE_CFN_IRINT:
13431 CASE_CFN_IROUND:
13432 CASE_CFN_LCEIL:
13433 CASE_CFN_LDEXP:
13434 CASE_CFN_LFLOOR:
13435 CASE_CFN_LLCEIL:
13436 CASE_CFN_LLFLOOR:
13437 CASE_CFN_LLRINT:
13438 CASE_CFN_LLROUND:
13439 CASE_CFN_LRINT:
13440 CASE_CFN_LROUND:
13441 CASE_CFN_MODF:
13442 CASE_CFN_NEARBYINT:
13443 CASE_CFN_NEARBYINT_FN:
13444 CASE_CFN_RINT:
13445 CASE_CFN_RINT_FN:
13446 CASE_CFN_ROUND:
13447 CASE_CFN_ROUND_FN:
13448 CASE_CFN_ROUNDEVEN:
13449 CASE_CFN_ROUNDEVEN_FN:
13450 CASE_CFN_SCALB:
13451 CASE_CFN_SCALBLN:
13452 CASE_CFN_SCALBN:
13453 CASE_CFN_SIGNBIT:
13454 CASE_CFN_SIGNIFICAND:
13455 CASE_CFN_SINH:
13456 CASE_CFN_TANH:
13457 CASE_CFN_TRUNC:
13458 CASE_CFN_TRUNC_FN:
13459 /* True if the 1st argument is nonnegative. */
13460 return RECURSE (arg0);
13462 CASE_CFN_FMAX:
13463 CASE_CFN_FMAX_FN:
13464 /* True if the 1st OR 2nd arguments are nonnegative. */
13465 return RECURSE (arg0) || RECURSE (arg1);
13467 CASE_CFN_FMIN:
13468 CASE_CFN_FMIN_FN:
13469 /* True if the 1st AND 2nd arguments are nonnegative. */
13470 return RECURSE (arg0) && RECURSE (arg1);
13472 CASE_CFN_COPYSIGN:
13473 CASE_CFN_COPYSIGN_FN:
13474 /* True if the 2nd argument is nonnegative. */
13475 return RECURSE (arg1);
13477 CASE_CFN_POWI:
13478 /* True if the 1st argument is nonnegative or the second
13479 argument is an even integer. */
13480 if (TREE_CODE (arg1) == INTEGER_CST
13481 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13482 return true;
13483 return RECURSE (arg0);
13485 CASE_CFN_POW:
13486 /* True if the 1st argument is nonnegative or the second
13487 argument is an even integer valued real. */
13488 if (TREE_CODE (arg1) == REAL_CST)
13490 REAL_VALUE_TYPE c;
13491 HOST_WIDE_INT n;
13493 c = TREE_REAL_CST (arg1);
13494 n = real_to_integer (&c);
13495 if ((n & 1) == 0)
13497 REAL_VALUE_TYPE cint;
13498 real_from_integer (&cint, VOIDmode, n, SIGNED);
13499 if (real_identical (&c, &cint))
13500 return true;
13503 return RECURSE (arg0);
13505 default:
13506 break;
13508 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13511 /* Return true if T is known to be non-negative. If the return
13512 value is based on the assumption that signed overflow is undefined,
13513 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13514 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13516 static bool
13517 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13519 enum tree_code code = TREE_CODE (t);
13520 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13521 return true;
13523 switch (code)
13525 case TARGET_EXPR:
13527 tree temp = TARGET_EXPR_SLOT (t);
13528 t = TARGET_EXPR_INITIAL (t);
13530 /* If the initializer is non-void, then it's a normal expression
13531 that will be assigned to the slot. */
13532 if (!VOID_TYPE_P (t))
13533 return RECURSE (t);
13535 /* Otherwise, the initializer sets the slot in some way. One common
13536 way is an assignment statement at the end of the initializer. */
13537 while (1)
13539 if (TREE_CODE (t) == BIND_EXPR)
13540 t = expr_last (BIND_EXPR_BODY (t));
13541 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13542 || TREE_CODE (t) == TRY_CATCH_EXPR)
13543 t = expr_last (TREE_OPERAND (t, 0));
13544 else if (TREE_CODE (t) == STATEMENT_LIST)
13545 t = expr_last (t);
13546 else
13547 break;
13549 if (TREE_CODE (t) == MODIFY_EXPR
13550 && TREE_OPERAND (t, 0) == temp)
13551 return RECURSE (TREE_OPERAND (t, 1));
13553 return false;
13556 case CALL_EXPR:
13558 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13559 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13561 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13562 get_call_combined_fn (t),
13563 arg0,
13564 arg1,
13565 strict_overflow_p, depth);
13567 case COMPOUND_EXPR:
13568 case MODIFY_EXPR:
13569 return RECURSE (TREE_OPERAND (t, 1));
13571 case BIND_EXPR:
13572 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13574 case SAVE_EXPR:
13575 return RECURSE (TREE_OPERAND (t, 0));
13577 default:
13578 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13582 #undef RECURSE
13583 #undef tree_expr_nonnegative_warnv_p
13585 /* Return true if T is known to be non-negative. If the return
13586 value is based on the assumption that signed overflow is undefined,
13587 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13588 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13590 bool
13591 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13593 enum tree_code code;
13594 if (t == error_mark_node)
13595 return false;
13597 code = TREE_CODE (t);
13598 switch (TREE_CODE_CLASS (code))
13600 case tcc_binary:
13601 case tcc_comparison:
13602 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13603 TREE_TYPE (t),
13604 TREE_OPERAND (t, 0),
13605 TREE_OPERAND (t, 1),
13606 strict_overflow_p, depth);
13608 case tcc_unary:
13609 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13610 TREE_TYPE (t),
13611 TREE_OPERAND (t, 0),
13612 strict_overflow_p, depth);
13614 case tcc_constant:
13615 case tcc_declaration:
13616 case tcc_reference:
13617 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13619 default:
13620 break;
13623 switch (code)
13625 case TRUTH_AND_EXPR:
13626 case TRUTH_OR_EXPR:
13627 case TRUTH_XOR_EXPR:
13628 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13629 TREE_TYPE (t),
13630 TREE_OPERAND (t, 0),
13631 TREE_OPERAND (t, 1),
13632 strict_overflow_p, depth);
13633 case TRUTH_NOT_EXPR:
13634 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13635 TREE_TYPE (t),
13636 TREE_OPERAND (t, 0),
13637 strict_overflow_p, depth);
13639 case COND_EXPR:
13640 case CONSTRUCTOR:
13641 case OBJ_TYPE_REF:
13642 case ASSERT_EXPR:
13643 case ADDR_EXPR:
13644 case WITH_SIZE_EXPR:
13645 case SSA_NAME:
13646 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13648 default:
13649 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13653 /* Return true if `t' is known to be non-negative. Handle warnings
13654 about undefined signed overflow. */
13656 bool
13657 tree_expr_nonnegative_p (tree t)
13659 bool ret, strict_overflow_p;
13661 strict_overflow_p = false;
13662 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13663 if (strict_overflow_p)
13664 fold_overflow_warning (("assuming signed overflow does not occur when "
13665 "determining that expression is always "
13666 "non-negative"),
13667 WARN_STRICT_OVERFLOW_MISC);
13668 return ret;
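/* A minimal usage sketch, assuming OP0 is in scope: a transformation
   that drops an ABS_EXPR when its operand is provably non-negative:

     if (tree_expr_nonnegative_p (op0))
       return op0;

   Any "assuming signed overflow does not occur" warning implied by
   the answer is emitted by the wrapper itself.  */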
13672 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13673 For floating point we further ensure that T is not denormal.
13674 Similar logic is present in nonzero_address in rtlanal.h.
13676 If the return value is based on the assumption that signed overflow
13677 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13678 change *STRICT_OVERFLOW_P. */
13680 bool
13681 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13682 bool *strict_overflow_p)
13684 switch (code)
13686 case ABS_EXPR:
13687 return tree_expr_nonzero_warnv_p (op0,
13688 strict_overflow_p);
13690 case NOP_EXPR:
13692 tree inner_type = TREE_TYPE (op0);
13693 tree outer_type = type;
13695 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13696 && tree_expr_nonzero_warnv_p (op0,
13697 strict_overflow_p));
13699 break;
13701 case NON_LVALUE_EXPR:
13702 return tree_expr_nonzero_warnv_p (op0,
13703 strict_overflow_p);
13705 default:
13706 break;
13709 return false;
13712 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13713 For floating point we further ensure that T is not denormal.
13714 Similar logic is present in nonzero_address in rtlanal.h.
13716 If the return value is based on the assumption that signed overflow
13717 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13718 change *STRICT_OVERFLOW_P. */
13720 bool
13721 tree_binary_nonzero_warnv_p (enum tree_code code,
13722 tree type,
13723 tree op0,
13724 tree op1, bool *strict_overflow_p)
13726 bool sub_strict_overflow_p;
13727 switch (code)
13729 case POINTER_PLUS_EXPR:
13730 case PLUS_EXPR:
13731 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13733 /* In the presence of negative values it is hard
13734 to say anything definite. */
13735 sub_strict_overflow_p = false;
13736 if (!tree_expr_nonnegative_warnv_p (op0,
13737 &sub_strict_overflow_p)
13738 || !tree_expr_nonnegative_warnv_p (op1,
13739 &sub_strict_overflow_p))
13740 return false;
13741 /* One of the operands must be positive and the other non-negative. */
13742 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13743 overflows, on a twos-complement machine the sum of two
13744 nonnegative numbers can never be zero. */
13745 return (tree_expr_nonzero_warnv_p (op0,
13746 strict_overflow_p)
13747 || tree_expr_nonzero_warnv_p (op1,
13748 strict_overflow_p));
13750 break;
13752 case MULT_EXPR:
13753 if (TYPE_OVERFLOW_UNDEFINED (type))
13755 if (tree_expr_nonzero_warnv_p (op0,
13756 strict_overflow_p)
13757 && tree_expr_nonzero_warnv_p (op1,
13758 strict_overflow_p))
13760 *strict_overflow_p = true;
13761 return true;
13764 break;
13766 case MIN_EXPR:
13767 sub_strict_overflow_p = false;
13768 if (tree_expr_nonzero_warnv_p (op0,
13769 &sub_strict_overflow_p)
13770 && tree_expr_nonzero_warnv_p (op1,
13771 &sub_strict_overflow_p))
13773 if (sub_strict_overflow_p)
13774 *strict_overflow_p = true;
13776 break;
13778 case MAX_EXPR:
13779 sub_strict_overflow_p = false;
13780 if (tree_expr_nonzero_warnv_p (op0,
13781 &sub_strict_overflow_p))
13783 if (sub_strict_overflow_p)
13784 *strict_overflow_p = true;
13786 /* When both operands are nonzero, then MAX must be too. */
13787 if (tree_expr_nonzero_warnv_p (op1,
13788 strict_overflow_p))
13789 return true;
13791 /* MAX where operand 0 is positive is positive. */
13792 return tree_expr_nonnegative_warnv_p (op0,
13793 strict_overflow_p);
13795 /* MAX where operand 1 is positive is positive. */
13796 else if (tree_expr_nonzero_warnv_p (op1,
13797 &sub_strict_overflow_p)
13798 && tree_expr_nonnegative_warnv_p (op1,
13799 &sub_strict_overflow_p))
13801 if (sub_strict_overflow_p)
13802 *strict_overflow_p = true;
13803 return true;
13805 break;
13807 case BIT_IOR_EXPR:
13808 return (tree_expr_nonzero_warnv_p (op1,
13809 strict_overflow_p)
13810 || tree_expr_nonzero_warnv_p (op0,
13811 strict_overflow_p));
13813 default:
13814 break;
13817 return false;
13820 /* Return true when T is an address and is known to be nonzero.
13821 For floating point we further ensure that T is not denormal.
13822 Similar logic is present in nonzero_address in rtlanal.h.
13824 If the return value is based on the assumption that signed overflow
13825 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13826 change *STRICT_OVERFLOW_P. */
13828 bool
13829 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13831 bool sub_strict_overflow_p;
13832 switch (TREE_CODE (t))
13834 case INTEGER_CST:
13835 return !integer_zerop (t);
13837 case ADDR_EXPR:
13839 tree base = TREE_OPERAND (t, 0);
13841 if (!DECL_P (base))
13842 base = get_base_address (base);
13844 if (base && TREE_CODE (base) == TARGET_EXPR)
13845 base = TARGET_EXPR_SLOT (base);
13847 if (!base)
13848 return false;
13850 /* For objects in the symbol table, check whether we know they are non-zero.
13851 Don't do anything for variables and functions before symtab is built;
13852 it is quite possible that they will be declared weak later. */
13853 int nonzero_addr = maybe_nonzero_address (base);
13854 if (nonzero_addr >= 0)
13855 return nonzero_addr;
13857 /* Constants are never weak. */
13858 if (CONSTANT_CLASS_P (base))
13859 return true;
13861 return false;
13864 case COND_EXPR:
13865 sub_strict_overflow_p = false;
13866 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13867 &sub_strict_overflow_p)
13868 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13869 &sub_strict_overflow_p))
13871 if (sub_strict_overflow_p)
13872 *strict_overflow_p = true;
13873 return true;
13875 break;
13877 case SSA_NAME:
13878 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13879 break;
13880 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13882 default:
13883 break;
13885 return false;
13888 #define integer_valued_real_p(X) \
13889 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13891 #define RECURSE(X) \
13892 ((integer_valued_real_p) (X, depth + 1))
13894 /* Return true if the floating point result of (CODE OP0) has an
13895 integer value. We also allow +Inf, -Inf and NaN to be considered
13896 integer values. Return false for signaling NaN.
13898 DEPTH is the current nesting depth of the query. */
13900 bool
13901 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13903 switch (code)
13905 case FLOAT_EXPR:
13906 return true;
13908 case ABS_EXPR:
13909 return RECURSE (op0);
13911 CASE_CONVERT:
13913 tree type = TREE_TYPE (op0);
13914 if (TREE_CODE (type) == INTEGER_TYPE)
13915 return true;
13916 if (TREE_CODE (type) == REAL_TYPE)
13917 return RECURSE (op0);
13918 break;
13921 default:
13922 break;
13924 return false;
13927 /* Return true if the floating point result of (CODE OP0 OP1) has an
13928 integer value. We also allow +Inf, -Inf and NaN to be considered
13929 integer values. Return false for signaling NaN.
13931 DEPTH is the current nesting depth of the query. */
13933 bool
13934 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13936 switch (code)
13938 case PLUS_EXPR:
13939 case MINUS_EXPR:
13940 case MULT_EXPR:
13941 case MIN_EXPR:
13942 case MAX_EXPR:
13943 return RECURSE (op0) && RECURSE (op1);
13945 default:
13946 break;
13948 return false;
13951 /* Return true if the floating point result of calling FN with arguments
13952 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13953 considered integer values. Return false for signaling NaN. If FN
13954 takes fewer than 2 arguments, the remaining ARGn are null.
13956 DEPTH is the current nesting depth of the query. */
13958 bool
13959 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13961 switch (fn)
13963 CASE_CFN_CEIL:
13964 CASE_CFN_CEIL_FN:
13965 CASE_CFN_FLOOR:
13966 CASE_CFN_FLOOR_FN:
13967 CASE_CFN_NEARBYINT:
13968 CASE_CFN_NEARBYINT_FN:
13969 CASE_CFN_RINT:
13970 CASE_CFN_RINT_FN:
13971 CASE_CFN_ROUND:
13972 CASE_CFN_ROUND_FN:
13973 CASE_CFN_ROUNDEVEN:
13974 CASE_CFN_ROUNDEVEN_FN:
13975 CASE_CFN_TRUNC:
13976 CASE_CFN_TRUNC_FN:
13977 return true;
13979 CASE_CFN_FMIN:
13980 CASE_CFN_FMIN_FN:
13981 CASE_CFN_FMAX:
13982 CASE_CFN_FMAX_FN:
13983 return RECURSE (arg0) && RECURSE (arg1);
13985 default:
13986 break;
13988 return false;
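/* Worked examples (a sketch, using the usual combined_fn values):
   floor always yields an integer value, so

     integer_valued_real_call_p (CFN_BUILT_IN_FLOOR, x, NULL_TREE, 0)

   is true for any X, whereas fmin is integer valued only when both
   arguments are, hence the RECURSE on ARG0 and ARG1 above.  */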
13991 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13992 has an integer value. We also allow +Inf, -Inf and NaN to be
13993 considered integer values. Return false for signaling NaN.
13995 DEPTH is the current nesting depth of the query. */
13997 bool
13998 integer_valued_real_single_p (tree t, int depth)
14000 switch (TREE_CODE (t))
14002 case REAL_CST:
14003 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14005 case COND_EXPR:
14006 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14008 case SSA_NAME:
14009 /* Limit the depth of recursion to avoid quadratic behavior.
14010 This is expected to catch almost all occurrences in practice.
14011 If this code misses important cases that unbounded recursion
14012 would not, passes that need this information could be revised
14013 to provide it through dataflow propagation. */
14014 return (!name_registered_for_update_p (t)
14015 && depth < param_max_ssa_name_query_depth
14016 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14017 depth));
14019 default:
14020 break;
14022 return false;
14025 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14026 has an integer value. We also allow +Inf, -Inf and NaN to be
14027 considered integer values. Return false for signaling NaN.
14029 DEPTH is the current nesting depth of the query. */
14031 static bool
14032 integer_valued_real_invalid_p (tree t, int depth)
14034 switch (TREE_CODE (t))
14036 case COMPOUND_EXPR:
14037 case MODIFY_EXPR:
14038 case BIND_EXPR:
14039 return RECURSE (TREE_OPERAND (t, 1));
14041 case SAVE_EXPR:
14042 return RECURSE (TREE_OPERAND (t, 0));
14044 default:
14045 break;
14047 return false;
14050 #undef RECURSE
14051 #undef integer_valued_real_p
14053 /* Return true if the floating point expression T has an integer value.
14054 We also allow +Inf, -Inf and NaN to be considered integer values.
14055 Return false for signaling NaN.
14057 DEPTH is the current nesting depth of the query. */
14059 bool
14060 integer_valued_real_p (tree t, int depth)
14062 if (t == error_mark_node)
14063 return false;
14065 STRIP_ANY_LOCATION_WRAPPER (t);
14067 tree_code code = TREE_CODE (t);
14068 switch (TREE_CODE_CLASS (code))
14070 case tcc_binary:
14071 case tcc_comparison:
14072 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14073 TREE_OPERAND (t, 1), depth);
14075 case tcc_unary:
14076 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14078 case tcc_constant:
14079 case tcc_declaration:
14080 case tcc_reference:
14081 return integer_valued_real_single_p (t, depth);
14083 default:
14084 break;
14087 switch (code)
14089 case COND_EXPR:
14090 case SSA_NAME:
14091 return integer_valued_real_single_p (t, depth);
14093 case CALL_EXPR:
14095 tree arg0 = (call_expr_nargs (t) > 0
14096 ? CALL_EXPR_ARG (t, 0)
14097 : NULL_TREE);
14098 tree arg1 = (call_expr_nargs (t) > 1
14099 ? CALL_EXPR_ARG (t, 1)
14100 : NULL_TREE);
14101 return integer_valued_real_call_p (get_call_combined_fn (t),
14102 arg0, arg1, depth);
14105 default:
14106 return integer_valued_real_invalid_p (t, depth);
14110 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14111 attempt to fold the expression to a constant without modifying TYPE,
14112 OP0 or OP1.
14114 If the expression can be simplified to a constant, then return
14115 the constant. If the expression cannot be simplified to a
14116 constant, then return NULL_TREE. */
14118 tree
14119 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14121 tree tem = fold_binary (code, type, op0, op1);
14122 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14125 /* Given the components of a unary expression CODE, TYPE and OP0,
14126 attempt to fold the expression to a constant without modifying
14127 TYPE or OP0.
14129 If the expression can be simplified to a constant, then return
14130 the constant. If the expression cannot be simplified to a
14131 constant, then return NULL_TREE. */
14133 tree
14134 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14136 tree tem = fold_unary (code, type, op0);
14137 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14140 /* If EXP represents referencing an element in a constant string
14141 (either via pointer arithmetic or array indexing), return the
14142 tree representing the value accessed, otherwise return NULL. */
14144 tree
14145 fold_read_from_constant_string (tree exp)
14147 if ((TREE_CODE (exp) == INDIRECT_REF
14148 || TREE_CODE (exp) == ARRAY_REF)
14149 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14151 tree exp1 = TREE_OPERAND (exp, 0);
14152 tree index;
14153 tree string;
14154 location_t loc = EXPR_LOCATION (exp);
14156 if (TREE_CODE (exp) == INDIRECT_REF)
14157 string = string_constant (exp1, &index, NULL, NULL);
14158 else
14160 tree low_bound = array_ref_low_bound (exp);
14161 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14163 /* Optimize the special-case of a zero lower bound.
14165 We convert the low_bound to sizetype to avoid some problems
14166 with constant folding. (E.g. suppose the lower bound is 1,
14167 and its mode is QI. Without the conversion, (ARRAY
14168 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14169 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14170 if (! integer_zerop (low_bound))
14171 index = size_diffop_loc (loc, index,
14172 fold_convert_loc (loc, sizetype, low_bound));
14174 string = exp1;
14177 scalar_int_mode char_mode;
14178 if (string
14179 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14180 && TREE_CODE (string) == STRING_CST
14181 && TREE_CODE (index) == INTEGER_CST
14182 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14183 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14184 &char_mode)
14185 && GET_MODE_SIZE (char_mode) == 1)
14186 return build_int_cst_type (TREE_TYPE (exp),
14187 (TREE_STRING_POINTER (string)
14188 [TREE_INT_CST_LOW (index)]));
14190 return NULL;
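/* A minimal usage sketch, assuming EXP is the GENERIC tree for
   "abc"[1], i.e. an ARRAY_REF of a STRING_CST with index 1:

     tree c = fold_read_from_constant_string (exp);

   C is then the INTEGER_CST 'b' in the element type of EXP, and NULL
   whenever the string or the index cannot be seen through.  */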
14193 /* Folds a read from vector element at IDX of vector ARG. */
14195 tree
14196 fold_read_from_vector (tree arg, poly_uint64 idx)
14198 unsigned HOST_WIDE_INT i;
14199 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14200 && known_ge (idx, 0u)
14201 && idx.is_constant (&i))
14203 if (TREE_CODE (arg) == VECTOR_CST)
14204 return VECTOR_CST_ELT (arg, i);
14205 else if (TREE_CODE (arg) == CONSTRUCTOR)
14207 if (i >= CONSTRUCTOR_NELTS (arg))
14208 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14209 return CONSTRUCTOR_ELT (arg, i)->value;
14212 return NULL_TREE;
14215 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14216 an integer, real, or fixed-point constant.
14218 TYPE is the type of the result. */
14220 static tree
14221 fold_negate_const (tree arg0, tree type)
14223 tree t = NULL_TREE;
14225 switch (TREE_CODE (arg0))
14227 case REAL_CST:
14228 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14229 break;
14231 case FIXED_CST:
14233 FIXED_VALUE_TYPE f;
14234 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14235 &(TREE_FIXED_CST (arg0)), NULL,
14236 TYPE_SATURATING (type));
14237 t = build_fixed (type, f);
14238 /* Propagate overflow flags. */
14239 if (overflow_p | TREE_OVERFLOW (arg0))
14240 TREE_OVERFLOW (t) = 1;
14241 break;
14244 default:
14245 if (poly_int_tree_p (arg0))
14247 wi::overflow_type overflow;
14248 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14249 t = force_fit_type (type, res, 1,
14250 (overflow && ! TYPE_UNSIGNED (type))
14251 || TREE_OVERFLOW (arg0));
14252 break;
14255 gcc_unreachable ();
14258 return t;
14261 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14262 an integer constant or real constant.
14264 TYPE is the type of the result. */
14266 tree
14267 fold_abs_const (tree arg0, tree type)
14269 tree t = NULL_TREE;
14271 switch (TREE_CODE (arg0))
14273 case INTEGER_CST:
14275 /* If the value is unsigned or non-negative, then the absolute value
14276 is the same as the ordinary value. */
14277 wide_int val = wi::to_wide (arg0);
14278 wi::overflow_type overflow = wi::OVF_NONE;
14279 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14282 /* If the value is negative, then the absolute value is
14283 its negation. */
14284 else
14285 val = wi::neg (val, &overflow);
14287 /* Force to the destination type, set TREE_OVERFLOW for signed
14288 TYPE only. */
14289 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14291 break;
14293 case REAL_CST:
14294 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14295 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14296 else
14297 t = arg0;
14298 break;
14300 default:
14301 gcc_unreachable ();
14304 return t;
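/* A boundary-case sketch, assuming a target where integer_type_node
   is 32 bits wide: the absolute value of INT_MIN wraps back to
   INT_MIN, so the result carries TREE_OVERFLOW:

     tree m = build_int_cst (integer_type_node, INT_MIN);
     tree t = fold_abs_const (m, integer_type_node);
     gcc_checking_assert (TREE_OVERFLOW (t));  */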
14307 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14308 constant. TYPE is the type of the result. */
14310 static tree
14311 fold_not_const (const_tree arg0, tree type)
14313 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14315 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
/* Given CODE, a relational operator, the target type TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile-time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      if (!VECTOR_TYPE_P (type))
	{
	  /* Have vector comparison with scalar boolean result.  */
	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
		      && known_eq (VECTOR_CST_NELTS (op0),
				   VECTOR_CST_NELTS (op1)));
	  unsigned HOST_WIDE_INT nunits;
	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
	    return NULL_TREE;
	  for (unsigned i = 0; i < nunits; i++)
	    {
	      tree elem0 = VECTOR_CST_ELT (op0, i);
	      tree elem1 = VECTOR_CST_ELT (op1, i);
	      tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
	      if (tmp == NULL_TREE)
		return NULL_TREE;
	      if (integer_zerop (tmp))
		return constant_boolean_node (code == NE_EXPR, type);
	    }
	  return constant_boolean_node (code == EQ_EXPR, type);
	}
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, op0, op1, false))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts.quick_push (build_int_cst (elem_type,
					  integer_zerop (tem) ? 0 : -1));
	}

      return elts.build ();
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
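
  /* For instance: 2 > 3 is rewritten as 3 < 2 (false), while 2 >= 3 is
     rewritten as 2 < 3 (true) and the result is then inverted to
     false.  The swap and the inversion below implement exactly that
     table.  */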
  if (code == LE_EXPR || code == GT_EXPR)
    {
      std::swap (op0, op1);
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR inside it,
     has side effects.  If either has none, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be the
     return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
}
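
/* Illustrative sketch (kept out of the build).  A return whose value
   comes from a call needs a cleanup point; a return of a plain
   variable does not.  Both parameters here are assumed to be
   RETURN_EXPRs of a MODIFY_EXPR.  */
#if 0
static void
example_cleanup_point (tree return_of_call, tree return_of_var)
{
  /* 'return <retval> = f ();' -- the RHS has side effects, so the
     RETURN_EXPR is wrapped in a CLEANUP_POINT_EXPR.  */
  tree wrapped
    = fold_build_cleanup_point_expr (void_type_node, return_of_call);
  gcc_assert (TREE_CODE (wrapped) == CLEANUP_POINT_EXPR);

  /* 'return <retval> = x;' -- the RHS is side-effect free, so the
     expression is returned unchanged.  */
  tree unwrapped
    = fold_build_cleanup_point_expr (void_type_node, return_of_var);
  gcc_assert (unwrapped == return_of_var);
}
#endif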
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;
  poly_uint64 const_op01;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);

      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (VECTOR_TYPE_P (optype)
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (VECTOR_TYPE_P (op00type)
	      && type == TREE_TYPE (op00type)
	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
		 but we want to treat offsets with MSB set as negative.
		 For the code below negative offsets are invalid and
		 TYPE_SIZE of the element is something unsigned, so
		 check whether op01 fits into poly_int64, which implies
		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
		 then just use poly_uint64 because we want to treat the
		 value as unsigned.  */
	      && tree_fits_poly_int64_p (op01))
	    {
	      tree part_width = TYPE_SIZE (type);
	      poly_uint64 max_offset
		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
		   * TYPE_VECTOR_SUBPARTS (op00type));
	      if (known_lt (const_op01, max_offset))
		{
		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
		  return fold_build3_loc (loc,
					  BIT_FIELD_REF, type, op00,
					  part_width, index);
		}
	    }
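	  /* For instance: with a V4SI vector V, *((int *) &V + 8) has
	     const_op01 == 8 and max_offset == 16, and folds to
	     BIT_FIELD_REF <V, 32, 64>, i.e. element number 2.  */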
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
			    const_op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      poly_uint64 type_size, index;
	      if (poly_int_tree_p (min_val)
		  && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
		  && multiple_p (const_op01, type_size, &index))
		{
		  poly_offset_int off = index + wi::to_poly_offset (min_val);
		  op01 = wide_int_to_tree (sizetype, off);
		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
				     NULL_TREE, NULL_TREE);
		}
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
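
/* Illustrative sketch (kept out of the build).  For an ignored
   'f () + 3', only the call has side effects; the addition is
   stripped and the call alone remains.  */
#if 0
static void
example_fold_ignored_result (tree call_plus_three, tree call)
{
  /* CALL_PLUS_THREE is PLUS_EXPR <CALL, 3>.  Operand 1 has no side
     effects, so the loop above descends into operand 0.  */
  gcc_assert (fold_ignored_result (call_plus_three) == call);
}
#endif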
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= (int) -divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
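
/* For instance: for non-constant X, round_up_loc (loc, X, 8) builds
   (X + 7) & -8, while a non-power-of-two divisor such as 12 builds
   CEIL_DIV_EXPR (X, 12) * 12.  */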
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
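
/* For instance: round_down_loc (loc, X, 8) builds X & -8, and a
   non-power-of-two divisor uses FLOOR_DIV_EXPR followed by
   MULT_EXPR.  */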
/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the offset of the access, storing the
   constant bit offset in *PBITPOS and any variable offset in *POFFSET
   (or NULL_TREE if the whole offset is constant).  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
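
/* For instance: for EXP = p + 4 (a POINTER_PLUS_EXPR with a constant
   offset), the core is p, *PBITPOS becomes 32 (4 bytes expressed in
   bits) and *POFFSET is NULL_TREE; a variable offset would instead be
   left in *POFFSET with *PBITPOS of 0.  */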
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
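
/* Illustrative sketch (kept out of the build).  Given 'int a[10];',
   the addresses &a[3] and &a[1] share the core &a, so their difference
   is the compile-time constant 8 -- two 4-byte elements.  */
#if 0
static void
example_ptr_difference (tree addr_a3, tree addr_a1)
{
  poly_int64 diff;
  if (ptr_difference_const (addr_a3, addr_a1, &diff))
    gcc_assert (known_eq (diff, 8));
}
#endif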
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
/* Return a pointer P to a NUL-terminated string representing the sequence
   of constant characters referred to by SRC (or a subsequence of such
   characters within it if SRC is a reference to a string plus some
   constant offset).  If STRLEN is non-null, store the number of bytes
   in the string constant including the terminating NUL char.  *STRLEN is
   typically strlen(P) + 1 in the absence of embedded NUL characters.  */

const char *
c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
{
  tree offset_node;
  tree mem_size;

  if (strlen)
    *strlen = 0;

  src = string_constant (src, &offset_node, &mem_size, NULL);
  if (src == 0)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* STRING_LENGTH is the size of the string literal, including any
     embedded NULs.  STRING_SIZE is the size of the array the string
     literal is stored in.  */
  unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (string_length > string_size)
    string_length = string_size;

  const char *string = TREE_STRING_POINTER (src);
  if (string_length == 0
      || offset >= string_size)
    return NULL;

  if (strlen)
    {
      /* Compute and store the length of the substring at OFFSET.
	 All offsets past the initial length refer to null strings.  */
      if (offset < string_length)
	*strlen = string_length - offset;
      else
	*strlen = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[string_length - 1] != '\0')
	return NULL;
    }

  return offset < string_length ? string + offset : "";
}
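
/* Illustrative sketch (kept out of the build).  For a reference to
   "hello" at offset 1, the returned pointer is "ello" and *STRLEN is
   set to 5: the remaining four characters plus the terminating NUL.  */
#if 0
static void
example_c_getstr (tree ref_into_hello)
{
  unsigned HOST_WIDE_INT len;
  const char *p = c_getstr (ref_into_hello, &len);
  gcc_assert (p != NULL && strcmp (p, "ello") == 0 && len == 5);
}
#endif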
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
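
/* Illustrative sketch (kept out of the build).  For an expression such
   as (x & 0xF0) >> 4, the BIT_AND_EXPR case limits the nonzero bits to
   0xF0 and the RSHIFT_EXPR case moves them down, so only the low four
   bits of the whole expression can ever be set.  */
#if 0
static void
example_tree_nonzero_bits (tree masked_shifted_x)
{
  wide_int nz = tree_nonzero_bits (masked_shifted_x);
  gcc_assert (wi::eq_p (nz, 0xf));
}
#endif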
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}
/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */