/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85 #include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
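
/* For illustration (not part of the original sources): with INTEGER_CSTs
   A = 12 and B = 4 of the same type, div_if_zero_remainder (A, B) folds
   to the constant 3, while A = 13, B = 4 yields NULL_TREE because the
   remainder is nonzero.  */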
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
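
/* Illustrative usage sketch of the deferral machinery, modelled on the
   callers described above; `expr' and `stmt' are hypothetical.  */
#if 0
  fold_defer_overflow_warnings ();
  tree folded = fold (expr);
  /* Only warn if the folded result is actually used.  */
  bool used = folded != NULL_TREE && TREE_CODE (folded) == INTEGER_CST;
  fold_undefer_overflow_warnings (used, stmt, 0);
#endif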
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
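
/* Example (illustrative): for a signed 32-bit INTEGER_CST, every value
   except INT_MIN (-2147483648) may be negated; INT_MIN is exactly the
   "only the sign bit set" pattern rejected above, since -INT_MIN does
   not fit in the type.  */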
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
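
/* A minimal sketch (with hypothetical trees A and B) of what the entry
   point above produces: for T = A - B in a type where signed zeros and
   sign-dependent rounding are not honored, fold_negate_expr rewrites -T
   as B - A; for a T it cannot simplify, negate_expr falls back to
   wrapping T in an explicit NEGATE_EXPR.  */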
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
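
/* Worked example (illustrative): splitting IN = (A - 5) with
   CODE = PLUS_EXPR and NEGATE_P = 0 leaves *CONP and *LITP null, stores
   the literal 5 in *MINUS_LITP (it was subtracted), and returns A as
   the variable part; associate_trees below can then recombine the
   pieces.  */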
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
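
/* For instance (illustrative), int_const_binop (PLUS_EXPR, 7, 9) on two
   INTEGER_CSTs of the same 32-bit type folds to 16, while any of the
   division or modulus codes above with a zero ARG2 returns NULL_TREE
   instead of folding.  */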
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_RSHIFT_EXPR is endian dependent.
             For reductions, if !BYTES_BIG_ENDIAN the compiler picks the
             first vector element, but the last element if
             BYTES_BIG_ENDIAN.  */
          if (BYTES_BIG_ENDIAN)
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
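
/* Worked example (illustrative) of the wide-range complex division
   above: folding (3 + 2i) / (1 + 2i) with flag_complex_method != 0
   takes the TRUE branch, because |br| = 1 < |bi| = 2:

     ratio = br / bi                 = 0.5
     div   = bi + br * ratio         = 2.5
     tr    = (ar * ratio + ai) / div = (1.5 + 2) / 2.5 =  1.4
     ti    = (ai * ratio - ar) / div = (1.0 - 3) / 2.5 = -0.8

   which agrees with (3 + 2i)(1 - 2i) / 5 = 1.4 - 0.8i.  */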
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
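
/* For illustration: size_binop (PLUS_EXPR, size_int (0), t) returns T
   via the fast path above without building a new node, while
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds to a
   sizetype 12 with overflow tracked (overflowable == -1) even though
   sizetype is unsigned.  (size_binop is the usual wrapper macro around
   size_binop_loc.)  */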
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
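
/* Example (illustrative): for sizetype constants ARG0 = 2 and ARG1 = 5,
   ARG0 < ARG1, so the function computes 5 - 2 = 3 in the unsigned type,
   converts it to ssizetype and negates, yielding -3 without ever
   overflowing in the signed domain.  */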
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
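
/* Example (illustrative) of the saturating semantics above: folding
   (int) 1.0e10 for a 32-bit int yields INT_MAX (2147483647) with
   TREE_OVERFLOW set, and folding (int) of a NaN yields 0, also with
   TREE_OVERFLOW set.  */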
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to TEMP when the fractional bits are not
     zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
2076 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2078 case VOID_TYPE:
2079 tem = fold_ignored_result (arg);
2080 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2082 default:
2083 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2084 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2085 gcc_unreachable ();
2087 fold_convert_exit:
2088 protected_set_expr_location_unshare (tem, loc);
2089 return tem;
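/* Illustrative sketch (hedged, plain C99; not part of GCC): the
   COMPLEX_TYPE handling above mirrors C99 semantics, where converting
   a complex value to a scalar type keeps only the real part, which is
   exactly what the REALPART_EXPR arms of fold_convert_loc express. */
#if 0
#include <complex.h>
static double
complex_to_real_sketch (double _Complex z)
{
  double via_cast = (double) z;   /* C99 6.3.1.7: imaginary part dropped. */
  double via_creal = creal (z);
  return via_cast - via_creal;    /* 0.0 for any finite z. */
}
#endif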
2092 /* Return false if expr can be assumed not to be an lvalue, true
2093 otherwise. */
2095 static bool
2096 maybe_lvalue_p (const_tree x)
2098 /* We only need to wrap lvalue tree codes. */
2099 switch (TREE_CODE (x))
2101 case VAR_DECL:
2102 case PARM_DECL:
2103 case RESULT_DECL:
2104 case LABEL_DECL:
2105 case FUNCTION_DECL:
2106 case SSA_NAME:
2108 case COMPONENT_REF:
2109 case MEM_REF:
2110 case INDIRECT_REF:
2111 case ARRAY_REF:
2112 case ARRAY_RANGE_REF:
2113 case BIT_FIELD_REF:
2114 case OBJ_TYPE_REF:
2116 case REALPART_EXPR:
2117 case IMAGPART_EXPR:
2118 case PREINCREMENT_EXPR:
2119 case PREDECREMENT_EXPR:
2120 case SAVE_EXPR:
2121 case TRY_CATCH_EXPR:
2122 case WITH_CLEANUP_EXPR:
2123 case COMPOUND_EXPR:
2124 case MODIFY_EXPR:
2125 case TARGET_EXPR:
2126 case COND_EXPR:
2127 case BIND_EXPR:
2128 break;
2130 default:
2131 /* Assume the worst for front-end tree codes. */
2132 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2133 break;
2134 return false;
2137 return true;
2140 /* Return an expr equal to X but certainly not valid as an lvalue. */
2142 tree
2143 non_lvalue_loc (location_t loc, tree x)
2145 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2146 us. */
2147 if (in_gimple_form)
2148 return x;
2150 if (! maybe_lvalue_p (x))
2151 return x;
2152 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2155 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2156 Zero means allow extended lvalues. */
2158 int pedantic_lvalues;
2160 /* When pedantic, return an expr equal to X but certainly not valid as a
2161 pedantic lvalue. Otherwise, return X. */
2163 static tree
2164 pedantic_non_lvalue_loc (location_t loc, tree x)
2166 if (pedantic_lvalues)
2167 return non_lvalue_loc (loc, x);
2169 return protected_set_expr_location_unshare (x, loc);
2172 /* Given a tree comparison code, return the code that is the logical inverse.
2173 It is generally not safe to do this for floating-point comparisons, except
2174 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2175 ERROR_MARK in this case. */
2177 enum tree_code
2178 invert_tree_comparison (enum tree_code code, bool honor_nans)
2180 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2181 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2182 return ERROR_MARK;
2184 switch (code)
2186 case EQ_EXPR:
2187 return NE_EXPR;
2188 case NE_EXPR:
2189 return EQ_EXPR;
2190 case GT_EXPR:
2191 return honor_nans ? UNLE_EXPR : LE_EXPR;
2192 case GE_EXPR:
2193 return honor_nans ? UNLT_EXPR : LT_EXPR;
2194 case LT_EXPR:
2195 return honor_nans ? UNGE_EXPR : GE_EXPR;
2196 case LE_EXPR:
2197 return honor_nans ? UNGT_EXPR : GT_EXPR;
2198 case LTGT_EXPR:
2199 return UNEQ_EXPR;
2200 case UNEQ_EXPR:
2201 return LTGT_EXPR;
2202 case UNGT_EXPR:
2203 return LE_EXPR;
2204 case UNGE_EXPR:
2205 return LT_EXPR;
2206 case UNLT_EXPR:
2207 return GE_EXPR;
2208 case UNLE_EXPR:
2209 return GT_EXPR;
2210 case ORDERED_EXPR:
2211 return UNORDERED_EXPR;
2212 case UNORDERED_EXPR:
2213 return ORDERED_EXPR;
2214 default:
2215 gcc_unreachable ();
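/* Illustrative sketch (hedged, plain C99; not part of GCC) of why plain
   inversion is unsafe when NaNs are honored: with a NaN operand both
   x < y and x >= y are false, so the inverse of LT_EXPR must be
   UNGE_EXPR, as the table above returns. */
#if 0
#include <math.h>
static int
invert_lt_sketch (double x)
{
  double y = NAN;
  return !(x < y) && !(x >= y);   /* 1 for every x: GE is not !LT here. */
}
#endif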
2219 /* Similar, but return the comparison that results if the operands are
2220 swapped. This is safe for floating-point. */
2222 enum tree_code
2223 swap_tree_comparison (enum tree_code code)
2225 switch (code)
2227 case EQ_EXPR:
2228 case NE_EXPR:
2229 case ORDERED_EXPR:
2230 case UNORDERED_EXPR:
2231 case LTGT_EXPR:
2232 case UNEQ_EXPR:
2233 return code;
2234 case GT_EXPR:
2235 return LT_EXPR;
2236 case GE_EXPR:
2237 return LE_EXPR;
2238 case LT_EXPR:
2239 return GT_EXPR;
2240 case LE_EXPR:
2241 return GE_EXPR;
2242 case UNGT_EXPR:
2243 return UNLT_EXPR;
2244 case UNGE_EXPR:
2245 return UNLE_EXPR;
2246 case UNLT_EXPR:
2247 return UNGT_EXPR;
2248 case UNLE_EXPR:
2249 return UNGE_EXPR;
2250 default:
2251 gcc_unreachable ();
2256 /* Convert a comparison tree code from an enum tree_code representation
2257 into a compcode bit-based encoding. This function is the inverse of
2258 compcode_to_comparison. */
2260 static enum comparison_code
2261 comparison_to_compcode (enum tree_code code)
2263 switch (code)
2265 case LT_EXPR:
2266 return COMPCODE_LT;
2267 case EQ_EXPR:
2268 return COMPCODE_EQ;
2269 case LE_EXPR:
2270 return COMPCODE_LE;
2271 case GT_EXPR:
2272 return COMPCODE_GT;
2273 case NE_EXPR:
2274 return COMPCODE_NE;
2275 case GE_EXPR:
2276 return COMPCODE_GE;
2277 case ORDERED_EXPR:
2278 return COMPCODE_ORD;
2279 case UNORDERED_EXPR:
2280 return COMPCODE_UNORD;
2281 case UNLT_EXPR:
2282 return COMPCODE_UNLT;
2283 case UNEQ_EXPR:
2284 return COMPCODE_UNEQ;
2285 case UNLE_EXPR:
2286 return COMPCODE_UNLE;
2287 case UNGT_EXPR:
2288 return COMPCODE_UNGT;
2289 case LTGT_EXPR:
2290 return COMPCODE_LTGT;
2291 case UNGE_EXPR:
2292 return COMPCODE_UNGE;
2293 default:
2294 gcc_unreachable ();
2298 /* Convert a compcode bit-based encoding of a comparison operator back
2299 to GCC's enum tree_code representation. This function is the
2300 inverse of comparison_to_compcode. */
2302 static enum tree_code
2303 compcode_to_comparison (enum comparison_code code)
2305 switch (code)
2307 case COMPCODE_LT:
2308 return LT_EXPR;
2309 case COMPCODE_EQ:
2310 return EQ_EXPR;
2311 case COMPCODE_LE:
2312 return LE_EXPR;
2313 case COMPCODE_GT:
2314 return GT_EXPR;
2315 case COMPCODE_NE:
2316 return NE_EXPR;
2317 case COMPCODE_GE:
2318 return GE_EXPR;
2319 case COMPCODE_ORD:
2320 return ORDERED_EXPR;
2321 case COMPCODE_UNORD:
2322 return UNORDERED_EXPR;
2323 case COMPCODE_UNLT:
2324 return UNLT_EXPR;
2325 case COMPCODE_UNEQ:
2326 return UNEQ_EXPR;
2327 case COMPCODE_UNLE:
2328 return UNLE_EXPR;
2329 case COMPCODE_UNGT:
2330 return UNGT_EXPR;
2331 case COMPCODE_LTGT:
2332 return LTGT_EXPR;
2333 case COMPCODE_UNGE:
2334 return UNGE_EXPR;
2335 default:
2336 gcc_unreachable ();
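/* Illustrative sketch (hedged; not part of GCC): the point of the bit
   encoding is that combining two predicates on the same operands is
   plain bit arithmetic, which is what combine_comparisons below relies
   on. */
#if 0
static int
compcode_sketch (void)
{
  enum { LT = 1, EQ = 2, GT = 4, UNORD = 8 };   /* Mirrors COMPCODE_*.  */
  int le = LT | EQ;                 /* 3, like COMPCODE_LE.             */
  int ge = GT | EQ;                 /* 6, like COMPCODE_GE.             */
  /* (x <= y) && (x >= y) is x == y; (x < y) || (x == y) is x <= y.     */
  return (le & ge) == EQ && (LT | EQ) == le;    /* Always 1. */
}
#endif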
2340 /* Return a tree for the comparison which is the combination of
2341 doing the AND or OR (depending on CODE) of the two operations LCODE
2342 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2343 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2344 if this makes the transformation invalid. */
2346 tree
2347 combine_comparisons (location_t loc,
2348 enum tree_code code, enum tree_code lcode,
2349 enum tree_code rcode, tree truth_type,
2350 tree ll_arg, tree lr_arg)
2352 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2353 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2354 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2355 int compcode;
2357 switch (code)
2359 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2360 compcode = lcompcode & rcompcode;
2361 break;
2363 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2364 compcode = lcompcode | rcompcode;
2365 break;
2367 default:
2368 return NULL_TREE;
2371 if (!honor_nans)
2373 /* Eliminate unordered comparisons, as well as LTGT and ORD
2374 which are not used unless the mode has NaNs. */
2375 compcode &= ~COMPCODE_UNORD;
2376 if (compcode == COMPCODE_LTGT)
2377 compcode = COMPCODE_NE;
2378 else if (compcode == COMPCODE_ORD)
2379 compcode = COMPCODE_TRUE;
2381 else if (flag_trapping_math)
2383 /* Check that the original operation and the optimized ones will trap
2384 under the same condition. */
2385 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2386 && (lcompcode != COMPCODE_EQ)
2387 && (lcompcode != COMPCODE_ORD);
2388 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2389 && (rcompcode != COMPCODE_EQ)
2390 && (rcompcode != COMPCODE_ORD);
2391 bool trap = (compcode & COMPCODE_UNORD) == 0
2392 && (compcode != COMPCODE_EQ)
2393 && (compcode != COMPCODE_ORD);
2395 /* In a short-circuited boolean expression the LHS might be
2396 such that the RHS, if evaluated, will never trap. For
2397 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2398 if neither x nor y is NaN. (This is a mixed blessing: for
2399 example, the expression above will never trap, hence
2400 optimizing it to x < y would be invalid). */
2401 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2402 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2403 rtrap = false;
2405 /* If the comparison was short-circuited, and only the RHS
2406 trapped, we may now generate a spurious trap. */
2407 if (rtrap && !ltrap
2408 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2409 return NULL_TREE;
2411 /* If we changed the conditions that cause a trap, we lose. */
2412 if ((ltrap || rtrap) != trap)
2413 return NULL_TREE;
2416 if (compcode == COMPCODE_TRUE)
2417 return constant_boolean_node (true, truth_type);
2418 else if (compcode == COMPCODE_FALSE)
2419 return constant_boolean_node (false, truth_type);
2420 else
2422 enum tree_code tcode;
2424 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2425 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2429 /* Return nonzero if two operands (typically of the same tree node)
2430 are necessarily equal. If either argument has side-effects this
2431 function returns zero. FLAGS modifies behavior as follows:
2433 If OEP_ONLY_CONST is set, only return nonzero for constants.
2434 This function tests whether the operands are indistinguishable;
2435 it does not test whether they are equal using C's == operation.
2436 The distinction is important for IEEE floating point, because
2437 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2438 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2440 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2441 even though it may hold multiple values during a function.
2442 This is because a GCC tree node guarantees that nothing else is
2443 executed between the evaluation of its "operands" (which may often
2444 be evaluated in arbitrary order). Hence if the operands themselves
2445 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2446 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2447 unset means assuming isochronic (or instantaneous) tree equivalence.
2448 Unless comparing arbitrary expression trees, such as from different
2449 statements, this flag can usually be left unset.
2451 If OEP_PURE_SAME is set, then pure functions with identical arguments
2452 are considered the same. It is used when the caller has other ways
2453 to ensure that global memory is unchanged in between. */
2455 int
2456 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2458 /* If either is ERROR_MARK, they aren't equal. */
2459 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2460 || TREE_TYPE (arg0) == error_mark_node
2461 || TREE_TYPE (arg1) == error_mark_node)
2462 return 0;
2464 /* Similarly, if either does not have a type (like a released SSA name),
2465 they aren't equal. */
2466 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2467 return 0;
2469 /* Check equality of integer constants before bailing out due to
2470 precision differences. */
2471 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2472 return tree_int_cst_equal (arg0, arg1);
2474 /* If the two types don't have the same signedness, then we can't consider
2475 them equal. We must check this before the STRIP_NOPS calls
2476 because they may change the signedness of the arguments. As pointers
2477 strictly don't have a signedness, require either two pointers or
2478 two non-pointers as well. */
2479 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2480 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2481 return 0;
2483 /* We cannot consider pointers to different address spaces equal. */
2484 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2485 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2486 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2487 return 0;
2489 /* If the two types don't have the same precision, then it is not safe
2490 to strip NOPs. */
2491 if (element_precision (TREE_TYPE (arg0))
2492 != element_precision (TREE_TYPE (arg1)))
2493 return 0;
2495 STRIP_NOPS (arg0);
2496 STRIP_NOPS (arg1);
2498 /* In case both args are comparisons but with different comparison
2499 code, try to swap the comparison operands of one arg to produce
2500 a match and compare that variant. */
2501 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2502 && COMPARISON_CLASS_P (arg0)
2503 && COMPARISON_CLASS_P (arg1))
2505 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2507 if (TREE_CODE (arg0) == swap_code)
2508 return operand_equal_p (TREE_OPERAND (arg0, 0),
2509 TREE_OPERAND (arg1, 1), flags)
2510 && operand_equal_p (TREE_OPERAND (arg0, 1),
2511 TREE_OPERAND (arg1, 0), flags);
2514 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2515 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2516 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2517 return 0;
2519 /* This is needed for conversions and for COMPONENT_REF.
2520 Might as well play it safe and always test this. */
2521 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2522 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2523 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2524 return 0;
2526 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2527 We don't care about side effects in that case because the SAVE_EXPR
2528 takes care of that for us. In all other cases, two expressions are
2529 equal if they have no side effects. If we have two identical
2530 expressions with side effects that should be treated the same due
2531 to the only side effects being identical SAVE_EXPR's, that will
2532 be detected in the recursive calls below.
2533 If we are taking an invariant address of two identical objects
2534 they are necessarily equal as well. */
2535 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2536 && (TREE_CODE (arg0) == SAVE_EXPR
2537 || (flags & OEP_CONSTANT_ADDRESS_OF)
2538 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2539 return 1;
2541 /* Next handle constant cases, those for which we can return 1 even
2542 if ONLY_CONST is set. */
2543 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2544 switch (TREE_CODE (arg0))
2546 case INTEGER_CST:
2547 return tree_int_cst_equal (arg0, arg1);
2549 case FIXED_CST:
2550 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2551 TREE_FIXED_CST (arg1));
2553 case REAL_CST:
2554 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2555 TREE_REAL_CST (arg1)))
2556 return 1;
2559 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2561 /* If we do not distinguish between signed and unsigned zero,
2562 consider them equal. */
2563 if (real_zerop (arg0) && real_zerop (arg1))
2564 return 1;
2566 return 0;
2568 case VECTOR_CST:
2570 unsigned i;
2572 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2573 return 0;
2575 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2577 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2578 VECTOR_CST_ELT (arg1, i), flags))
2579 return 0;
2581 return 1;
2584 case COMPLEX_CST:
2585 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2586 flags)
2587 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2588 flags));
2590 case STRING_CST:
2591 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2592 && ! memcmp (TREE_STRING_POINTER (arg0),
2593 TREE_STRING_POINTER (arg1),
2594 TREE_STRING_LENGTH (arg0)));
2596 case ADDR_EXPR:
2597 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2598 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2599 ? OEP_CONSTANT_ADDRESS_OF : 0);
2600 default:
2601 break;
2604 if (flags & OEP_ONLY_CONST)
2605 return 0;
2607 /* Define macros to test an operand from arg0 and arg1 for equality and a
2608 variant that allows null and views null as being different from any
2609 non-null value. In the latter case, if either is null, both
2610 must be; otherwise, do the normal comparison. */
2611 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2612 TREE_OPERAND (arg1, N), flags)
2614 #define OP_SAME_WITH_NULL(N) \
2615 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2616 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2618 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2620 case tcc_unary:
2621 /* Two conversions are equal only if signedness and modes match. */
2622 switch (TREE_CODE (arg0))
2624 CASE_CONVERT:
2625 case FIX_TRUNC_EXPR:
2626 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2627 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2628 return 0;
2629 break;
2630 default:
2631 break;
2634 return OP_SAME (0);
2637 case tcc_comparison:
2638 case tcc_binary:
2639 if (OP_SAME (0) && OP_SAME (1))
2640 return 1;
2642 /* For commutative ops, allow the other order. */
2643 return (commutative_tree_code (TREE_CODE (arg0))
2644 && operand_equal_p (TREE_OPERAND (arg0, 0),
2645 TREE_OPERAND (arg1, 1), flags)
2646 && operand_equal_p (TREE_OPERAND (arg0, 1),
2647 TREE_OPERAND (arg1, 0), flags));
2649 case tcc_reference:
2650 /* If either of the pointer (or reference) expressions we are
2651 dereferencing contain a side effect, these cannot be equal,
2652 but their addresses can be. */
2653 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2654 && (TREE_SIDE_EFFECTS (arg0)
2655 || TREE_SIDE_EFFECTS (arg1)))
2656 return 0;
2658 switch (TREE_CODE (arg0))
2660 case INDIRECT_REF:
2661 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2662 return OP_SAME (0);
2664 case REALPART_EXPR:
2665 case IMAGPART_EXPR:
2666 return OP_SAME (0);
2668 case TARGET_MEM_REF:
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 /* Require equal extra operands and then fall through to MEM_REF
2671 handling of the two common operands. */
2672 if (!OP_SAME_WITH_NULL (2)
2673 || !OP_SAME_WITH_NULL (3)
2674 || !OP_SAME_WITH_NULL (4))
2675 return 0;
2676 /* Fallthru. */
2677 case MEM_REF:
2678 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2679 /* Require equal access sizes, and similar pointer types.
2680 We can have incomplete types for array references of
2681 variable-sized arrays from the Fortran frontend
2682 though. Also verify the types are compatible. */
2683 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2684 || (TYPE_SIZE (TREE_TYPE (arg0))
2685 && TYPE_SIZE (TREE_TYPE (arg1))
2686 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2687 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2688 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2689 && alias_ptr_types_compatible_p
2690 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2691 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2692 && OP_SAME (0) && OP_SAME (1));
2694 case ARRAY_REF:
2695 case ARRAY_RANGE_REF:
2696 /* Operands 2 and 3 may be null.
2697 Compare the array index by value first if it is constant, as we
2698 may have different types but the same value here. */
2699 if (!OP_SAME (0))
2700 return 0;
2701 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2702 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2703 TREE_OPERAND (arg1, 1))
2704 || OP_SAME (1))
2705 && OP_SAME_WITH_NULL (2)
2706 && OP_SAME_WITH_NULL (3));
2708 case COMPONENT_REF:
2709 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2710 may be NULL when we're called to compare MEM_EXPRs. */
2711 if (!OP_SAME_WITH_NULL (0)
2712 || !OP_SAME (1))
2713 return 0;
2714 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2715 return OP_SAME_WITH_NULL (2);
2717 case BIT_FIELD_REF:
2718 if (!OP_SAME (0))
2719 return 0;
2720 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2721 return OP_SAME (1) && OP_SAME (2);
2723 default:
2724 return 0;
2727 case tcc_expression:
2728 switch (TREE_CODE (arg0))
2730 case ADDR_EXPR:
2731 case TRUTH_NOT_EXPR:
2732 return OP_SAME (0);
2734 case TRUTH_ANDIF_EXPR:
2735 case TRUTH_ORIF_EXPR:
2736 return OP_SAME (0) && OP_SAME (1);
2738 case FMA_EXPR:
2739 case WIDEN_MULT_PLUS_EXPR:
2740 case WIDEN_MULT_MINUS_EXPR:
2741 if (!OP_SAME (2))
2742 return 0;
2743 /* The multiplication operands are commutative. */
2744 /* FALLTHRU */
2746 case TRUTH_AND_EXPR:
2747 case TRUTH_OR_EXPR:
2748 case TRUTH_XOR_EXPR:
2749 if (OP_SAME (0) && OP_SAME (1))
2750 return 1;
2752 /* Otherwise take into account this is a commutative operation. */
2753 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2754 TREE_OPERAND (arg1, 1), flags)
2755 && operand_equal_p (TREE_OPERAND (arg0, 1),
2756 TREE_OPERAND (arg1, 0), flags));
2758 case COND_EXPR:
2759 case VEC_COND_EXPR:
2760 case DOT_PROD_EXPR:
2761 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2763 default:
2764 return 0;
2767 case tcc_vl_exp:
2768 switch (TREE_CODE (arg0))
2770 case CALL_EXPR:
2771 /* If the CALL_EXPRs call different functions, then they
2772 clearly cannot be equal. */
2773 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2774 flags))
2775 return 0;
2778 unsigned int cef = call_expr_flags (arg0);
2779 if (flags & OEP_PURE_SAME)
2780 cef &= ECF_CONST | ECF_PURE;
2781 else
2782 cef &= ECF_CONST;
2783 if (!cef)
2784 return 0;
2787 /* Now see if all the arguments are the same. */
2789 const_call_expr_arg_iterator iter0, iter1;
2790 const_tree a0, a1;
2791 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2792 a1 = first_const_call_expr_arg (arg1, &iter1);
2793 a0 && a1;
2794 a0 = next_const_call_expr_arg (&iter0),
2795 a1 = next_const_call_expr_arg (&iter1))
2796 if (! operand_equal_p (a0, a1, flags))
2797 return 0;
2799 /* If we get here and both argument lists are exhausted
2800 then the CALL_EXPRs are equal. */
2801 return ! (a0 || a1);
2803 default:
2804 return 0;
2807 case tcc_declaration:
2808 /* Consider __builtin_sqrt equal to sqrt. */
2809 return (TREE_CODE (arg0) == FUNCTION_DECL
2810 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2811 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2812 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2814 default:
2815 return 0;
2818 #undef OP_SAME
2819 #undef OP_SAME_WITH_NULL
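/* Illustrative sketch (toy types, hedged; not GCC's real data
   structures) of the commutativity retry in the tcc_binary case above:
   when the operands fail to match in order, a commutative node gets one
   more try with the second tree's operands swapped. */
#if 0
struct toy_expr { int code; struct toy_expr *op[2]; };
static int
toy_commutative_p (int code)
{
  return code == '+' || code == '*';
}
static int
toy_equal_p (const struct toy_expr *a, const struct toy_expr *b)
{
  if (a == b)
    return 1;
  if (!a || !b || a->code != b->code)
    return 0;
  if (toy_equal_p (a->op[0], b->op[0]) && toy_equal_p (a->op[1], b->op[1]))
    return 1;
  /* Mirror the tcc_binary case: allow the swapped order for + and *.  */
  return (toy_commutative_p (a->code)
	  && toy_equal_p (a->op[0], b->op[1])
	  && toy_equal_p (a->op[1], b->op[0]));
}
#endif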
2822 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2823 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2825 When in doubt, return 0. */
2827 static int
2828 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2830 int unsignedp1, unsignedpo;
2831 tree primarg0, primarg1, primother;
2832 unsigned int correct_width;
2834 if (operand_equal_p (arg0, arg1, 0))
2835 return 1;
2837 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2838 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2839 return 0;
2841 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2842 and see if the inner values are the same. This removes any
2843 signedness comparison, which doesn't matter here. */
2844 primarg0 = arg0, primarg1 = arg1;
2845 STRIP_NOPS (primarg0);
2846 STRIP_NOPS (primarg1);
2847 if (operand_equal_p (primarg0, primarg1, 0))
2848 return 1;
2850 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2851 actual comparison operand, ARG0.
2853 First throw away any conversions to wider types
2854 already present in the operands. */
2856 primarg1 = get_narrower (arg1, &unsignedp1);
2857 primother = get_narrower (other, &unsignedpo);
2859 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2860 if (unsignedp1 == unsignedpo
2861 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2862 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2864 tree type = TREE_TYPE (arg0);
2866 /* Make sure shorter operand is extended the right way
2867 to match the longer operand. */
2868 primarg1 = fold_convert (signed_or_unsigned_type_for
2869 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2871 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2872 return 1;
2875 return 0;
2878 /* See if ARG is an expression that is either a comparison or is performing
2879 arithmetic on comparisons. The comparisons must only be comparing
2880 two different values, which will be stored in *CVAL1 and *CVAL2; if
2881 they are nonzero it means that some operands have already been found.
2882 No variables may be used anywhere else in the expression except in the
2883 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2884 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2886 If this is true, return 1. Otherwise, return zero. */
2888 static int
2889 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2891 enum tree_code code = TREE_CODE (arg);
2892 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2894 /* We can handle some of the tcc_expression cases here. */
2895 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2896 tclass = tcc_unary;
2897 else if (tclass == tcc_expression
2898 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2899 || code == COMPOUND_EXPR))
2900 tclass = tcc_binary;
2902 else if (tclass == tcc_expression && code == SAVE_EXPR
2903 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2905 /* If we've already found a CVAL1 or CVAL2, this expression is
2906 too complex to handle. */
2907 if (*cval1 || *cval2)
2908 return 0;
2910 tclass = tcc_unary;
2911 *save_p = 1;
2914 switch (tclass)
2916 case tcc_unary:
2917 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2919 case tcc_binary:
2920 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2922 cval1, cval2, save_p));
2924 case tcc_constant:
2925 return 1;
2927 case tcc_expression:
2928 if (code == COND_EXPR)
2929 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2930 cval1, cval2, save_p)
2931 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2932 cval1, cval2, save_p)
2933 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2934 cval1, cval2, save_p));
2935 return 0;
2937 case tcc_comparison:
2938 /* First see if we can handle the first operand, then the second. For
2939 the second operand, we know *CVAL1 can't be zero. It must be that
2940 one side of the comparison is each of the values; test for the
2941 case where this isn't true by failing if the two operands
2942 are the same. */
2944 if (operand_equal_p (TREE_OPERAND (arg, 0),
2945 TREE_OPERAND (arg, 1), 0))
2946 return 0;
2948 if (*cval1 == 0)
2949 *cval1 = TREE_OPERAND (arg, 0);
2950 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2952 else if (*cval2 == 0)
2953 *cval2 = TREE_OPERAND (arg, 0);
2954 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2956 else
2957 return 0;
2959 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2961 else if (*cval2 == 0)
2962 *cval2 = TREE_OPERAND (arg, 1);
2963 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2965 else
2966 return 0;
2968 return 1;
2970 default:
2971 return 0;
2975 /* ARG is a tree that is known to contain just arithmetic operations and
2976 comparisons. Evaluate the operations in the tree substituting NEW0 for
2977 any occurrence of OLD0 as an operand of a comparison and likewise for
2978 NEW1 and OLD1. */
2980 static tree
2981 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2982 tree old1, tree new1)
2984 tree type = TREE_TYPE (arg);
2985 enum tree_code code = TREE_CODE (arg);
2986 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2988 /* We can handle some of the tcc_expression cases here. */
2989 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2990 tclass = tcc_unary;
2991 else if (tclass == tcc_expression
2992 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2993 tclass = tcc_binary;
2995 switch (tclass)
2997 case tcc_unary:
2998 return fold_build1_loc (loc, code, type,
2999 eval_subst (loc, TREE_OPERAND (arg, 0),
3000 old0, new0, old1, new1));
3002 case tcc_binary:
3003 return fold_build2_loc (loc, code, type,
3004 eval_subst (loc, TREE_OPERAND (arg, 0),
3005 old0, new0, old1, new1),
3006 eval_subst (loc, TREE_OPERAND (arg, 1),
3007 old0, new0, old1, new1));
3009 case tcc_expression:
3010 switch (code)
3012 case SAVE_EXPR:
3013 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3014 old1, new1);
3016 case COMPOUND_EXPR:
3017 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3018 old1, new1);
3020 case COND_EXPR:
3021 return fold_build3_loc (loc, code, type,
3022 eval_subst (loc, TREE_OPERAND (arg, 0),
3023 old0, new0, old1, new1),
3024 eval_subst (loc, TREE_OPERAND (arg, 1),
3025 old0, new0, old1, new1),
3026 eval_subst (loc, TREE_OPERAND (arg, 2),
3027 old0, new0, old1, new1));
3028 default:
3029 break;
3031 /* Fall through - ??? */
3033 case tcc_comparison:
3035 tree arg0 = TREE_OPERAND (arg, 0);
3036 tree arg1 = TREE_OPERAND (arg, 1);
3038 /* We need to check both for exact equality and tree equality. The
3039 former will be true if the operand has a side-effect. In that
3040 case, we know the operand occurred exactly once. */
3042 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3043 arg0 = new0;
3044 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3045 arg0 = new1;
3047 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3048 arg1 = new0;
3049 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3050 arg1 = new1;
3052 return fold_build2_loc (loc, code, type, arg0, arg1);
3055 default:
3056 return arg;
3060 /* Return a tree for the case when the result of an expression is RESULT
3061 converted to TYPE and OMITTED was previously an operand of the expression
3062 but is now not needed (e.g., we folded OMITTED * 0).
3064 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3065 the conversion of RESULT to TYPE. */
3067 tree
3068 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3070 tree t = fold_convert_loc (loc, type, result);
3072 /* If the resulting operand is an empty statement, just return the omitted
3073 statement cast to void. */
3074 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3075 return build1_loc (loc, NOP_EXPR, void_type_node,
3076 fold_ignored_result (omitted));
3078 if (TREE_SIDE_EFFECTS (omitted))
3079 return build2_loc (loc, COMPOUND_EXPR, type,
3080 fold_ignored_result (omitted), t);
3082 return non_lvalue_loc (loc, t);
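/* Illustrative sketch (hedged, plain C; not compiler code) of why the
   COMPOUND_EXPR above is needed: folding f () * 0 to a bare 0 would
   drop the call, so the fold instead builds the equivalent of (f (), 0)
   and the side effect survives. */
#if 0
static int counter;
static int f (void) { return ++counter; }
static int
omit_operand_sketch (void)
{
  int folded = (f (), 0);   /* What omit_one_operand_loc builds for f () * 0. */
  return folded + counter;  /* 1: the call still happened. */
}
#endif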
3085 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3087 static tree
3088 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3089 tree omitted)
3091 tree t = fold_convert_loc (loc, type, result);
3093 /* If the resulting operand is an empty statement, just return the omitted
3094 statement cast to void. */
3095 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3096 return build1_loc (loc, NOP_EXPR, void_type_node,
3097 fold_ignored_result (omitted));
3099 if (TREE_SIDE_EFFECTS (omitted))
3100 return build2_loc (loc, COMPOUND_EXPR, type,
3101 fold_ignored_result (omitted), t);
3103 return pedantic_non_lvalue_loc (loc, t);
3106 /* Return a tree for the case when the result of an expression is RESULT
3107 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3108 of the expression but are now not needed.
3110 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3111 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3112 evaluated before OMITTED2. Otherwise, if neither has side effects,
3113 just do the conversion of RESULT to TYPE. */
3115 tree
3116 omit_two_operands_loc (location_t loc, tree type, tree result,
3117 tree omitted1, tree omitted2)
3119 tree t = fold_convert_loc (loc, type, result);
3121 if (TREE_SIDE_EFFECTS (omitted2))
3122 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3123 if (TREE_SIDE_EFFECTS (omitted1))
3124 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3126 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3130 /* Return a simplified tree node for the truth-negation of ARG. This
3131 never alters ARG itself. We assume that ARG is an operation that
3132 returns a truth value (0 or 1).
3134 FIXME: one would think we would fold the result, but it causes
3135 problems with the dominator optimizer. */
3137 static tree
3138 fold_truth_not_expr (location_t loc, tree arg)
3140 tree type = TREE_TYPE (arg);
3141 enum tree_code code = TREE_CODE (arg);
3142 location_t loc1, loc2;
3144 /* If this is a comparison, we can simply invert it, except for
3145 floating-point non-equality comparisons, in which case we just
3146 enclose a TRUTH_NOT_EXPR around what we have. */
3148 if (TREE_CODE_CLASS (code) == tcc_comparison)
3150 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3151 if (FLOAT_TYPE_P (op_type)
3152 && flag_trapping_math
3153 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3154 && code != NE_EXPR && code != EQ_EXPR)
3155 return NULL_TREE;
3157 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3158 if (code == ERROR_MARK)
3159 return NULL_TREE;
3161 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3162 TREE_OPERAND (arg, 1));
3165 switch (code)
3167 case INTEGER_CST:
3168 return constant_boolean_node (integer_zerop (arg), type);
3170 case TRUTH_AND_EXPR:
3171 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3172 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3173 return build2_loc (loc, TRUTH_OR_EXPR, type,
3174 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3175 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3177 case TRUTH_OR_EXPR:
3178 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3179 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3180 return build2_loc (loc, TRUTH_AND_EXPR, type,
3181 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3182 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3184 case TRUTH_XOR_EXPR:
3185 /* Here we can invert either operand. We invert the first operand
3186 unless the second operand is a TRUTH_NOT_EXPR in which case our
3187 result is the XOR of the first operand with the inside of the
3188 negation of the second operand. */
3190 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3191 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3192 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3193 else
3194 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3195 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3196 TREE_OPERAND (arg, 1));
3198 case TRUTH_ANDIF_EXPR:
3199 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3200 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3201 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3202 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3203 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3205 case TRUTH_ORIF_EXPR:
3206 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3207 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3208 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3209 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3210 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3212 case TRUTH_NOT_EXPR:
3213 return TREE_OPERAND (arg, 0);
3215 case COND_EXPR:
3217 tree arg1 = TREE_OPERAND (arg, 1);
3218 tree arg2 = TREE_OPERAND (arg, 2);
3220 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3221 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3223 /* A COND_EXPR may have a throw as one operand, which
3224 then has void type. Just leave void operands
3225 as they are. */
3226 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3227 VOID_TYPE_P (TREE_TYPE (arg1))
3228 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3229 VOID_TYPE_P (TREE_TYPE (arg2))
3230 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3233 case COMPOUND_EXPR:
3234 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3235 return build2_loc (loc, COMPOUND_EXPR, type,
3236 TREE_OPERAND (arg, 0),
3237 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3239 case NON_LVALUE_EXPR:
3240 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3241 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3243 CASE_CONVERT:
3244 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3245 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3247 /* ... fall through ... */
3249 case FLOAT_EXPR:
3250 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3251 return build1_loc (loc, TREE_CODE (arg), type,
3252 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3254 case BIT_AND_EXPR:
3255 if (!integer_onep (TREE_OPERAND (arg, 1)))
3256 return NULL_TREE;
3257 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3259 case SAVE_EXPR:
3260 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3262 case CLEANUP_POINT_EXPR:
3263 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3264 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3265 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3267 default:
3268 return NULL_TREE;
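/* Illustrative sketch (hedged, plain C): the TRUTH_AND_EXPR and
   TRUTH_OR_EXPR cases above are De Morgan's laws, with the negation
   pushed into both operands. */
#if 0
static int
de_morgan_sketch (int a, int b)
{
  return (!(a && b) == (!a || !b))    /* Always 1. */
	 && (!(a || b) == (!a && !b));
}
#endif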
3272 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3273 assume that ARG is an operation that returns a truth value (0 or 1
3274 for scalars, 0 or -1 for vectors). Return the folded expression if
3275 folding is successful. Otherwise, return NULL_TREE. */
3277 static tree
3278 fold_invert_truthvalue (location_t loc, tree arg)
3280 tree type = TREE_TYPE (arg);
3281 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3282 ? BIT_NOT_EXPR
3283 : TRUTH_NOT_EXPR,
3284 type, arg);
3287 /* Return a simplified tree node for the truth-negation of ARG. This
3288 never alters ARG itself. We assume that ARG is an operation that
3289 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3291 tree
3292 invert_truthvalue_loc (location_t loc, tree arg)
3294 if (TREE_CODE (arg) == ERROR_MARK)
3295 return arg;
3297 tree type = TREE_TYPE (arg);
3298 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3299 ? BIT_NOT_EXPR
3300 : TRUTH_NOT_EXPR,
3301 type, arg);
3304 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3305 operands are another bit-wise operation with a common input. If so,
3306 distribute the bit operations to save an operation and possibly two if
3307 constants are involved. For example, convert
3308 (A | B) & (A | C) into A | (B & C)
3309 Further simplification will occur if B and C are constants.
3311 If this optimization cannot be done, 0 will be returned. */
3313 static tree
3314 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3315 tree arg0, tree arg1)
3317 tree common;
3318 tree left, right;
3320 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3321 || TREE_CODE (arg0) == code
3322 || (TREE_CODE (arg0) != BIT_AND_EXPR
3323 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3324 return 0;
3326 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3328 common = TREE_OPERAND (arg0, 0);
3329 left = TREE_OPERAND (arg0, 1);
3330 right = TREE_OPERAND (arg1, 1);
3332 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3334 common = TREE_OPERAND (arg0, 0);
3335 left = TREE_OPERAND (arg0, 1);
3336 right = TREE_OPERAND (arg1, 0);
3338 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3340 common = TREE_OPERAND (arg0, 1);
3341 left = TREE_OPERAND (arg0, 0);
3342 right = TREE_OPERAND (arg1, 1);
3344 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3346 common = TREE_OPERAND (arg0, 1);
3347 left = TREE_OPERAND (arg0, 0);
3348 right = TREE_OPERAND (arg1, 0);
3350 else
3351 return 0;
3353 common = fold_convert_loc (loc, type, common);
3354 left = fold_convert_loc (loc, type, left);
3355 right = fold_convert_loc (loc, type, right);
3356 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3357 fold_build2_loc (loc, code, type, left, right));
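/* Illustrative sketch (hedged, plain C): the distributive identity used
   above holds bitwise in both orientations, for all operand values. */
#if 0
static int
distribute_sketch (unsigned a, unsigned b, unsigned c)
{
  return ((a | b) & (a | c)) == (a | (b & c))     /* Always 1. */
	 && ((a & b) | (a & c)) == (a & (b | c));
}
#endif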
3360 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3361 with code CODE. This optimization is unsafe. */
3362 static tree
3363 distribute_real_division (location_t loc, enum tree_code code, tree type,
3364 tree arg0, tree arg1)
3366 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3367 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3369 /* (A / C) +- (B / C) -> (A +- B) / C. */
3370 if (mul0 == mul1
3371 && operand_equal_p (TREE_OPERAND (arg0, 1),
3372 TREE_OPERAND (arg1, 1), 0))
3373 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3374 fold_build2_loc (loc, code, type,
3375 TREE_OPERAND (arg0, 0),
3376 TREE_OPERAND (arg1, 0)),
3377 TREE_OPERAND (arg0, 1));
3379 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3380 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 0), 0)
3382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3383 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3385 REAL_VALUE_TYPE r0, r1;
3386 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3387 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3388 if (!mul0)
3389 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3390 if (!mul1)
3391 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3392 real_arithmetic (&r0, code, &r0, &r1);
3393 return fold_build2_loc (loc, MULT_EXPR, type,
3394 TREE_OPERAND (arg0, 0),
3395 build_real (type, r0));
3398 return NULL_TREE;
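/* Illustrative sketch (hedged, plain C) of why the transformation above
   is unsafe for IEEE arithmetic and must be requested explicitly:
   (A / C) + (B / C) and (A + B) / C can differ by rounding or, as here,
   by overflow. */
#if 0
#include <float.h>
static double
unsafe_div_sketch (void)
{
  double a = DBL_MAX, b = -DBL_MAX, c = 0.5;
  double lhs = a / c + b / c;   /* inf + -inf == NaN.  */
  double rhs = (a + b) / c;     /* 0.0 / 0.5 == 0.0.   */
  return lhs - rhs;             /* NaN: the two forms disagree. */
}
#endif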
3401 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3402 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3404 static tree
3405 make_bit_field_ref (location_t loc, tree inner, tree type,
3406 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3408 tree result, bftype;
3410 if (bitpos == 0)
3412 tree size = TYPE_SIZE (TREE_TYPE (inner));
3413 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3414 || POINTER_TYPE_P (TREE_TYPE (inner)))
3415 && tree_fits_shwi_p (size)
3416 && tree_to_shwi (size) == bitsize)
3417 return fold_convert_loc (loc, type, inner);
3420 bftype = type;
3421 if (TYPE_PRECISION (bftype) != bitsize
3422 || TYPE_UNSIGNED (bftype) == !unsignedp)
3423 bftype = build_nonstandard_integer_type (bitsize, 0);
3425 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3426 size_int (bitsize), bitsize_int (bitpos));
3428 if (bftype != type)
3429 result = fold_convert_loc (loc, type, result);
3431 return result;
3434 /* Optimize a bit-field compare.
3436 There are two cases: First is a compare against a constant and the
3437 second is a comparison of two items where the fields are at the same
3438 bit position relative to the start of a chunk (byte, halfword, word)
3439 large enough to contain it. In these cases we can avoid the shift
3440 implicit in bitfield extractions.
3442 For constants, we emit a compare of the shifted constant with the
3443 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3444 compared. For two fields at the same position, we do the ANDs with the
3445 similar mask and compare the result of the ANDs.
3447 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3448 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3449 are the left and right operands of the comparison, respectively.
3451 If the optimization described above can be done, we return the resulting
3452 tree. Otherwise we return zero. */
3454 static tree
3455 optimize_bit_field_compare (location_t loc, enum tree_code code,
3456 tree compare_type, tree lhs, tree rhs)
3458 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3459 tree type = TREE_TYPE (lhs);
3460 tree unsigned_type;
3461 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3462 machine_mode lmode, rmode, nmode;
3463 int lunsignedp, runsignedp;
3464 int lvolatilep = 0, rvolatilep = 0;
3465 tree linner, rinner = NULL_TREE;
3466 tree mask;
3467 tree offset;
3469 /* Get all the information about the extractions being done. If the bit size
3470 is the same as the size of the underlying object, we aren't doing an
3471 extraction at all and so can do nothing. We also don't want to
3472 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3473 then will no longer be able to replace it. */
3474 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3475 &lunsignedp, &lvolatilep, false);
3476 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3477 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3478 return 0;
3480 if (!const_p)
3482 /* If this is not a constant, we can only do something if bit positions,
3483 sizes, and signedness are the same. */
3484 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3485 &runsignedp, &rvolatilep, false);
3487 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3488 || lunsignedp != runsignedp || offset != 0
3489 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3490 return 0;
3493 /* See if we can find a mode to refer to this field. We should be able to,
3494 but fail if we can't. */
3495 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3496 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3497 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3498 TYPE_ALIGN (TREE_TYPE (rinner))),
3499 word_mode, false);
3500 if (nmode == VOIDmode)
3501 return 0;
3503 /* Set signed and unsigned types of the precision of this mode for the
3504 shifts below. */
3505 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3507 /* Compute the bit position and size for the new reference and our offset
3508 within it. If the new reference is the same size as the original, we
3509 won't optimize anything, so return zero. */
3510 nbitsize = GET_MODE_BITSIZE (nmode);
3511 nbitpos = lbitpos & ~ (nbitsize - 1);
3512 lbitpos -= nbitpos;
3513 if (nbitsize == lbitsize)
3514 return 0;
3516 if (BYTES_BIG_ENDIAN)
3517 lbitpos = nbitsize - lbitsize - lbitpos;
3519 /* Make the mask to be used against the extracted field. */
3520 mask = build_int_cst_type (unsigned_type, -1);
3521 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3522 mask = const_binop (RSHIFT_EXPR, mask,
3523 size_int (nbitsize - lbitsize - lbitpos));
3525 if (! const_p)
3526 /* If not comparing with a constant, just rework the comparison
3527 and return. */
3528 return fold_build2_loc (loc, code, compare_type,
3529 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3530 make_bit_field_ref (loc, linner,
3531 unsigned_type,
3532 nbitsize, nbitpos,
3533 1),
3534 mask),
3535 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3536 make_bit_field_ref (loc, rinner,
3537 unsigned_type,
3538 nbitsize, nbitpos,
3539 1),
3540 mask));
3542 /* Otherwise, we are handling the constant case. See if the constant is too
3543 big for the field. Warn and return a tree for 0 (false) if so. We do
3544 this not only for its own sake, but to avoid having to test for this
3545 error case below. If we didn't, we might generate wrong code.
3547 For unsigned fields, the constant shifted right by the field length should
3548 be all zero. For signed fields, the high-order bits should agree with
3549 the sign bit. */
3551 if (lunsignedp)
3553 if (wi::lrshift (rhs, lbitsize) != 0)
3555 warning (0, "comparison is always %d due to width of bit-field",
3556 code == NE_EXPR);
3557 return constant_boolean_node (code == NE_EXPR, compare_type);
3560 else
3562 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3563 if (tem != 0 && tem != -1)
3565 warning (0, "comparison is always %d due to width of bit-field",
3566 code == NE_EXPR);
3567 return constant_boolean_node (code == NE_EXPR, compare_type);
3571 /* Single-bit compares should always be against zero. */
3572 if (lbitsize == 1 && ! integer_zerop (rhs))
3574 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3575 rhs = build_int_cst (type, 0);
3578 /* Make a new bitfield reference, shift the constant over the
3579 appropriate number of bits and mask it with the computed mask
3580 (in case this was a signed field). If we changed it, make a new one. */
3581 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3583 rhs = const_binop (BIT_AND_EXPR,
3584 const_binop (LSHIFT_EXPR,
3585 fold_convert_loc (loc, unsigned_type, rhs),
3586 size_int (lbitpos)),
3587 mask);
3589 lhs = build2_loc (loc, code, compare_type,
3590 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3591 return lhs;
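/* Illustrative sketch (hedged; the field layout below is hypothetical,
   real positions come from get_inner_reference): the constant case
   above turns a bit-field compare into one mask-and-compare on the
   containing word, with no extract-and-shift of the field itself. */
#if 0
static int
bit_field_compare_sketch (unsigned word)
{
  /* For a 3-bit field at bit position 4, "s.f == 5" becomes:  */
  unsigned mask = 7u << 4;              /* Bits 4..6.          */
  return (word & mask) == (5u << 4);    /* Shifted constant.   */
}
#endif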
3594 /* Subroutine for fold_truth_andor_1: decode a field reference.
3596 If EXP is a comparison reference, we return the innermost reference.
3598 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3599 set to the starting bit number.
3601 If the innermost field can be completely contained in a mode-sized
3602 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3604 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3605 otherwise it is not changed.
3607 *PUNSIGNEDP is set to the signedness of the field.
3609 *PMASK is set to the mask used. This is either contained in a
3610 BIT_AND_EXPR or derived from the width of the field.
3612 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3614 Return 0 if this is not a component reference or is one that we can't
3615 do anything with. */
3617 static tree
3618 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3619 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3620 int *punsignedp, int *pvolatilep,
3621 tree *pmask, tree *pand_mask)
3623 tree outer_type = 0;
3624 tree and_mask = 0;
3625 tree mask, inner, offset;
3626 tree unsigned_type;
3627 unsigned int precision;
3629 /* All the optimizations using this function assume integer fields.
3630 There are problems with FP fields since the type_for_size call
3631 below can fail for, e.g., XFmode. */
3632 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3633 return 0;
3635 /* We are interested in the bare arrangement of bits, so strip everything
3636 that doesn't affect the machine mode. However, record the type of the
3637 outermost expression if it may matter below. */
3638 if (CONVERT_EXPR_P (exp)
3639 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3640 outer_type = TREE_TYPE (exp);
3641 STRIP_NOPS (exp);
3643 if (TREE_CODE (exp) == BIT_AND_EXPR)
3645 and_mask = TREE_OPERAND (exp, 1);
3646 exp = TREE_OPERAND (exp, 0);
3647 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3648 if (TREE_CODE (and_mask) != INTEGER_CST)
3649 return 0;
3652 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3653 punsignedp, pvolatilep, false);
3654 if ((inner == exp && and_mask == 0)
3655 || *pbitsize < 0 || offset != 0
3656 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3657 return 0;
3659 /* If the number of bits in the reference is the same as the bitsize of
3660 the outer type, then the outer type gives the signedness. Otherwise
3661 (in case of a small bitfield) the signedness is unchanged. */
3662 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3663 *punsignedp = TYPE_UNSIGNED (outer_type);
3665 /* Compute the mask to access the bitfield. */
3666 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3667 precision = TYPE_PRECISION (unsigned_type);
3669 mask = build_int_cst_type (unsigned_type, -1);
3671 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3672 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3674 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3675 if (and_mask != 0)
3676 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3677 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3679 *pmask = mask;
3680 *pand_mask = and_mask;
3681 return inner;
3684 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3685 bit positions and the type of MASK is signed. */
3687 static int
3688 all_ones_mask_p (const_tree mask, unsigned int size)
3690 tree type = TREE_TYPE (mask);
3691 unsigned int precision = TYPE_PRECISION (type);
3693 /* If this function returns true when the type of the mask is
3694 UNSIGNED, then there will be errors. In particular see
3695 gcc.c-torture/execute/990326-1.c. There does not appear to be
3696 any documentation paper trail as to why this is so. But the
3697 pre-wide-int code worked with that restriction, and it has been preserved
3698 here. */
3699 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3700 return false;
3702 return wi::mask (size, false, precision) == mask;
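/* Illustrative sketch (hedged, plain C; wi::mask also handles the
   full-width case that plain shifts cannot): a mask of SIZE low-order
   ones is (1 << SIZE) - 1, the value the comparison above tests for. */
#if 0
static int
low_mask_sketch (unsigned mask, unsigned size)
{
  return size < 32 && mask == ((1u << size) - 1);
}
#endif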
3705 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3706 represents the sign bit of EXP's type. If EXP represents a sign
3707 or zero extension, also test VAL against the unextended type.
3708 The return value is the (sub)expression whose sign bit is VAL,
3709 or NULL_TREE otherwise. */
3711 static tree
3712 sign_bit_p (tree exp, const_tree val)
3714 int width;
3715 tree t;
3717 /* Tree EXP must have an integral type. */
3718 t = TREE_TYPE (exp);
3719 if (! INTEGRAL_TYPE_P (t))
3720 return NULL_TREE;
3722 /* Tree VAL must be an integer constant. */
3723 if (TREE_CODE (val) != INTEGER_CST
3724 || TREE_OVERFLOW (val))
3725 return NULL_TREE;
3727 width = TYPE_PRECISION (t);
3728 if (wi::only_sign_bit_p (val, width))
3729 return exp;
3731 /* Handle extension from a narrower type. */
3732 if (TREE_CODE (exp) == NOP_EXPR
3733 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3734 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3736 return NULL_TREE;
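/* Illustrative sketch (hedged, plain C): the property checked by
   wi::only_sign_bit_p above is that VAL has a single 1 bit, in position
   WIDTH - 1. */
#if 0
static int
sign_bit_sketch (unsigned long long val, int width)
{
  return width >= 1 && width <= 64 && val == (1ull << (width - 1));
}
#endif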
3739 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3740 to be evaluated unconditionally. */
3742 static int
3743 simple_operand_p (const_tree exp)
3745 /* Strip any conversions that don't change the machine mode. */
3746 STRIP_NOPS (exp);
3748 return (CONSTANT_CLASS_P (exp)
3749 || TREE_CODE (exp) == SSA_NAME
3750 || (DECL_P (exp)
3751 && ! TREE_ADDRESSABLE (exp)
3752 && ! TREE_THIS_VOLATILE (exp)
3753 && ! DECL_NONLOCAL (exp)
3754 /* Don't regard global variables as simple. They may be
3755 allocated in ways unknown to the compiler (shared memory,
3756 #pragma weak, etc). */
3757 && ! TREE_PUBLIC (exp)
3758 && ! DECL_EXTERNAL (exp)
3759 /* Weakrefs are not safe to be read, since they can be NULL.
3760 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3761 have DECL_WEAK flag set. */
3762 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3763 /* Loading a static variable is unduly expensive, but global
3764 registers aren't expensive. */
3765 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3768 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3769 to be evaluated unconditionally.
3770 In addition to simple_operand_p, we assume that comparisons, conversions,
3771 and logic-not operations are simple, if their operands are simple, too. */
3773 static bool
3774 simple_operand_p_2 (tree exp)
3776 enum tree_code code;
3778 if (TREE_SIDE_EFFECTS (exp)
3779 || tree_could_trap_p (exp))
3780 return false;
3782 while (CONVERT_EXPR_P (exp))
3783 exp = TREE_OPERAND (exp, 0);
3785 code = TREE_CODE (exp);
3787 if (TREE_CODE_CLASS (code) == tcc_comparison)
3788 return (simple_operand_p (TREE_OPERAND (exp, 0))
3789 && simple_operand_p (TREE_OPERAND (exp, 1)));
3791 if (code == TRUTH_NOT_EXPR)
3792 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3794 return simple_operand_p (exp);
3798 /* The following functions are subroutines to fold_range_test and allow it to
3799 try to change a logical combination of comparisons into a range test.
3801 For example, both
3802 X == 2 || X == 3 || X == 4 || X == 5
3803 and
3804 X >= 2 && X <= 5
3805 are converted to
3806 (unsigned) (X - 2) <= 3
3808 We describe each set of comparisons as being either inside or outside
3809 a range, using a variable named like IN_P, and then describe the
3810 range with a lower and upper bound. If one of the bounds is omitted,
3811 it represents either the highest or lowest value of the type.
3813 In the comments below, we represent a range by two numbers in brackets
3814 preceded by a "+" to designate being inside that range, or a "-" to
3815 designate being outside that range, so the condition can be inverted by
3816 flipping the prefix. An omitted bound is represented by a "-". For
3817 example, "- [-, 10]" means being outside the range starting at the lowest
3818 possible value and ending at 10, in other words, being greater than 10.
3819 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3820 always false.
3822 We set up things so that the missing bounds are handled in a consistent
3823 manner so neither a missing bound nor "true" and "false" need to be
3824 handled using a special case. */
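
/* A minimal standalone sketch (not GCC code) of the transformation this
   comment describes: both the chain of equality tests and the
   double-ended comparison reduce to one unsigned subtract-and-compare.  */
static int
in_range_2_to_5 (int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5, i.e. X >= 2 && X <= 5.
     Values below 2 wrap to huge unsigned numbers and fail the test.  */
  return (unsigned) x - 2u <= 3u;
}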
3826 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3827 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3828 and UPPER1_P are nonzero if the respective argument is an upper bound
3829 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3830 must be specified for a comparison. ARG1 will be converted to ARG0's
3831 type if both are specified. */
3833 static tree
3834 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3835 tree arg1, int upper1_p)
3837 tree tem;
3838 int result;
3839 int sgn0, sgn1;
3841 /* If neither arg represents infinity, do the normal operation.
3842 Else, if not a comparison, return infinity. Else handle the special
3843 comparison rules. Note that most of the cases below won't occur, but
3844 are handled for consistency. */
3846 if (arg0 != 0 && arg1 != 0)
3848 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3849 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3850 STRIP_NOPS (tem);
3851 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3854 if (TREE_CODE_CLASS (code) != tcc_comparison)
3855 return 0;
3857 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3858 for neither. In real maths, we cannot assume open ended ranges are
3859 the same. But, this is computer arithmetic, where numbers are finite.
3860 We can therefore replace any missing bound with a value Z lying
3861 beyond every representable number. This permits
3862 us to treat unbounded ranges as equal.
3863 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3864 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3865 switch (code)
3867 case EQ_EXPR:
3868 result = sgn0 == sgn1;
3869 break;
3870 case NE_EXPR:
3871 result = sgn0 != sgn1;
3872 break;
3873 case LT_EXPR:
3874 result = sgn0 < sgn1;
3875 break;
3876 case LE_EXPR:
3877 result = sgn0 <= sgn1;
3878 break;
3879 case GT_EXPR:
3880 result = sgn0 > sgn1;
3881 break;
3882 case GE_EXPR:
3883 result = sgn0 >= sgn1;
3884 break;
3885 default:
3886 gcc_unreachable ();
3889 return constant_boolean_node (result, type);
3892 /* Helper routine for make_range. Perform one step for it, return
3893 new expression if the loop should continue or NULL_TREE if it should
3894 stop. */
3896 tree
3897 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3898 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3899 bool *strict_overflow_p)
3901 tree arg0_type = TREE_TYPE (arg0);
3902 tree n_low, n_high, low = *p_low, high = *p_high;
3903 int in_p = *p_in_p, n_in_p;
3905 switch (code)
3907 case TRUTH_NOT_EXPR:
3908 /* We can only do something if the range is testing for zero. */
3909 if (low == NULL_TREE || high == NULL_TREE
3910 || ! integer_zerop (low) || ! integer_zerop (high))
3911 return NULL_TREE;
3912 *p_in_p = ! in_p;
3913 return arg0;
3915 case EQ_EXPR: case NE_EXPR:
3916 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3917 /* We can only do something if the range is testing for zero
3918 and if the second operand is an integer constant. Note that
3919 saying something is "in" the range we make is done by
3920 complementing IN_P, since it is set for the initial case of
3921 being not equal to zero; "out" is leaving it alone. */
3922 if (low == NULL_TREE || high == NULL_TREE
3923 || ! integer_zerop (low) || ! integer_zerop (high)
3924 || TREE_CODE (arg1) != INTEGER_CST)
3925 return NULL_TREE;
3927 switch (code)
3929 case NE_EXPR: /* - [c, c] */
3930 low = high = arg1;
3931 break;
3932 case EQ_EXPR: /* + [c, c] */
3933 in_p = ! in_p, low = high = arg1;
3934 break;
3935 case GT_EXPR: /* - [-, c] */
3936 low = 0, high = arg1;
3937 break;
3938 case GE_EXPR: /* + [c, -] */
3939 in_p = ! in_p, low = arg1, high = 0;
3940 break;
3941 case LT_EXPR: /* - [c, -] */
3942 low = arg1, high = 0;
3943 break;
3944 case LE_EXPR: /* + [-, c] */
3945 in_p = ! in_p, low = 0, high = arg1;
3946 break;
3947 default:
3948 gcc_unreachable ();
3951 /* If this is an unsigned comparison, we also know that EXP is
3952 greater than or equal to zero. We base the range tests we make
3953 on that fact, so we record it here so we can parse existing
3954 range tests. We test arg0_type since often the return type
3955 of, e.g. EQ_EXPR, is boolean. */
3956 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3958 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3959 in_p, low, high, 1,
3960 build_int_cst (arg0_type, 0),
3961 NULL_TREE))
3962 return NULL_TREE;
3964 in_p = n_in_p, low = n_low, high = n_high;
3966 /* If the high bound is missing, but we have a nonzero low
3967 bound, reverse the range so it goes from zero to the low bound
3968 minus 1. */
3969 if (high == 0 && low && ! integer_zerop (low))
3971 in_p = ! in_p;
3972 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3973 build_int_cst (TREE_TYPE (low), 1), 0);
3974 low = build_int_cst (arg0_type, 0);
3978 *p_low = low;
3979 *p_high = high;
3980 *p_in_p = in_p;
3981 return arg0;
3983 case NEGATE_EXPR:
3984 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3985 low and high are non-NULL, then normalize will DTRT. */
3986 if (!TYPE_UNSIGNED (arg0_type)
3987 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3989 if (low == NULL_TREE)
3990 low = TYPE_MIN_VALUE (arg0_type);
3991 if (high == NULL_TREE)
3992 high = TYPE_MAX_VALUE (arg0_type);
3995 /* (-x) IN [a,b] -> x in [-b, -a] */
3996 n_low = range_binop (MINUS_EXPR, exp_type,
3997 build_int_cst (exp_type, 0),
3998 0, high, 1);
3999 n_high = range_binop (MINUS_EXPR, exp_type,
4000 build_int_cst (exp_type, 0),
4001 0, low, 0);
4002 if (n_high != 0 && TREE_OVERFLOW (n_high))
4003 return NULL_TREE;
4004 goto normalize;
4006 case BIT_NOT_EXPR:
4007 /* ~ X -> -X - 1 */
4008 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4009 build_int_cst (exp_type, 1));
4011 case PLUS_EXPR:
4012 case MINUS_EXPR:
4013 if (TREE_CODE (arg1) != INTEGER_CST)
4014 return NULL_TREE;
4016 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4017 move a constant to the other side. */
4018 if (!TYPE_UNSIGNED (arg0_type)
4019 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4020 return NULL_TREE;
4022 /* If EXP is signed, any overflow in the computation is undefined,
4023 so we don't worry about it so long as our computations on
4024 the bounds don't overflow. For unsigned, overflow is defined
4025 and this is exactly the right thing. */
4026 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4027 arg0_type, low, 0, arg1, 0);
4028 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4029 arg0_type, high, 1, arg1, 0);
4030 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4031 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4032 return NULL_TREE;
4034 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4035 *strict_overflow_p = true;
4037 normalize:
4038 /* Check for an unsigned range which has wrapped around the maximum
4039 value thus making n_high < n_low, and normalize it. */
4040 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4042 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4043 build_int_cst (TREE_TYPE (n_high), 1), 0);
4044 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4045 build_int_cst (TREE_TYPE (n_low), 1), 0);
4047 /* If the range is of the form +/- [ x+1, x ], we won't
4048 be able to normalize it. But then, it represents the
4049 whole range or the empty set, so make it
4050 +/- [ -, - ]. */
4051 if (tree_int_cst_equal (n_low, low)
4052 && tree_int_cst_equal (n_high, high))
4053 low = high = 0;
4054 else
4055 in_p = ! in_p;
4057 else
4058 low = n_low, high = n_high;
4060 *p_low = low;
4061 *p_high = high;
4062 *p_in_p = in_p;
4063 return arg0;
4065 CASE_CONVERT:
4066 case NON_LVALUE_EXPR:
4067 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4068 return NULL_TREE;
4070 if (! INTEGRAL_TYPE_P (arg0_type)
4071 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4072 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4073 return NULL_TREE;
4075 n_low = low, n_high = high;
4077 if (n_low != 0)
4078 n_low = fold_convert_loc (loc, arg0_type, n_low);
4080 if (n_high != 0)
4081 n_high = fold_convert_loc (loc, arg0_type, n_high);
4083 /* If we're converting arg0 from an unsigned type to exp,
4084 a signed type, we will be doing the comparison as unsigned.
4085 The tests above have already verified that LOW and HIGH
4086 are both positive.
4088 So we have to ensure that we will handle large unsigned
4089 values the same way that the current signed bounds treat
4090 negative values. */
4092 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4094 tree high_positive;
4095 tree equiv_type;
4096 /* For fixed-point modes, we need to pass the saturating flag
4097 as the 2nd parameter. */
4098 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4099 equiv_type
4100 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4101 TYPE_SATURATING (arg0_type));
4102 else
4103 equiv_type
4104 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4106 /* A range without an upper bound is, naturally, unbounded.
4107 Since convert would have cropped a very large value, use
4108 the max value for the destination type. */
4109 high_positive
4110 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4111 : TYPE_MAX_VALUE (arg0_type);
4113 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4114 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4115 fold_convert_loc (loc, arg0_type,
4116 high_positive),
4117 build_int_cst (arg0_type, 1));
4119 /* If the low bound is specified, "and" the range with the
4120 range for which the original unsigned value will be
4121 positive. */
4122 if (low != 0)
4124 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4125 1, fold_convert_loc (loc, arg0_type,
4126 integer_zero_node),
4127 high_positive))
4128 return NULL_TREE;
4130 in_p = (n_in_p == in_p);
4132 else
4134 /* Otherwise, "or" the range with the range of the input
4135 that will be interpreted as negative. */
4136 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4137 1, fold_convert_loc (loc, arg0_type,
4138 integer_zero_node),
4139 high_positive))
4140 return NULL_TREE;
4142 in_p = (in_p != n_in_p);
4146 *p_low = n_low;
4147 *p_high = n_high;
4148 *p_in_p = in_p;
4149 return arg0;
4151 default:
4152 return NULL_TREE;
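
/* A minimal standalone sketch (not GCC code) of the PLUS_EXPR and
   "normalize" steps above: moving the constant across the bounds can
   make an unsigned range wrap (high < low), in which case the range is
   restated as the complement of the non-wrapping range. Both helpers
   below are hypothetical and agree for every 32-bit input.  */
static int
range_direct (unsigned int x)
{
  return x + 2u <= 4u;                  /* X + 2 in [0, 4]              */
}

static int
range_normalized (unsigned int x)
{
  /* Subtracting 2 from [0, 4] gives [-2, 2] modulo 2^32, which wraps;
     normalizing yields "X not in [3, 0xfffffffd]".                     */
  return !(x - 3u <= 0xfffffffau);
}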
4156 /* Given EXP, a logical expression, set the range it is testing into
4157 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4158 actually being tested. *PLOW and *PHIGH will be made of the same
4159 type as the returned expression. If EXP is not a comparison, we
4160 will most likely not be returning a useful value and range. Set
4161 *STRICT_OVERFLOW_P to true if the return value is only valid
4162 because signed overflow is undefined; otherwise, do not change
4163 *STRICT_OVERFLOW_P. */
4165 tree
4166 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4167 bool *strict_overflow_p)
4169 enum tree_code code;
4170 tree arg0, arg1 = NULL_TREE;
4171 tree exp_type, nexp;
4172 int in_p;
4173 tree low, high;
4174 location_t loc = EXPR_LOCATION (exp);
4176 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4177 and see if we can refine the range. Some of the cases below may not
4178 happen, but it doesn't seem worth worrying about this. We "continue"
4179 the outer loop when we've changed something; otherwise we "break"
4180 the switch, which will "break" the while. */
4182 in_p = 0;
4183 low = high = build_int_cst (TREE_TYPE (exp), 0);
4185 while (1)
4187 code = TREE_CODE (exp);
4188 exp_type = TREE_TYPE (exp);
4189 arg0 = NULL_TREE;
4191 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4193 if (TREE_OPERAND_LENGTH (exp) > 0)
4194 arg0 = TREE_OPERAND (exp, 0);
4195 if (TREE_CODE_CLASS (code) == tcc_binary
4196 || TREE_CODE_CLASS (code) == tcc_comparison
4197 || (TREE_CODE_CLASS (code) == tcc_expression
4198 && TREE_OPERAND_LENGTH (exp) > 1))
4199 arg1 = TREE_OPERAND (exp, 1);
4201 if (arg0 == NULL_TREE)
4202 break;
4204 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4205 &high, &in_p, strict_overflow_p);
4206 if (nexp == NULL_TREE)
4207 break;
4208 exp = nexp;
4211 /* If EXP is a constant, we can evaluate whether this is true or false. */
4212 if (TREE_CODE (exp) == INTEGER_CST)
4214 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4215 exp, 0, low, 0))
4216 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4217 exp, 1, high, 1)));
4218 low = high = 0;
4219 exp = 0;
4222 *pin_p = in_p, *plow = low, *phigh = high;
4223 return exp;
4226 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4227 type, TYPE, return an expression to test if EXP is in (or out of, depending
4228 on IN_P) the range. Return 0 if the test couldn't be created. */
4230 tree
4231 build_range_check (location_t loc, tree type, tree exp, int in_p,
4232 tree low, tree high)
4234 tree etype = TREE_TYPE (exp), value;
4236 #ifdef HAVE_canonicalize_funcptr_for_compare
4237 /* Disable this optimization for function pointer expressions
4238 on targets that require function pointer canonicalization. */
4239 if (HAVE_canonicalize_funcptr_for_compare
4240 && TREE_CODE (etype) == POINTER_TYPE
4241 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4242 return NULL_TREE;
4243 #endif
4245 if (! in_p)
4247 value = build_range_check (loc, type, exp, 1, low, high);
4248 if (value != 0)
4249 return invert_truthvalue_loc (loc, value);
4251 return 0;
4254 if (low == 0 && high == 0)
4255 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4257 if (low == 0)
4258 return fold_build2_loc (loc, LE_EXPR, type, exp,
4259 fold_convert_loc (loc, etype, high));
4261 if (high == 0)
4262 return fold_build2_loc (loc, GE_EXPR, type, exp,
4263 fold_convert_loc (loc, etype, low));
4265 if (operand_equal_p (low, high, 0))
4266 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4267 fold_convert_loc (loc, etype, low));
4269 if (integer_zerop (low))
4271 if (! TYPE_UNSIGNED (etype))
4273 etype = unsigned_type_for (etype);
4274 high = fold_convert_loc (loc, etype, high);
4275 exp = fold_convert_loc (loc, etype, exp);
4277 return build_range_check (loc, type, exp, 1, 0, high);
4280 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4281 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4283 int prec = TYPE_PRECISION (etype);
4285 if (wi::mask (prec - 1, false, prec) == high)
4287 if (TYPE_UNSIGNED (etype))
4289 tree signed_etype = signed_type_for (etype);
4290 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4291 etype
4292 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4293 else
4294 etype = signed_etype;
4295 exp = fold_convert_loc (loc, etype, exp);
4297 return fold_build2_loc (loc, GT_EXPR, type, exp,
4298 build_int_cst (etype, 0));
4302 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4303 This requires wrap-around arithmetic for the type of the expression.
4304 First make sure that arithmetic in this type is valid, then make sure
4305 that it wraps around. */
4306 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4307 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4308 TYPE_UNSIGNED (etype));
4310 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4312 tree utype, minv, maxv;
4314 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4315 for the type in question, as we rely on this here. */
4316 utype = unsigned_type_for (etype);
4317 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4318 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4319 build_int_cst (TREE_TYPE (maxv), 1), 1);
4320 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4322 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4323 minv, 1, maxv, 1)))
4324 etype = utype;
4325 else
4326 return 0;
4329 high = fold_convert_loc (loc, etype, high);
4330 low = fold_convert_loc (loc, etype, low);
4331 exp = fold_convert_loc (loc, etype, exp);
4333 value = const_binop (MINUS_EXPR, high, low);
4336 if (POINTER_TYPE_P (etype))
4338 if (value != 0 && !TREE_OVERFLOW (value))
4340 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4341 return build_range_check (loc, type,
4342 fold_build_pointer_plus_loc (loc, exp, low),
4343 1, build_int_cst (etype, 0), value);
4345 return 0;
4348 if (value != 0 && !TREE_OVERFLOW (value))
4349 return build_range_check (loc, type,
4350 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4351 1, build_int_cst (etype, 0), value);
4353 return 0;
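
/* A minimal standalone sketch (not GCC code) of the (c>=1) && (c<=127)
   optimization above, on the usual two's-complement targets where the
   unsigned-to-signed char conversion wraps.  */
static int
range_1_to_127 (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
range_as_signed_test (unsigned char c)
{
  /* HIGH is the all-but-sign-bit mask 0x7f, so the range test is exactly
     "sign bit clear and value nonzero", i.e. a signed > 0 comparison.  */
  return (signed char) c > 0;
}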
4356 /* Return the predecessor of VAL in its type, handling the infinite case. */
4358 static tree
4359 range_predecessor (tree val)
4361 tree type = TREE_TYPE (val);
4363 if (INTEGRAL_TYPE_P (type)
4364 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4365 return 0;
4366 else
4367 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4368 build_int_cst (TREE_TYPE (val), 1), 0);
4371 /* Return the successor of VAL in its type, handling the infinite case. */
4373 static tree
4374 range_successor (tree val)
4376 tree type = TREE_TYPE (val);
4378 if (INTEGRAL_TYPE_P (type)
4379 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4380 return 0;
4381 else
4382 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4383 build_int_cst (TREE_TYPE (val), 1), 0);
4386 /* Given two ranges, see if we can merge them into one. Return 1 if we
4387 can, 0 if we can't. Set the output range into the specified parameters. */
4389 bool
4390 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4391 tree high0, int in1_p, tree low1, tree high1)
4393 int no_overlap;
4394 int subset;
4395 int temp;
4396 tree tem;
4397 int in_p;
4398 tree low, high;
4399 int lowequal = ((low0 == 0 && low1 == 0)
4400 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4401 low0, 0, low1, 0)));
4402 int highequal = ((high0 == 0 && high1 == 0)
4403 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4404 high0, 1, high1, 1)));
4406 /* Make range 0 be the range that starts first, or ends last if they
4407 start at the same value. Swap them if it isn't. */
4408 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4409 low0, 0, low1, 0))
4410 || (lowequal
4411 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4412 high1, 1, high0, 1))))
4414 temp = in0_p, in0_p = in1_p, in1_p = temp;
4415 tem = low0, low0 = low1, low1 = tem;
4416 tem = high0, high0 = high1, high1 = tem;
4419 /* Now flag two cases, whether the ranges are disjoint or whether the
4420 second range is totally subsumed in the first. Note that the tests
4421 below are simplified by the ones above. */
4422 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4423 high0, 1, low1, 0));
4424 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4425 high1, 1, high0, 1));
4427 /* We now have four cases, depending on whether we are including or
4428 excluding the two ranges. */
4429 if (in0_p && in1_p)
4431 /* If they don't overlap, the result is false. If the second range
4432 is a subset it is the result. Otherwise, the range is from the start
4433 of the second to the end of the first. */
4434 if (no_overlap)
4435 in_p = 0, low = high = 0;
4436 else if (subset)
4437 in_p = 1, low = low1, high = high1;
4438 else
4439 in_p = 1, low = low1, high = high0;
4442 else if (in0_p && ! in1_p)
4444 /* If they don't overlap, the result is the first range. If they are
4445 equal, the result is false. If the second range is a subset of the
4446 first, and the ranges begin at the same place, we go from just after
4447 the end of the second range to the end of the first. If the second
4448 range is not a subset of the first, or if it is a subset and both
4449 ranges end at the same place, the range starts at the start of the
4450 first range and ends just before the second range.
4451 Otherwise, we can't describe this as a single range. */
4452 if (no_overlap)
4453 in_p = 1, low = low0, high = high0;
4454 else if (lowequal && highequal)
4455 in_p = 0, low = high = 0;
4456 else if (subset && lowequal)
4458 low = range_successor (high1);
4459 high = high0;
4460 in_p = 1;
4461 if (low == 0)
4463 /* We are in the weird situation where high0 > high1 but
4464 high1 has no successor. Punt. */
4465 return 0;
4468 else if (! subset || highequal)
4470 low = low0;
4471 high = range_predecessor (low1);
4472 in_p = 1;
4473 if (high == 0)
4475 /* low0 < low1 but low1 has no predecessor. Punt. */
4476 return 0;
4479 else
4480 return 0;
4483 else if (! in0_p && in1_p)
4485 /* If they don't overlap, the result is the second range. If the second
4486 is a subset of the first, the result is false. Otherwise,
4487 the range starts just after the first range and ends at the
4488 end of the second. */
4489 if (no_overlap)
4490 in_p = 1, low = low1, high = high1;
4491 else if (subset || highequal)
4492 in_p = 0, low = high = 0;
4493 else
4495 low = range_successor (high0);
4496 high = high1;
4497 in_p = 1;
4498 if (low == 0)
4500 /* high1 > high0 but high0 has no successor. Punt. */
4501 return 0;
4506 else
4508 /* The case where we are excluding both ranges. Here the complex case
4509 is if they don't overlap. In that case, the only time we have a
4510 range is if they are adjacent. If the second is a subset of the
4511 first, the result is the first. Otherwise, the range to exclude
4512 starts at the beginning of the first range and ends at the end of the
4513 second. */
4514 if (no_overlap)
4516 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4517 range_successor (high0),
4518 1, low1, 0)))
4519 in_p = 0, low = low0, high = high1;
4520 else
4522 /* Canonicalize - [min, x] into - [-, x]. */
4523 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4524 switch (TREE_CODE (TREE_TYPE (low0)))
4526 case ENUMERAL_TYPE:
4527 if (TYPE_PRECISION (TREE_TYPE (low0))
4528 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4529 break;
4530 /* FALLTHROUGH */
4531 case INTEGER_TYPE:
4532 if (tree_int_cst_equal (low0,
4533 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4534 low0 = 0;
4535 break;
4536 case POINTER_TYPE:
4537 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4538 && integer_zerop (low0))
4539 low0 = 0;
4540 break;
4541 default:
4542 break;
4545 /* Canonicalize - [x, max] into - [x, -]. */
4546 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4547 switch (TREE_CODE (TREE_TYPE (high1)))
4549 case ENUMERAL_TYPE:
4550 if (TYPE_PRECISION (TREE_TYPE (high1))
4551 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4552 break;
4553 /* FALLTHROUGH */
4554 case INTEGER_TYPE:
4555 if (tree_int_cst_equal (high1,
4556 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4557 high1 = 0;
4558 break;
4559 case POINTER_TYPE:
4560 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4561 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4562 high1, 1,
4563 build_int_cst (TREE_TYPE (high1), 1),
4564 1)))
4565 high1 = 0;
4566 break;
4567 default:
4568 break;
4571 /* The ranges might also be adjacent between the maximum and
4572 minimum values of the given type. For
4573 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4574 return + [x + 1, y - 1]. */
4575 if (low0 == 0 && high1 == 0)
4577 low = range_successor (high0);
4578 high = range_predecessor (low1);
4579 if (low == 0 || high == 0)
4580 return 0;
4582 in_p = 1;
4584 else
4585 return 0;
4588 else if (subset)
4589 in_p = 0, low = low0, high = high0;
4590 else
4591 in_p = 0, low = low0, high = high1;
4594 *pin_p = in_p, *plow = low, *phigh = high;
4595 return 1;
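
/* A minimal standalone sketch (not GCC code) of the in0_p && in1_p case
   above, restricted to bounded integer ranges: disjoint ranges intersect
   to the always-false range, otherwise the result runs from the later
   start to the earlier end. The type and names are hypothetical.  */
struct irange { int valid; int low, high; };

static struct irange
intersect_ranges (struct irange a, struct irange b)
{
  struct irange r = { 0, 0, 0 };
  /* Make A the range that starts first, or ends last on a tie.  */
  if (a.low > b.low || (a.low == b.low && a.high < b.high))
    {
      struct irange t = a; a = b; b = t;
    }
  if (a.high < b.low)           /* no overlap: result is false   */
    return r;
  r.valid = 1;
  r.low = b.low;                /* overlap or subset             */
  r.high = a.high < b.high ? a.high : b.high;
  return r;
}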
4599 /* Subroutine of fold, looking inside expressions of the form
4600 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4601 of the COND_EXPR. This function is being used also to optimize
4602 A op B ? C : A, by reversing the comparison first.
4604 Return a folded expression whose code is not a COND_EXPR
4605 anymore, or NULL_TREE if no folding opportunity is found. */
4607 static tree
4608 fold_cond_expr_with_comparison (location_t loc, tree type,
4609 tree arg0, tree arg1, tree arg2)
4611 enum tree_code comp_code = TREE_CODE (arg0);
4612 tree arg00 = TREE_OPERAND (arg0, 0);
4613 tree arg01 = TREE_OPERAND (arg0, 1);
4614 tree arg1_type = TREE_TYPE (arg1);
4615 tree tem;
4617 STRIP_NOPS (arg1);
4618 STRIP_NOPS (arg2);
4620 /* If we have A op 0 ? A : -A, consider applying the following
4621 transformations:
4623 A == 0? A : -A same as -A
4624 A != 0? A : -A same as A
4625 A >= 0? A : -A same as abs (A)
4626 A > 0? A : -A same as abs (A)
4627 A <= 0? A : -A same as -abs (A)
4628 A < 0? A : -A same as -abs (A)
4630 None of these transformations work for modes with signed
4631 zeros. If A is +/-0, the first two transformations will
4632 change the sign of the result (from +0 to -0, or vice
4633 versa). The last four will fix the sign of the result,
4634 even though the original expressions could be positive or
4635 negative, depending on the sign of A.
4637 Note that all these transformations are correct if A is
4638 NaN, since the two alternatives (A and -A) are also NaNs. */
4639 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4640 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4641 ? real_zerop (arg01)
4642 : integer_zerop (arg01))
4643 && ((TREE_CODE (arg2) == NEGATE_EXPR
4644 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4645 /* In the case that A is of the form X-Y, '-A' (arg2) may
4646 have already been folded to Y-X, check for that. */
4647 || (TREE_CODE (arg1) == MINUS_EXPR
4648 && TREE_CODE (arg2) == MINUS_EXPR
4649 && operand_equal_p (TREE_OPERAND (arg1, 0),
4650 TREE_OPERAND (arg2, 1), 0)
4651 && operand_equal_p (TREE_OPERAND (arg1, 1),
4652 TREE_OPERAND (arg2, 0), 0))))
4653 switch (comp_code)
4655 case EQ_EXPR:
4656 case UNEQ_EXPR:
4657 tem = fold_convert_loc (loc, arg1_type, arg1);
4658 return pedantic_non_lvalue_loc (loc,
4659 fold_convert_loc (loc, type,
4660 negate_expr (tem)));
4661 case NE_EXPR:
4662 case LTGT_EXPR:
4663 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4664 case UNGE_EXPR:
4665 case UNGT_EXPR:
4666 if (flag_trapping_math)
4667 break;
4668 /* Fall through. */
4669 case GE_EXPR:
4670 case GT_EXPR:
4671 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4672 arg1 = fold_convert_loc (loc, signed_type_for
4673 (TREE_TYPE (arg1)), arg1);
4674 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4675 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4676 case UNLE_EXPR:
4677 case UNLT_EXPR:
4678 if (flag_trapping_math)
4679 break;
4680 case LE_EXPR:
4681 case LT_EXPR:
4682 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4683 arg1 = fold_convert_loc (loc, signed_type_for
4684 (TREE_TYPE (arg1)), arg1);
4685 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4686 return negate_expr (fold_convert_loc (loc, type, tem));
4687 default:
4688 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4689 break;
4692 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4693 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4694 both transformations are correct when A is NaN: A != 0
4695 is then true, and A == 0 is false. */
4697 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4698 && integer_zerop (arg01) && integer_zerop (arg2))
4700 if (comp_code == NE_EXPR)
4701 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4702 else if (comp_code == EQ_EXPR)
4703 return build_zero_cst (type);
4706 /* Try some transformations of A op B ? A : B.
4708 A == B? A : B same as B
4709 A != B? A : B same as A
4710 A >= B? A : B same as max (A, B)
4711 A > B? A : B same as max (B, A)
4712 A <= B? A : B same as min (A, B)
4713 A < B? A : B same as min (B, A)
4715 As above, these transformations don't work in the presence
4716 of signed zeros. For example, if A and B are zeros of
4717 opposite sign, the first two transformations will change
4718 the sign of the result. In the last four, the original
4719 expressions give different results for (A=+0, B=-0) and
4720 (A=-0, B=+0), but the transformed expressions do not.
4722 The first two transformations are correct if either A or B
4723 is a NaN. In the first transformation, the condition will
4724 be false, and B will indeed be chosen. In the case of the
4725 second transformation, the condition A != B will be true,
4726 and A will be chosen.
4728 The conversions to max() and min() are not correct if B is
4729 a number and A is not. The conditions in the original
4730 expressions will be false, so all four give B. The min()
4731 and max() versions would give a NaN instead. */
4732 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4733 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4734 /* Avoid these transformations if the COND_EXPR may be used
4735 as an lvalue in the C++ front-end. PR c++/19199. */
4736 && (in_gimple_form
4737 || VECTOR_TYPE_P (type)
4738 || (strcmp (lang_hooks.name, "GNU C++") != 0
4739 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4740 || ! maybe_lvalue_p (arg1)
4741 || ! maybe_lvalue_p (arg2)))
4743 tree comp_op0 = arg00;
4744 tree comp_op1 = arg01;
4745 tree comp_type = TREE_TYPE (comp_op0);
4747 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4748 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4750 comp_type = type;
4751 comp_op0 = arg1;
4752 comp_op1 = arg2;
4755 switch (comp_code)
4757 case EQ_EXPR:
4758 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4759 case NE_EXPR:
4760 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4761 case LE_EXPR:
4762 case LT_EXPR:
4763 case UNLE_EXPR:
4764 case UNLT_EXPR:
4765 /* In C++ a ?: expression can be an lvalue, so put the
4766 operand which will be used if they are equal first
4767 so that we can convert this back to the
4768 corresponding COND_EXPR. */
4769 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4771 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4772 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4773 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4774 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4775 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4776 comp_op1, comp_op0);
4777 return pedantic_non_lvalue_loc (loc,
4778 fold_convert_loc (loc, type, tem));
4780 break;
4781 case GE_EXPR:
4782 case GT_EXPR:
4783 case UNGE_EXPR:
4784 case UNGT_EXPR:
4785 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4787 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4788 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4789 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4790 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4791 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4792 comp_op1, comp_op0);
4793 return pedantic_non_lvalue_loc (loc,
4794 fold_convert_loc (loc, type, tem));
4796 break;
4797 case UNEQ_EXPR:
4798 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4799 return pedantic_non_lvalue_loc (loc,
4800 fold_convert_loc (loc, type, arg2));
4801 break;
4802 case LTGT_EXPR:
4803 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4804 return pedantic_non_lvalue_loc (loc,
4805 fold_convert_loc (loc, type, arg1));
4806 break;
4807 default:
4808 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4809 break;
4813 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4814 we might still be able to simplify this. For example,
4815 if C1 is one less or one more than C2, this might have started
4816 out as a MIN or MAX and been transformed by this function.
4817 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4819 if (INTEGRAL_TYPE_P (type)
4820 && TREE_CODE (arg01) == INTEGER_CST
4821 && TREE_CODE (arg2) == INTEGER_CST)
4822 switch (comp_code)
4824 case EQ_EXPR:
4825 if (TREE_CODE (arg1) == INTEGER_CST)
4826 break;
4827 /* We can replace A with C1 in this case. */
4828 arg1 = fold_convert_loc (loc, type, arg01);
4829 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4831 case LT_EXPR:
4832 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4833 MIN_EXPR, to preserve the signedness of the comparison. */
4834 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4835 OEP_ONLY_CONST)
4836 && operand_equal_p (arg01,
4837 const_binop (PLUS_EXPR, arg2,
4838 build_int_cst (type, 1)),
4839 OEP_ONLY_CONST))
4841 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4842 fold_convert_loc (loc, TREE_TYPE (arg00),
4843 arg2));
4844 return pedantic_non_lvalue_loc (loc,
4845 fold_convert_loc (loc, type, tem));
4847 break;
4849 case LE_EXPR:
4850 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4851 as above. */
4852 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4853 OEP_ONLY_CONST)
4854 && operand_equal_p (arg01,
4855 const_binop (MINUS_EXPR, arg2,
4856 build_int_cst (type, 1)),
4857 OEP_ONLY_CONST))
4859 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4860 fold_convert_loc (loc, TREE_TYPE (arg00),
4861 arg2));
4862 return pedantic_non_lvalue_loc (loc,
4863 fold_convert_loc (loc, type, tem));
4865 break;
4867 case GT_EXPR:
4868 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4869 MAX_EXPR, to preserve the signedness of the comparison. */
4870 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4871 OEP_ONLY_CONST)
4872 && operand_equal_p (arg01,
4873 const_binop (MINUS_EXPR, arg2,
4874 build_int_cst (type, 1)),
4875 OEP_ONLY_CONST))
4877 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4878 fold_convert_loc (loc, TREE_TYPE (arg00),
4879 arg2));
4880 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4882 break;
4884 case GE_EXPR:
4885 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4886 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4887 OEP_ONLY_CONST)
4888 && operand_equal_p (arg01,
4889 const_binop (PLUS_EXPR, arg2,
4890 build_int_cst (type, 1)),
4891 OEP_ONLY_CONST))
4893 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4894 fold_convert_loc (loc, TREE_TYPE (arg00),
4895 arg2));
4896 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4898 break;
4899 case NE_EXPR:
4900 break;
4901 default:
4902 gcc_unreachable ();
4905 return NULL_TREE;
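
/* A minimal standalone sketch (not GCC code) of two of the rewrites
   above, for integer operands where signed zeros do not arise (and
   ignoring INT_MIN, whose negation overflows): A > 0 ? A : -A is
   ABS_EXPR, and A < B ? A : B is MIN_EXPR (B, A).  */
static int
cond_as_abs (int a)
{
  return a > 0 ? a : -a;        /* folds to abs (a)    */
}

static int
cond_as_min (int a, int b)
{
  return a < b ? a : b;         /* folds to min (b, a) */
}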
4910 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4911 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4912 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4913 false) >= 2)
4914 #endif
4916 /* EXP is some logical combination of boolean tests. See if we can
4917 merge it into some range test. Return the new tree if so. */
4919 static tree
4920 fold_range_test (location_t loc, enum tree_code code, tree type,
4921 tree op0, tree op1)
4923 int or_op = (code == TRUTH_ORIF_EXPR
4924 || code == TRUTH_OR_EXPR);
4925 int in0_p, in1_p, in_p;
4926 tree low0, low1, low, high0, high1, high;
4927 bool strict_overflow_p = false;
4928 tree tem, lhs, rhs;
4929 const char * const warnmsg = G_("assuming signed overflow does not occur "
4930 "when simplifying range test");
4932 if (!INTEGRAL_TYPE_P (type))
4933 return 0;
4935 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4936 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4938 /* If this is an OR operation, invert both sides; we will invert
4939 again at the end. */
4940 if (or_op)
4941 in0_p = ! in0_p, in1_p = ! in1_p;
4943 /* If both expressions are the same, if we can merge the ranges, and we
4944 can build the range test, return it or it inverted. If one of the
4945 ranges is always true or always false, consider it to be the same
4946 expression as the other. */
4947 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4948 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4949 in1_p, low1, high1)
4950 && 0 != (tem = (build_range_check (loc, type,
4951 lhs != 0 ? lhs
4952 : rhs != 0 ? rhs : integer_zero_node,
4953 in_p, low, high))))
4955 if (strict_overflow_p)
4956 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4957 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4960 /* On machines where the branch cost is expensive, if this is a
4961 short-circuited branch and the underlying object on both sides
4962 is the same, make a non-short-circuit operation. */
4963 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4964 && lhs != 0 && rhs != 0
4965 && (code == TRUTH_ANDIF_EXPR
4966 || code == TRUTH_ORIF_EXPR)
4967 && operand_equal_p (lhs, rhs, 0))
4969 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4970 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4971 which cases we can't do this. */
4972 if (simple_operand_p (lhs))
4973 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4974 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4975 type, op0, op1);
4977 else if (!lang_hooks.decls.global_bindings_p ()
4978 && !CONTAINS_PLACEHOLDER_P (lhs))
4980 tree common = save_expr (lhs);
4982 if (0 != (lhs = build_range_check (loc, type, common,
4983 or_op ? ! in0_p : in0_p,
4984 low0, high0))
4985 && (0 != (rhs = build_range_check (loc, type, common,
4986 or_op ? ! in1_p : in1_p,
4987 low1, high1))))
4989 if (strict_overflow_p)
4990 fold_overflow_warning (warnmsg,
4991 WARN_STRICT_OVERFLOW_COMPARISON);
4992 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4993 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4994 type, lhs, rhs);
4999 return 0;
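
/* A minimal standalone sketch (not GCC code) of the classic merge this
   function performs on "ch >= '0' && ch <= '9'": the two single-ended
   ranges combine into the one range check built by build_range_check.  */
static int
is_decimal_digit (int ch)
{
  return (unsigned) ch - '0' <= 9u;   /* ch >= '0' && ch <= '9' */
}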
5002 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5003 bit value. Arrange things so the extra bits will be set to zero if and
5004 only if C is sign-extended to its full width. If MASK is nonzero,
5005 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5007 static tree
5008 unextend (tree c, int p, int unsignedp, tree mask)
5010 tree type = TREE_TYPE (c);
5011 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5012 tree temp;
5014 if (p == modesize || unsignedp)
5015 return c;
5017 /* We work by getting just the sign bit into the low-order bit, then
5018 into the high-order bit, then sign-extend. We then XOR that value
5019 with C. */
5020 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5022 /* We must use a signed type in order to get an arithmetic right shift.
5023 However, we must also avoid introducing accidental overflows, so that
5024 a subsequent call to integer_zerop will work. Hence we must
5025 do the type conversion here. At this point, the constant is either
5026 zero or one, and the conversion to a signed type can never overflow.
5027 We could get an overflow if this conversion is done anywhere else. */
5028 if (TYPE_UNSIGNED (type))
5029 temp = fold_convert (signed_type_for (type), temp);
5031 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5032 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5033 if (mask != 0)
5034 temp = const_binop (BIT_AND_EXPR, temp,
5035 fold_convert (TREE_TYPE (c), mask));
5036 /* If necessary, convert the type back to match the type of C. */
5037 if (TYPE_UNSIGNED (type))
5038 temp = fold_convert (type, temp);
5040 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
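
/* A minimal standalone sketch (not GCC code) of unextend for a 32-bit
   word: the high bits of the result are zero if and only if C arrived
   sign-extended from its P-bit field. Assumes 1 <= P <= 31 (P equal to
   the mode size is returned unchanged above) and no AND mask.  */
#include <stdint.h>

static uint32_t
unextend_sketch (uint32_t c, int p)
{
  uint32_t sign = (c >> (p - 1)) & 1;   /* the P-bit sign bit            */
  uint32_t high = sign ? ~0u << p : 0;  /* copies of it above bit P - 1  */
  return c ^ high;                      /* zero high bits iff extended   */
}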
5043 /* For an expression that has the form
5044 (A && B) || ~B
5046 (A || B) && ~B,
5047 we can drop one of the inner expressions and simplify to
5048 A || ~B
5050 A && ~B
5051 LOC is the location of the resulting expression. OP is the inner
5052 logical operation; the left-hand side in the examples above, while CMPOP
5053 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5054 removing a condition that guards another, as in
5055 (A != NULL && A->...) || A == NULL
5056 which we must not transform. If RHS_ONLY is true, only eliminate the
5057 right-most operand of the inner logical operation. */
5059 static tree
5060 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5061 bool rhs_only)
5063 tree type = TREE_TYPE (cmpop);
5064 enum tree_code code = TREE_CODE (cmpop);
5065 enum tree_code truthop_code = TREE_CODE (op);
5066 tree lhs = TREE_OPERAND (op, 0);
5067 tree rhs = TREE_OPERAND (op, 1);
5068 tree orig_lhs = lhs, orig_rhs = rhs;
5069 enum tree_code rhs_code = TREE_CODE (rhs);
5070 enum tree_code lhs_code = TREE_CODE (lhs);
5071 enum tree_code inv_code;
5073 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5074 return NULL_TREE;
5076 if (TREE_CODE_CLASS (code) != tcc_comparison)
5077 return NULL_TREE;
5079 if (rhs_code == truthop_code)
5081 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5082 if (newrhs != NULL_TREE)
5084 rhs = newrhs;
5085 rhs_code = TREE_CODE (rhs);
5088 if (lhs_code == truthop_code && !rhs_only)
5090 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5091 if (newlhs != NULL_TREE)
5093 lhs = newlhs;
5094 lhs_code = TREE_CODE (lhs);
5098 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5099 if (inv_code == rhs_code
5100 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5101 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5102 return lhs;
5103 if (!rhs_only && inv_code == lhs_code
5104 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5105 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5106 return rhs;
5107 if (rhs != orig_rhs || lhs != orig_lhs)
5108 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5109 lhs, rhs);
5110 return NULL_TREE;
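
/* A minimal standalone sketch (not GCC code) of the simplification
   above: in (A && B) || !B the inner B test may be dropped, since
   whenever B is false the right-hand arm already makes the result
   true.  */
static int
guard_direct (int a, int b)
{
  return (a != 0 && b != 0) || b == 0;
}

static int
guard_simplified (int a, int b)
{
  return a != 0 || b == 0;      /* same truth table, one test fewer */
}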
5113 /* Find ways of folding logical expressions of LHS and RHS:
5114 Try to merge two comparisons to the same innermost item.
5115 Look for range tests like "ch >= '0' && ch <= '9'".
5116 Look for combinations of simple terms on machines with expensive branches
5117 and evaluate the RHS unconditionally.
5119 For example, if we have p->a == 2 && p->b == 4 and we can make an
5120 object large enough to span both A and B, we can do this with a comparison
5121 against the object ANDed with the a mask.
5123 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5124 operations to do this with one comparison.
5126 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5127 function and the one above.
5129 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5130 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5132 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5133 two operands.
5135 We return the simplified tree or 0 if no optimization is possible. */
5137 static tree
5138 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5139 tree lhs, tree rhs)
5141 /* If this is the "or" of two comparisons, we can do something if
5142 the comparisons are NE_EXPR. If this is the "and", we can do something
5143 if the comparisons are EQ_EXPR. I.e.,
5144 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5146 WANTED_CODE is this operation code. For single bit fields, we can
5147 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5148 comparison for one-bit fields. */
5150 enum tree_code wanted_code;
5151 enum tree_code lcode, rcode;
5152 tree ll_arg, lr_arg, rl_arg, rr_arg;
5153 tree ll_inner, lr_inner, rl_inner, rr_inner;
5154 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5155 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5156 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5157 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5158 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5159 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5160 machine_mode lnmode, rnmode;
5161 tree ll_mask, lr_mask, rl_mask, rr_mask;
5162 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5163 tree l_const, r_const;
5164 tree lntype, rntype, result;
5165 HOST_WIDE_INT first_bit, end_bit;
5166 int volatilep;
5168 /* Start by getting the comparison codes. Fail if anything is volatile.
5169 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5170 it were surrounded with a NE_EXPR. */
5172 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5173 return 0;
5175 lcode = TREE_CODE (lhs);
5176 rcode = TREE_CODE (rhs);
5178 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5180 lhs = build2 (NE_EXPR, truth_type, lhs,
5181 build_int_cst (TREE_TYPE (lhs), 0));
5182 lcode = NE_EXPR;
5185 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5187 rhs = build2 (NE_EXPR, truth_type, rhs,
5188 build_int_cst (TREE_TYPE (rhs), 0));
5189 rcode = NE_EXPR;
5192 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5193 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5194 return 0;
5196 ll_arg = TREE_OPERAND (lhs, 0);
5197 lr_arg = TREE_OPERAND (lhs, 1);
5198 rl_arg = TREE_OPERAND (rhs, 0);
5199 rr_arg = TREE_OPERAND (rhs, 1);
5201 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5202 if (simple_operand_p (ll_arg)
5203 && simple_operand_p (lr_arg))
5205 if (operand_equal_p (ll_arg, rl_arg, 0)
5206 && operand_equal_p (lr_arg, rr_arg, 0))
5208 result = combine_comparisons (loc, code, lcode, rcode,
5209 truth_type, ll_arg, lr_arg);
5210 if (result)
5211 return result;
5213 else if (operand_equal_p (ll_arg, rr_arg, 0)
5214 && operand_equal_p (lr_arg, rl_arg, 0))
5216 result = combine_comparisons (loc, code, lcode,
5217 swap_tree_comparison (rcode),
5218 truth_type, ll_arg, lr_arg);
5219 if (result)
5220 return result;
5224 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5225 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5227 /* If the RHS can be evaluated unconditionally and its operands are
5228 simple, it wins to evaluate the RHS unconditionally on machines
5229 with expensive branches. In this case, this isn't a comparison
5230 that can be merged. */
5232 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5233 false) >= 2
5234 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5235 && simple_operand_p (rl_arg)
5236 && simple_operand_p (rr_arg))
5238 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5239 if (code == TRUTH_OR_EXPR
5240 && lcode == NE_EXPR && integer_zerop (lr_arg)
5241 && rcode == NE_EXPR && integer_zerop (rr_arg)
5242 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5243 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5244 return build2_loc (loc, NE_EXPR, truth_type,
5245 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5246 ll_arg, rl_arg),
5247 build_int_cst (TREE_TYPE (ll_arg), 0));
5249 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5250 if (code == TRUTH_AND_EXPR
5251 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5252 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5253 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5254 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5255 return build2_loc (loc, EQ_EXPR, truth_type,
5256 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5257 ll_arg, rl_arg),
5258 build_int_cst (TREE_TYPE (ll_arg), 0));
5261 /* See if the comparisons can be merged. Then get all the parameters for
5262 each side. */
5264 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5265 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5266 return 0;
5268 volatilep = 0;
5269 ll_inner = decode_field_reference (loc, ll_arg,
5270 &ll_bitsize, &ll_bitpos, &ll_mode,
5271 &ll_unsignedp, &volatilep, &ll_mask,
5272 &ll_and_mask);
5273 lr_inner = decode_field_reference (loc, lr_arg,
5274 &lr_bitsize, &lr_bitpos, &lr_mode,
5275 &lr_unsignedp, &volatilep, &lr_mask,
5276 &lr_and_mask);
5277 rl_inner = decode_field_reference (loc, rl_arg,
5278 &rl_bitsize, &rl_bitpos, &rl_mode,
5279 &rl_unsignedp, &volatilep, &rl_mask,
5280 &rl_and_mask);
5281 rr_inner = decode_field_reference (loc, rr_arg,
5282 &rr_bitsize, &rr_bitpos, &rr_mode,
5283 &rr_unsignedp, &volatilep, &rr_mask,
5284 &rr_and_mask);
5286 /* It must be true that the inner operation on the lhs of each
5287 comparison must be the same if we are to be able to do anything.
5288 Then see if we have constants. If not, the same must be true for
5289 the rhs's. */
5290 if (volatilep || ll_inner == 0 || rl_inner == 0
5291 || ! operand_equal_p (ll_inner, rl_inner, 0))
5292 return 0;
5294 if (TREE_CODE (lr_arg) == INTEGER_CST
5295 && TREE_CODE (rr_arg) == INTEGER_CST)
5296 l_const = lr_arg, r_const = rr_arg;
5297 else if (lr_inner == 0 || rr_inner == 0
5298 || ! operand_equal_p (lr_inner, rr_inner, 0))
5299 return 0;
5300 else
5301 l_const = r_const = 0;
5303 /* If either comparison code is not correct for our logical operation,
5304 fail. However, we can convert a one-bit comparison against zero into
5305 the opposite comparison against that bit being set in the field. */
5307 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5308 if (lcode != wanted_code)
5310 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5312 /* Make the left operand unsigned, since we are only interested
5313 in the value of one bit. Otherwise we are doing the wrong
5314 thing below. */
5315 ll_unsignedp = 1;
5316 l_const = ll_mask;
5318 else
5319 return 0;
5322 /* This is analogous to the code for l_const above. */
5323 if (rcode != wanted_code)
5325 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5327 rl_unsignedp = 1;
5328 r_const = rl_mask;
5330 else
5331 return 0;
5334 /* See if we can find a mode that contains both fields being compared on
5335 the left. If we can't, fail. Otherwise, update all constants and masks
5336 to be relative to a field of that size. */
5337 first_bit = MIN (ll_bitpos, rl_bitpos);
5338 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5339 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5340 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5341 volatilep);
5342 if (lnmode == VOIDmode)
5343 return 0;
5345 lnbitsize = GET_MODE_BITSIZE (lnmode);
5346 lnbitpos = first_bit & ~ (lnbitsize - 1);
5347 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5348 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5350 if (BYTES_BIG_ENDIAN)
5352 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5353 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5356 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5357 size_int (xll_bitpos));
5358 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5359 size_int (xrl_bitpos));
5361 if (l_const)
5363 l_const = fold_convert_loc (loc, lntype, l_const);
5364 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5365 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5366 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5367 fold_build1_loc (loc, BIT_NOT_EXPR,
5368 lntype, ll_mask))))
5370 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5372 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5375 if (r_const)
5377 r_const = fold_convert_loc (loc, lntype, r_const);
5378 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5379 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5380 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5381 fold_build1_loc (loc, BIT_NOT_EXPR,
5382 lntype, rl_mask))))
5384 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5386 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5390 /* If the right sides are not constant, do the same for it. Also,
5391 disallow this optimization if a size or signedness mismatch occurs
5392 between the left and right sides. */
5393 if (l_const == 0)
5395 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5396 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5397 /* Make sure the two fields on the right
5398 correspond to the left without being swapped. */
5399 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5400 return 0;
5402 first_bit = MIN (lr_bitpos, rr_bitpos);
5403 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5404 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5405 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5406 volatilep);
5407 if (rnmode == VOIDmode)
5408 return 0;
5410 rnbitsize = GET_MODE_BITSIZE (rnmode);
5411 rnbitpos = first_bit & ~ (rnbitsize - 1);
5412 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5413 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5415 if (BYTES_BIG_ENDIAN)
5417 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5418 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5421 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5422 rntype, lr_mask),
5423 size_int (xlr_bitpos));
5424 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5425 rntype, rr_mask),
5426 size_int (xrr_bitpos));
5428 /* Make a mask that corresponds to both fields being compared.
5429 Do this for both items being compared. If the operands are the
5430 same size and the bits being compared are in the same position
5431 then we can do this by masking both and comparing the masked
5432 results. */
5433 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5434 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5435 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5437 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5438 ll_unsignedp || rl_unsignedp);
5439 if (! all_ones_mask_p (ll_mask, lnbitsize))
5440 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5442 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5443 lr_unsignedp || rr_unsignedp);
5444 if (! all_ones_mask_p (lr_mask, rnbitsize))
5445 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5447 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5450 /* There is still another way we can do something: If both pairs of
5451 fields being compared are adjacent, we may be able to make a wider
5452 field containing them both.
5454 Note that we still must mask the lhs/rhs expressions. Furthermore,
5455 the mask must be shifted to account for the shift done by
5456 make_bit_field_ref. */
5457 if ((ll_bitsize + ll_bitpos == rl_bitpos
5458 && lr_bitsize + lr_bitpos == rr_bitpos)
5459 || (ll_bitpos == rl_bitpos + rl_bitsize
5460 && lr_bitpos == rr_bitpos + rr_bitsize))
5462 tree type;
5464 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5465 ll_bitsize + rl_bitsize,
5466 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5467 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5468 lr_bitsize + rr_bitsize,
5469 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5471 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5472 size_int (MIN (xll_bitpos, xrl_bitpos)));
5473 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5474 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5476 /* Convert to the smaller type before masking out unwanted bits. */
5477 type = lntype;
5478 if (lntype != rntype)
5480 if (lnbitsize > rnbitsize)
5482 lhs = fold_convert_loc (loc, rntype, lhs);
5483 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5484 type = rntype;
5486 else if (lnbitsize < rnbitsize)
5488 rhs = fold_convert_loc (loc, lntype, rhs);
5489 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5490 type = lntype;
5494 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5495 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5497 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5498 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5500 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5503 return 0;
5506 /* Handle the case of comparisons with constants. If there is something in
5507 common between the masks, those bits of the constants must be the same.
5508 If not, the condition is always false. Test for this to avoid generating
5509 incorrect code below. */
5510 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5511 if (! integer_zerop (result)
5512 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5513 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5515 if (wanted_code == NE_EXPR)
5517 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5518 return constant_boolean_node (true, truth_type);
5520 else
5522 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5523 return constant_boolean_node (false, truth_type);
5527 /* Construct the expression we will return. First get the component
5528 reference we will make. Unless the mask is all ones the width of
5529 that field, perform the mask operation. Then compare with the
5530 merged constant. */
5531 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5532 ll_unsignedp || rl_unsignedp);
5534 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5535 if (! all_ones_mask_p (ll_mask, lnbitsize))
5536 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5538 return build2_loc (loc, wanted_code, truth_type, result,
5539 const_binop (BIT_IOR_EXPR, l_const, r_const));
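/* A worked illustration of the merge above -- a sketch only, using a
   hypothetical struct for exposition rather than anything from this
   file.  Given

     struct s { unsigned a : 4; unsigned b : 4; } x;

     x.a == 3 && x.b == 5

   both fields live in one byte, so ll_mask | rl_mask covers the whole
   unit and l_const | r_const combines the shifted constants.  On a
   typical little-endian target the pair of tests folds to one 8-bit
   load compared against 0x53, and since the merged mask is all ones
   for that width, all_ones_mask_p lets the BIT_AND_EXPR be dropped.  */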
5542 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5543 constant. */
5545 static tree
5546 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5547 tree op0, tree op1)
5549 tree arg0 = op0;
5550 enum tree_code op_code;
5551 tree comp_const;
5552 tree minmax_const;
5553 int consts_equal, consts_lt;
5554 tree inner;
5556 STRIP_SIGN_NOPS (arg0);
5558 op_code = TREE_CODE (arg0);
5559 minmax_const = TREE_OPERAND (arg0, 1);
5560 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5561 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5562 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5563 inner = TREE_OPERAND (arg0, 0);
5565 /* If something does not permit us to optimize, return NULL_TREE. */
5566 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5567 || TREE_CODE (comp_const) != INTEGER_CST
5568 || TREE_OVERFLOW (comp_const)
5569 || TREE_CODE (minmax_const) != INTEGER_CST
5570 || TREE_OVERFLOW (minmax_const))
5571 return NULL_TREE;
5573 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5574 and GT_EXPR, doing the rest with recursive calls using logical
5575 simplifications. */
5576 switch (code)
5578 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5580 tree tem
5581 = optimize_minmax_comparison (loc,
5582 invert_tree_comparison (code, false),
5583 type, op0, op1);
5584 if (tem)
5585 return invert_truthvalue_loc (loc, tem);
5586 return NULL_TREE;
5589 case GE_EXPR:
5590 return
5591 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5592 optimize_minmax_comparison
5593 (loc, EQ_EXPR, type, arg0, comp_const),
5594 optimize_minmax_comparison
5595 (loc, GT_EXPR, type, arg0, comp_const));
5597 case EQ_EXPR:
5598 if (op_code == MAX_EXPR && consts_equal)
5599 /* MAX (X, 0) == 0 -> X <= 0 */
5600 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5602 else if (op_code == MAX_EXPR && consts_lt)
5603 /* MAX (X, 0) == 5 -> X == 5 */
5604 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5606 else if (op_code == MAX_EXPR)
5607 /* MAX (X, 0) == -1 -> false */
5608 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5610 else if (consts_equal)
5611 /* MIN (X, 0) == 0 -> X >= 0 */
5612 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5614 else if (consts_lt)
5615 /* MIN (X, 0) == 5 -> false */
5616 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5618 else
5619 /* MIN (X, 0) == -1 -> X == -1 */
5620 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5622 case GT_EXPR:
5623 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5624 /* MAX (X, 0) > 0 -> X > 0
5625 MAX (X, 0) > 5 -> X > 5 */
5626 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5628 else if (op_code == MAX_EXPR)
5629 /* MAX (X, 0) > -1 -> true */
5630 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5632 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5633 /* MIN (X, 0) > 0 -> false
5634 MIN (X, 0) > 5 -> false */
5635 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5637 else
5638 /* MIN (X, 0) > -1 -> X > -1 */
5639 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5641 default:
5642 return NULL_TREE;
5646 /* T is an integer expression that is being multiplied, divided, or taken a
5647 modulus (CODE says which and what kind of divide or modulus) by a
5648 constant C. See if we can eliminate that operation by folding it with
5649 other operations already in T. WIDE_TYPE, if non-null, is a type that
5650 should be used for the computation if wider than our type.
5652 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5653 (X * 2) + (Y * 4). We must, however, be assured that either the original
5654 expression would not overflow or that overflow is undefined for the type
5655 in the language in question.
5657 If we return a non-null expression, it is an equivalent form of the
5658 original computation, but need not be in the original type.
5660 We set *STRICT_OVERFLOW_P to true if the return value depends on
5661 signed overflow being undefined. Otherwise we do not change
5662 *STRICT_OVERFLOW_P. */
5664 static tree
5665 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5666 bool *strict_overflow_p)
5668 /* To avoid exponential search depth, refuse to allow recursion past
5669 three levels. Beyond that (1) it's highly unlikely that we'll find
5670 something interesting and (2) we've probably processed it before
5671 when we built the inner expression. */
5673 static int depth;
5674 tree ret;
5676 if (depth > 3)
5677 return NULL;
5679 depth++;
5680 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5681 depth--;
5683 return ret;
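/* A small worked example -- a sketch, not a transcript of a real run.
   Folding (X * 8 + 4) / 4 with signed X and undefined overflow:

     t = X * 8 + 4, c = 4, code = TRUNC_DIV_EXPR

   The PLUS_EXPR case below divides both operands: X * 8 becomes X * 2
   through the multiply/divide cancellation, and the constant 4 becomes
   1, yielding X * 2 + 1 with no division left.  *STRICT_OVERFLOW_P is
   set, since the result relies on X * 8 not overflowing.  */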
5686 static tree
5687 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5688 bool *strict_overflow_p)
5690 tree type = TREE_TYPE (t);
5691 enum tree_code tcode = TREE_CODE (t);
5692 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5693 > GET_MODE_SIZE (TYPE_MODE (type)))
5694 ? wide_type : type);
5695 tree t1, t2;
5696 int same_p = tcode == code;
5697 tree op0 = NULL_TREE, op1 = NULL_TREE;
5698 bool sub_strict_overflow_p;
5700 /* Don't deal with constants of zero here; they confuse the code below. */
5701 if (integer_zerop (c))
5702 return NULL_TREE;
5704 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5705 op0 = TREE_OPERAND (t, 0);
5707 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5708 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5710 /* Note that we need not handle conditional operations here since fold
5711 already handles those cases. So just do arithmetic here. */
5712 switch (tcode)
5714 case INTEGER_CST:
5715 /* For a constant, we can always simplify if we are a multiply
5716 or (for divide and modulus) if it is a multiple of our constant. */
5717 if (code == MULT_EXPR
5718 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5719 return const_binop (code, fold_convert (ctype, t),
5720 fold_convert (ctype, c));
5721 break;
5723 CASE_CONVERT: case NON_LVALUE_EXPR:
5724 /* If op0 is an expression ... */
5725 if ((COMPARISON_CLASS_P (op0)
5726 || UNARY_CLASS_P (op0)
5727 || BINARY_CLASS_P (op0)
5728 || VL_EXP_CLASS_P (op0)
5729 || EXPRESSION_CLASS_P (op0))
5730 /* ... and has wrapping overflow, and its type is smaller
5731 than ctype, then we cannot pass through as widening. */
5732 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5733 && (TYPE_PRECISION (ctype)
5734 > TYPE_PRECISION (TREE_TYPE (op0))))
5735 /* ... or this is a truncation (t is narrower than op0),
5736 then we cannot pass through this narrowing. */
5737 || (TYPE_PRECISION (type)
5738 < TYPE_PRECISION (TREE_TYPE (op0)))
5739 /* ... or signedness changes for division or modulus,
5740 then we cannot pass through this conversion. */
5741 || (code != MULT_EXPR
5742 && (TYPE_UNSIGNED (ctype)
5743 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5744 /* ... or has undefined overflow while the converted to
5745 type has not, we cannot do the operation in the inner type
5746 as that would introduce undefined overflow. */
5747 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5748 && !TYPE_OVERFLOW_UNDEFINED (type))))
5749 break;
5751 /* Pass the constant down and see if we can make a simplification. If
5752 we can, replace this expression with the inner simplification for
5753 possible later conversion to our or some other type. */
5754 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5755 && TREE_CODE (t2) == INTEGER_CST
5756 && !TREE_OVERFLOW (t2)
5757 && (0 != (t1 = extract_muldiv (op0, t2, code,
5758 code == MULT_EXPR
5759 ? ctype : NULL_TREE,
5760 strict_overflow_p))))
5761 return t1;
5762 break;
5764 case ABS_EXPR:
5765 /* If widening the type changes it from signed to unsigned, then we
5766 must avoid building ABS_EXPR itself as unsigned. */
5767 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5769 tree cstype = (*signed_type_for) (ctype);
5770 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5771 != 0)
5773 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5774 return fold_convert (ctype, t1);
5776 break;
5778 /* If the constant is negative, we cannot simplify this. */
5779 if (tree_int_cst_sgn (c) == -1)
5780 break;
5781 /* FALLTHROUGH */
5782 case NEGATE_EXPR:
5783 /* For division and modulus, type can't be unsigned, as e.g.
5784 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5785 For signed types, even with wrapping overflow, this is fine. */
5786 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5787 break;
5788 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5789 != 0)
5790 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5791 break;
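/* Concrete numbers for the caveat above -- a sketch with 32-bit
   unsigned arithmetic and x = 2:

     (-(x / 2U)) / 2U  ==  0xffffffffU / 2U  ==  0x7fffffff
     -((x / 2U) / 2U)  ==  -(1U / 2U)        ==  0

   so negation cannot be hoisted past unsigned division, which is why
   the recursion above is limited to MULT_EXPR and signed types.  */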
5793 case MIN_EXPR: case MAX_EXPR:
5794 /* If widening the type changes the signedness, then we can't perform
5795 this optimization as that changes the result. */
5796 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5797 break;
5799 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5800 sub_strict_overflow_p = false;
5801 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5802 &sub_strict_overflow_p)) != 0
5803 && (t2 = extract_muldiv (op1, c, code, wide_type,
5804 &sub_strict_overflow_p)) != 0)
5806 if (tree_int_cst_sgn (c) < 0)
5807 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5808 if (sub_strict_overflow_p)
5809 *strict_overflow_p = true;
5810 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5811 fold_convert (ctype, t2));
5813 break;
5815 case LSHIFT_EXPR: case RSHIFT_EXPR:
5816 /* If the second operand is constant, this is a multiplication
5817 or floor division, by a power of two, so we can treat it that
5818 way unless the multiplier or divisor overflows. Signed
5819 left-shift overflow is implementation-defined rather than
5820 undefined in C90, so do not convert signed left shift into
5821 multiplication. */
5822 if (TREE_CODE (op1) == INTEGER_CST
5823 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5824 /* const_binop may not detect overflow correctly,
5825 so check for it explicitly here. */
5826 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5827 && 0 != (t1 = fold_convert (ctype,
5828 const_binop (LSHIFT_EXPR,
5829 size_one_node,
5830 op1)))
5831 && !TREE_OVERFLOW (t1))
5832 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5833 ? MULT_EXPR : FLOOR_DIV_EXPR,
5834 ctype,
5835 fold_convert (ctype, op0),
5836 t1),
5837 c, code, wide_type, strict_overflow_p);
5838 break;
5840 case PLUS_EXPR: case MINUS_EXPR:
5841 /* See if we can eliminate the operation on both sides. If we can, we
5842 can return a new PLUS or MINUS. If we can't, the only remaining
5843 cases where we can do anything are if the second operand is a
5844 constant. */
5845 sub_strict_overflow_p = false;
5846 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5847 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5848 if (t1 != 0 && t2 != 0
5849 && (code == MULT_EXPR
5850 /* If not multiplication, we can only do this if both operands
5851 are divisible by c. */
5852 || (multiple_of_p (ctype, op0, c)
5853 && multiple_of_p (ctype, op1, c))))
5855 if (sub_strict_overflow_p)
5856 *strict_overflow_p = true;
5857 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5858 fold_convert (ctype, t2));
5861 /* If this was a subtraction, negate OP1 and set it to be an addition.
5862 This simplifies the logic below. */
5863 if (tcode == MINUS_EXPR)
5865 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5866 /* If OP1 was not easily negatable, the constant may be OP0. */
5867 if (TREE_CODE (op0) == INTEGER_CST)
5869 tree tem = op0;
5870 op0 = op1;
5871 op1 = tem;
5872 tem = t1;
5873 t1 = t2;
5874 t2 = tem;
5878 if (TREE_CODE (op1) != INTEGER_CST)
5879 break;
5881 /* If either OP1 or C is negative, this optimization is not safe for
5882 some of the division and remainder types while for others we need
5883 to change the code. */
5884 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5886 if (code == CEIL_DIV_EXPR)
5887 code = FLOOR_DIV_EXPR;
5888 else if (code == FLOOR_DIV_EXPR)
5889 code = CEIL_DIV_EXPR;
5890 else if (code != MULT_EXPR
5891 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5892 break;
5895 /* If it's a multiply or a division/modulus operation of a multiple
5896 of our constant, do the operation and verify it doesn't overflow. */
5897 if (code == MULT_EXPR
5898 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5900 op1 = const_binop (code, fold_convert (ctype, op1),
5901 fold_convert (ctype, c));
5902 /* We allow the constant to overflow with wrapping semantics. */
5903 if (op1 == 0
5904 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5905 break;
5907 else
5908 break;
5910 /* If we have an unsigned type, we cannot widen the operation since it
5911 will change the result if the original computation overflowed. */
5912 if (TYPE_UNSIGNED (ctype) && ctype != type)
5913 break;
5915 /* If we were able to eliminate our operation from the first side,
5916 apply our operation to the second side and reform the PLUS. */
5917 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5918 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5920 /* The last case is if we are a multiply. In that case, we can
5921 apply the distributive law to commute the multiply and addition
5922 if the multiplication of the constants doesn't overflow
5923 and overflow is defined. With undefined overflow
5924 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5925 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5926 return fold_build2 (tcode, ctype,
5927 fold_build2 (code, ctype,
5928 fold_convert (ctype, op0),
5929 fold_convert (ctype, c)),
5930 op1);
5932 break;
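/* Why the distributive step above requires wrapping overflow -- a
   sketch with 32-bit int: for op0 = 1 << 30, orig_op1 = -(1 << 30)
   and c = 4,

     (op0 + orig_op1) * c  ==  0 * 4  ==  0     -- fine, while
     op0 * c               ==  (1 << 30) * 4    -- overflows int,

   so with undefined overflow the commuted form could introduce
   overflow the original never had; hence the TYPE_OVERFLOW_WRAPS
   guard.  */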
5934 case MULT_EXPR:
5935 /* We have a special case here if we are doing something like
5936 (C * 8) % 4 since we know that's zero. */
5937 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5938 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5939 /* If the multiplication can overflow we cannot optimize this. */
5940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5941 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5942 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5944 *strict_overflow_p = true;
5945 return omit_one_operand (type, integer_zero_node, op0);
5948 /* ... fall through ... */
5950 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5951 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5952 /* If we can extract our operation from the LHS, do so and return a
5953 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5954 do something only if the second operand is a constant. */
5955 if (same_p
5956 && (t1 = extract_muldiv (op0, c, code, wide_type,
5957 strict_overflow_p)) != 0)
5958 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5959 fold_convert (ctype, op1));
5960 else if (tcode == MULT_EXPR && code == MULT_EXPR
5961 && (t1 = extract_muldiv (op1, c, code, wide_type,
5962 strict_overflow_p)) != 0)
5963 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5964 fold_convert (ctype, t1));
5965 else if (TREE_CODE (op1) != INTEGER_CST)
5966 return 0;
5968 /* If these are the same operation types, we can associate them
5969 assuming no overflow. */
5970 if (tcode == code)
5972 bool overflow_p = false;
5973 bool overflow_mul_p;
5974 signop sign = TYPE_SIGN (ctype);
5975 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5976 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5977 if (overflow_mul_p
5978 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5979 overflow_p = true;
5980 if (!overflow_p)
5981 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5982 wide_int_to_tree (ctype, mul));
5985 /* If these operations "cancel" each other, we have the main
5986 optimizations of this pass, which occur when either constant is a
5987 multiple of the other, in which case we replace this with an
5988 operation of either CODE or TCODE.
5990 If we have an unsigned type, we cannot do this since it will change
5991 the result if the original computation overflowed. */
5992 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5993 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5994 || (tcode == MULT_EXPR
5995 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5996 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5997 && code != MULT_EXPR)))
5999 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6001 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6002 *strict_overflow_p = true;
6003 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6004 fold_convert (ctype,
6005 const_binop (TRUNC_DIV_EXPR,
6006 op1, c)));
6008 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6010 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6011 *strict_overflow_p = true;
6012 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6013 fold_convert (ctype,
6014 const_binop (TRUNC_DIV_EXPR,
6015 c, op1)));
6018 break;
6020 default:
6021 break;
6024 return 0;
6027 /* Return a node which has the indicated constant VALUE (either 0 or
6028 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6029 and is of the indicated TYPE. */
6031 tree
6032 constant_boolean_node (bool value, tree type)
6034 if (type == integer_type_node)
6035 return value ? integer_one_node : integer_zero_node;
6036 else if (type == boolean_type_node)
6037 return value ? boolean_true_node : boolean_false_node;
6038 else if (TREE_CODE (type) == VECTOR_TYPE)
6039 return build_vector_from_val (type,
6040 build_int_cst (TREE_TYPE (type),
6041 value ? -1 : 0));
6042 else
6043 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6047 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6048 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6049 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6050 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6051 COND is the first argument to CODE; otherwise (as in the example
6052 given here), it is the second argument. TYPE is the type of the
6053 original expression. Return NULL_TREE if no simplification is
6054 possible. */
6056 static tree
6057 fold_binary_op_with_conditional_arg (location_t loc,
6058 enum tree_code code,
6059 tree type, tree op0, tree op1,
6060 tree cond, tree arg, int cond_first_p)
6062 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6063 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6064 tree test, true_value, false_value;
6065 tree lhs = NULL_TREE;
6066 tree rhs = NULL_TREE;
6067 enum tree_code cond_code = COND_EXPR;
6069 if (TREE_CODE (cond) == COND_EXPR
6070 || TREE_CODE (cond) == VEC_COND_EXPR)
6072 test = TREE_OPERAND (cond, 0);
6073 true_value = TREE_OPERAND (cond, 1);
6074 false_value = TREE_OPERAND (cond, 2);
6075 /* If this operand throws an exception, then it does not make
6076 sense to try to perform a logical or arithmetic operation
6077 involving it. */
6078 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6079 lhs = true_value;
6080 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6081 rhs = false_value;
6083 else
6085 tree testtype = TREE_TYPE (cond);
6086 test = cond;
6087 true_value = constant_boolean_node (true, testtype);
6088 false_value = constant_boolean_node (false, testtype);
6091 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6092 cond_code = VEC_COND_EXPR;
6094 /* This transformation is only worthwhile if we don't have to wrap ARG
6095 in a SAVE_EXPR and the operation can be simplified without recursing
6096 on at least one of the branches once it's pushed inside the COND_EXPR. */
6097 if (!TREE_CONSTANT (arg)
6098 && (TREE_SIDE_EFFECTS (arg)
6099 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6100 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6101 return NULL_TREE;
6103 arg = fold_convert_loc (loc, arg_type, arg);
6104 if (lhs == 0)
6106 true_value = fold_convert_loc (loc, cond_type, true_value);
6107 if (cond_first_p)
6108 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6109 else
6110 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6112 if (rhs == 0)
6114 false_value = fold_convert_loc (loc, cond_type, false_value);
6115 if (cond_first_p)
6116 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6117 else
6118 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6121 /* Check that we have simplified at least one of the branches. */
6122 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6123 return NULL_TREE;
6125 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6129 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6131 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6132 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6133 ADDEND is the same as X.
6135 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6136 and finite. The problematic cases are when X is zero, and its mode
6137 has signed zeros. In the case of rounding towards -infinity,
6138 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6139 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6141 bool
6142 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6144 if (!real_zerop (addend))
6145 return false;
6147 /* Don't allow the fold with -fsignaling-nans. */
6148 if (HONOR_SNANS (TYPE_MODE (type)))
6149 return false;
6151 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6152 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6153 return true;
6155 /* In a vector or complex, we would need to check the sign of all zeros. */
6156 if (TREE_CODE (addend) != REAL_CST)
6157 return false;
6159 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6160 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6161 negate = !negate;
6163 /* The mode has signed zeros, and we have to honor their sign.
6164 In this situation, there is only one case we can return true for.
6165 X - 0 is the same as X unless rounding towards -infinity is
6166 supported. */
6167 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
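/* Concrete values for the comment above -- a sketch:

     -0.0 + 0.0  ==  +0.0  under round-to-nearest, so X + 0 is not
                           always X once zero signs matter, while
     +0.0 - 0.0  ==  -0.0  only when rounding towards -infinity,

   which is exactly the negate && !HONOR_SIGN_DEPENDENT_ROUNDING
   condition computed above.  */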
6170 /* Subroutine of fold() that checks comparisons of built-in math
6171 functions against real constants.
6173 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6174 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6175 is the type of the result and ARG0 and ARG1 are the operands of the
6176 comparison. ARG1 must be a TREE_REAL_CST.
6178 The function returns the constant folded tree if a simplification
6179 can be made, and NULL_TREE otherwise. */
6181 static tree
6182 fold_mathfn_compare (location_t loc,
6183 enum built_in_function fcode, enum tree_code code,
6184 tree type, tree arg0, tree arg1)
6186 REAL_VALUE_TYPE c;
6188 if (BUILTIN_SQRT_P (fcode))
6190 tree arg = CALL_EXPR_ARG (arg0, 0);
6191 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6193 c = TREE_REAL_CST (arg1);
6194 if (REAL_VALUE_NEGATIVE (c))
6196 /* sqrt(x) < y is always false, if y is negative. */
6197 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6198 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6200 /* sqrt(x) > y is always true, if y is negative and we
6201 don't care about NaNs, i.e. negative values of x. */
6202 if (code == NE_EXPR || !HONOR_NANS (mode))
6203 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6205 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6206 return fold_build2_loc (loc, GE_EXPR, type, arg,
6207 build_real (TREE_TYPE (arg), dconst0));
6209 else if (code == GT_EXPR || code == GE_EXPR)
6211 REAL_VALUE_TYPE c2;
6213 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6214 real_convert (&c2, mode, &c2);
6216 if (REAL_VALUE_ISINF (c2))
6218 /* sqrt(x) > y is x == +Inf, when y is very large. */
6219 if (HONOR_INFINITIES (mode))
6220 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6221 build_real (TREE_TYPE (arg), c2));
6223 /* sqrt(x) > y is always false, when y is very large
6224 and we don't care about infinities. */
6225 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6228 /* sqrt(x) > c is the same as x > c*c. */
6229 return fold_build2_loc (loc, code, type, arg,
6230 build_real (TREE_TYPE (arg), c2));
6232 else if (code == LT_EXPR || code == LE_EXPR)
6234 REAL_VALUE_TYPE c2;
6236 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6237 real_convert (&c2, mode, &c2);
6239 if (REAL_VALUE_ISINF (c2))
6241 /* sqrt(x) < y is always true, when y is a very large
6242 value and we don't care about NaNs or Infinities. */
6243 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6244 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6246 /* sqrt(x) < y is x != +Inf when y is very large and we
6247 don't care about NaNs. */
6248 if (! HONOR_NANS (mode))
6249 return fold_build2_loc (loc, NE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg), c2));
6252 /* sqrt(x) < y is x >= 0 when y is very large and we
6253 don't care about Infinities. */
6254 if (! HONOR_INFINITIES (mode))
6255 return fold_build2_loc (loc, GE_EXPR, type, arg,
6256 build_real (TREE_TYPE (arg), dconst0));
6258 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6259 arg = save_expr (arg);
6260 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6261 fold_build2_loc (loc, GE_EXPR, type, arg,
6262 build_real (TREE_TYPE (arg),
6263 dconst0)),
6264 fold_build2_loc (loc, NE_EXPR, type, arg,
6265 build_real (TREE_TYPE (arg),
6266 c2)));
6269 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6270 if (! HONOR_NANS (mode))
6271 return fold_build2_loc (loc, code, type, arg,
6272 build_real (TREE_TYPE (arg), c2));
6274 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6275 arg = save_expr (arg);
6276 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6277 fold_build2_loc (loc, GE_EXPR, type, arg,
6278 build_real (TREE_TYPE (arg),
6279 dconst0)),
6280 fold_build2_loc (loc, code, type, arg,
6281 build_real (TREE_TYPE (arg),
6282 c2)));
6286 return NULL_TREE;
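/* Putting the sqrt cases together -- an illustrative sketch, not an
   exhaustive table.  With NaNs honored,

     sqrt (x) < 2.0   folds to   x >= 0.0 && x < 4.0

   whereas under -ffinite-math-only the NaN guard is unnecessary and
   the same comparison folds to just  x < 4.0.  */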
6289 /* Subroutine of fold() that optimizes comparisons against Infinities,
6290 either +Inf or -Inf.
6292 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6293 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6294 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6296 The function returns the constant folded tree if a simplification
6297 can be made, and NULL_TREE otherwise. */
6299 static tree
6300 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6301 tree arg0, tree arg1)
6303 machine_mode mode;
6304 REAL_VALUE_TYPE max;
6305 tree temp;
6306 bool neg;
6308 mode = TYPE_MODE (TREE_TYPE (arg0));
6310 /* For negative infinity swap the sense of the comparison. */
6311 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6312 if (neg)
6313 code = swap_tree_comparison (code);
6315 switch (code)
6317 case GT_EXPR:
6318 /* x > +Inf is always false, if we ignore sNaNs. */
6319 if (HONOR_SNANS (mode))
6320 return NULL_TREE;
6321 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6323 case LE_EXPR:
6324 /* x <= +Inf is always true, if we don't care about NaNs. */
6325 if (! HONOR_NANS (mode))
6326 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6328 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6329 arg0 = save_expr (arg0);
6330 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6332 case EQ_EXPR:
6333 case GE_EXPR:
6334 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6335 real_maxval (&max, neg, mode);
6336 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6337 arg0, build_real (TREE_TYPE (arg0), max));
6339 case LT_EXPR:
6340 /* x < +Inf is always equal to x <= DBL_MAX. */
6341 real_maxval (&max, neg, mode);
6342 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6343 arg0, build_real (TREE_TYPE (arg0), max));
6345 case NE_EXPR:
6346 /* x != +Inf is always equal to !(x > DBL_MAX). */
6347 real_maxval (&max, neg, mode);
6348 if (! HONOR_NANS (mode))
6349 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6350 arg0, build_real (TREE_TYPE (arg0), max));
6352 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6353 arg0, build_real (TREE_TYPE (arg0), max));
6354 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6356 default:
6357 break;
6360 return NULL_TREE;
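/* Why the NE_EXPR case above keeps the TRUTH_NOT_EXPR when NaNs are
   honored -- a sketch: for x = NaN, "x != +Inf" is true, and so is
   "!(x > DBL_MAX)" because every comparison with a NaN is false;
   rewriting it as "x <= DBL_MAX" would wrongly yield false.  */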
6363 /* Subroutine of fold() that optimizes comparisons of a division by
6364 a nonzero integer constant against an integer constant, i.e.
6365 X/C1 op C2.
6367 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6368 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6369 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6371 The function returns the constant folded tree if a simplification
6372 can be made, and NULL_TREE otherwise. */
6374 static tree
6375 fold_div_compare (location_t loc,
6376 enum tree_code code, tree type, tree arg0, tree arg1)
6378 tree prod, tmp, hi, lo;
6379 tree arg00 = TREE_OPERAND (arg0, 0);
6380 tree arg01 = TREE_OPERAND (arg0, 1);
6381 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6382 bool neg_overflow = false;
6383 bool overflow;
6385 /* We have to do this the hard way to detect unsigned overflow.
6386 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6387 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6388 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6389 neg_overflow = false;
6391 if (sign == UNSIGNED)
6393 tmp = int_const_binop (MINUS_EXPR, arg01,
6394 build_int_cst (TREE_TYPE (arg01), 1));
6395 lo = prod;
6397 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6398 val = wi::add (prod, tmp, sign, &overflow);
6399 hi = force_fit_type (TREE_TYPE (arg00), val,
6400 -1, overflow | TREE_OVERFLOW (prod));
6402 else if (tree_int_cst_sgn (arg01) >= 0)
6404 tmp = int_const_binop (MINUS_EXPR, arg01,
6405 build_int_cst (TREE_TYPE (arg01), 1));
6406 switch (tree_int_cst_sgn (arg1))
6408 case -1:
6409 neg_overflow = true;
6410 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6411 hi = prod;
6412 break;
6414 case 0:
6415 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6416 hi = tmp;
6417 break;
6419 case 1:
6420 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6421 lo = prod;
6422 break;
6424 default:
6425 gcc_unreachable ();
6428 else
6430 /* A negative divisor reverses the relational operators. */
6431 code = swap_tree_comparison (code);
6433 tmp = int_const_binop (PLUS_EXPR, arg01,
6434 build_int_cst (TREE_TYPE (arg01), 1));
6435 switch (tree_int_cst_sgn (arg1))
6437 case -1:
6438 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6439 lo = prod;
6440 break;
6442 case 0:
6443 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6444 lo = tmp;
6445 break;
6447 case 1:
6448 neg_overflow = true;
6449 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6450 hi = prod;
6451 break;
6453 default:
6454 gcc_unreachable ();
6458 switch (code)
6460 case EQ_EXPR:
6461 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6462 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6463 if (TREE_OVERFLOW (hi))
6464 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6465 if (TREE_OVERFLOW (lo))
6466 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6467 return build_range_check (loc, type, arg00, 1, lo, hi);
6469 case NE_EXPR:
6470 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6471 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6472 if (TREE_OVERFLOW (hi))
6473 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6474 if (TREE_OVERFLOW (lo))
6475 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6476 return build_range_check (loc, type, arg00, 0, lo, hi);
6478 case LT_EXPR:
6479 if (TREE_OVERFLOW (lo))
6481 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6482 return omit_one_operand_loc (loc, type, tmp, arg00);
6484 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6486 case LE_EXPR:
6487 if (TREE_OVERFLOW (hi))
6489 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6494 case GT_EXPR:
6495 if (TREE_OVERFLOW (hi))
6497 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6500 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6502 case GE_EXPR:
6503 if (TREE_OVERFLOW (lo))
6505 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6506 return omit_one_operand_loc (loc, type, tmp, arg00);
6508 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6510 default:
6511 break;
6514 return NULL_TREE;
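/* A worked instance of the division-compare fold -- a sketch with
   signed x and no overflow anywhere:

     x / 3 == 2

   gives prod = 6 and tmp = 2, hence lo = 6 and hi = 8, so the
   comparison becomes the range check  6 <= x && x <= 8 -- exactly
   the x for which truncating division by 3 yields 2.  */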
6518 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6519 equality/inequality test, then return a simplified form of the test
6520 using a sign test. Otherwise return NULL. TYPE is the desired
6521 result type. */
6523 static tree
6524 fold_single_bit_test_into_sign_test (location_t loc,
6525 enum tree_code code, tree arg0, tree arg1,
6526 tree result_type)
6528 /* If this is testing a single bit, we can optimize the test. */
6529 if ((code == NE_EXPR || code == EQ_EXPR)
6530 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6531 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6533 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6534 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6535 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6537 if (arg00 != NULL_TREE
6538 /* This is only a win if casting to a signed type is cheap,
6539 i.e. when arg00's type is not a partial mode. */
6540 && TYPE_PRECISION (TREE_TYPE (arg00))
6541 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6543 tree stype = signed_type_for (TREE_TYPE (arg00));
6544 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6545 result_type,
6546 fold_convert_loc (loc, stype, arg00),
6547 build_int_cst (stype, 0));
6551 return NULL_TREE;
6554 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6555 equality/inequality test, then return a simplified form of
6556 the test using shifts and logical operations. Otherwise return
6557 NULL. TYPE is the desired result type. */
6559 tree
6560 fold_single_bit_test (location_t loc, enum tree_code code,
6561 tree arg0, tree arg1, tree result_type)
6563 /* If this is testing a single bit, we can optimize the test. */
6564 if ((code == NE_EXPR || code == EQ_EXPR)
6565 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6566 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6568 tree inner = TREE_OPERAND (arg0, 0);
6569 tree type = TREE_TYPE (arg0);
6570 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6571 machine_mode operand_mode = TYPE_MODE (type);
6572 int ops_unsigned;
6573 tree signed_type, unsigned_type, intermediate_type;
6574 tree tem, one;
6576 /* First, see if we can fold the single bit test into a sign-bit
6577 test. */
6578 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6579 result_type);
6580 if (tem)
6581 return tem;
6583 /* Otherwise we have (A & C) != 0 where C is a single bit,
6584 convert that into ((A >> C2) & 1), where C2 = log2(C).
6585 Similarly for (A & C) == 0. */
6587 /* If INNER is a right shift of a constant and it plus BITNUM does
6588 not overflow, adjust BITNUM and INNER. */
6589 if (TREE_CODE (inner) == RSHIFT_EXPR
6590 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6591 && bitnum < TYPE_PRECISION (type)
6592 && wi::ltu_p (TREE_OPERAND (inner, 1),
6593 TYPE_PRECISION (type) - bitnum))
6595 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6596 inner = TREE_OPERAND (inner, 0);
6599 /* If we are going to be able to omit the AND below, we must do our
6600 operations as unsigned. If we must use the AND, we have a choice.
6601 Normally unsigned is faster, but for some machines signed is. */
6602 #ifdef LOAD_EXTEND_OP
6603 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6604 && !flag_syntax_only) ? 0 : 1;
6605 #else
6606 ops_unsigned = 1;
6607 #endif
6609 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6610 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6611 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6612 inner = fold_convert_loc (loc, intermediate_type, inner);
6614 if (bitnum != 0)
6615 inner = build2 (RSHIFT_EXPR, intermediate_type,
6616 inner, size_int (bitnum));
6618 one = build_int_cst (intermediate_type, 1);
6620 if (code == EQ_EXPR)
6621 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6623 /* Put the AND last so it can combine with more things. */
6624 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6626 /* Make sure to return the proper type. */
6627 inner = fold_convert_loc (loc, result_type, inner);
6629 return inner;
6631 return NULL_TREE;
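/* Two illustrative foldings of single-bit tests -- a sketch, with
   32-bit int a:

     (a & 0x80000000) != 0   ->  a < 0
         via the sign-test shortcut above, and
     (a & 8) != 0            ->  ((unsigned) a >> 3) & 1

   via the shift-and-mask path; for == 0 an extra XOR with 1 flips
   the extracted bit before the final AND.  */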
6634 /* Check whether we are allowed to reorder operands arg0 and arg1,
6635 such that the evaluation of arg1 occurs before arg0. */
6637 static bool
6638 reorder_operands_p (const_tree arg0, const_tree arg1)
6640 if (! flag_evaluation_order)
6641 return true;
6642 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6643 return true;
6644 return ! TREE_SIDE_EFFECTS (arg0)
6645 && ! TREE_SIDE_EFFECTS (arg1);
6648 /* Test whether it is preferable to swap two operands, ARG0 and
6649 ARG1, for example because ARG0 is an integer constant and ARG1
6650 isn't. If REORDER is true, only recommend swapping if we can
6651 evaluate the operands in reverse order. */
6653 bool
6654 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6656 if (CONSTANT_CLASS_P (arg1))
6657 return 0;
6658 if (CONSTANT_CLASS_P (arg0))
6659 return 1;
6661 STRIP_SIGN_NOPS (arg0);
6662 STRIP_SIGN_NOPS (arg1);
6664 if (TREE_CONSTANT (arg1))
6665 return 0;
6666 if (TREE_CONSTANT (arg0))
6667 return 1;
6669 if (reorder && flag_evaluation_order
6670 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6671 return 0;
6673 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6674 for commutative and comparison operators. Ensuring a canonical
6675 form allows the optimizers to find additional redundancies without
6676 having to explicitly check for both orderings. */
6677 if (TREE_CODE (arg0) == SSA_NAME
6678 && TREE_CODE (arg1) == SSA_NAME
6679 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6680 return 1;
6682 /* Put SSA_NAMEs last. */
6683 if (TREE_CODE (arg1) == SSA_NAME)
6684 return 0;
6685 if (TREE_CODE (arg0) == SSA_NAME)
6686 return 1;
6688 /* Put variables last. */
6689 if (DECL_P (arg1))
6690 return 0;
6691 if (DECL_P (arg0))
6692 return 1;
6694 return 0;
6697 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6698 ARG0 is extended to a wider type. */
6700 static tree
6701 fold_widened_comparison (location_t loc, enum tree_code code,
6702 tree type, tree arg0, tree arg1)
6704 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6705 tree arg1_unw;
6706 tree shorter_type, outer_type;
6707 tree min, max;
6708 bool above, below;
6710 if (arg0_unw == arg0)
6711 return NULL_TREE;
6712 shorter_type = TREE_TYPE (arg0_unw);
6714 #ifdef HAVE_canonicalize_funcptr_for_compare
6715 /* Disable this optimization if we're casting a function pointer
6716 type on targets that require function pointer canonicalization. */
6717 if (HAVE_canonicalize_funcptr_for_compare
6718 && TREE_CODE (shorter_type) == POINTER_TYPE
6719 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6720 return NULL_TREE;
6721 #endif
6723 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6724 return NULL_TREE;
6726 arg1_unw = get_unwidened (arg1, NULL_TREE);
6728 /* If possible, express the comparison in the shorter mode. */
6729 if ((code == EQ_EXPR || code == NE_EXPR
6730 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6731 && (TREE_TYPE (arg1_unw) == shorter_type
6732 || ((TYPE_PRECISION (shorter_type)
6733 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6734 && (TYPE_UNSIGNED (shorter_type)
6735 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6736 || (TREE_CODE (arg1_unw) == INTEGER_CST
6737 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6738 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6739 && int_fits_type_p (arg1_unw, shorter_type))))
6740 return fold_build2_loc (loc, code, type, arg0_unw,
6741 fold_convert_loc (loc, shorter_type, arg1_unw));
6743 if (TREE_CODE (arg1_unw) != INTEGER_CST
6744 || TREE_CODE (shorter_type) != INTEGER_TYPE
6745 || !int_fits_type_p (arg1_unw, shorter_type))
6746 return NULL_TREE;
6748 /* If we are comparing with an integer that does not fit into the range
6749 of the shorter type, the result is known. */
6750 outer_type = TREE_TYPE (arg1_unw);
6751 min = lower_bound_in_type (outer_type, shorter_type);
6752 max = upper_bound_in_type (outer_type, shorter_type);
6754 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6755 max, arg1_unw));
6756 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6757 arg1_unw, min));
6759 switch (code)
6761 case EQ_EXPR:
6762 if (above || below)
6763 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6764 break;
6766 case NE_EXPR:
6767 if (above || below)
6768 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6769 break;
6771 case LT_EXPR:
6772 case LE_EXPR:
6773 if (above)
6774 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6775 else if (below)
6776 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6778 case GT_EXPR:
6779 case GE_EXPR:
6780 if (above)
6781 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6782 else if (below)
6783 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6785 default:
6786 break;
6789 return NULL_TREE;
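/* An example of the out-of-range case above -- a sketch: with

     unsigned char c;  ...  (int) c == 300

   the constant exceeds the range of the shorter type (max 255), so
   "above" holds and the EQ_EXPR arm folds the whole comparison to
   constant 0 while preserving any side effects of the operand.  */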
6792 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6793 ARG0 just the signedness is changed. */
6795 static tree
6796 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6797 tree arg0, tree arg1)
6799 tree arg0_inner;
6800 tree inner_type, outer_type;
6802 if (!CONVERT_EXPR_P (arg0))
6803 return NULL_TREE;
6805 outer_type = TREE_TYPE (arg0);
6806 arg0_inner = TREE_OPERAND (arg0, 0);
6807 inner_type = TREE_TYPE (arg0_inner);
6809 #ifdef HAVE_canonicalize_funcptr_for_compare
6810 /* Disable this optimization if we're casting a function pointer
6811 type on targets that require function pointer canonicalization. */
6812 if (HAVE_canonicalize_funcptr_for_compare
6813 && TREE_CODE (inner_type) == POINTER_TYPE
6814 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6815 return NULL_TREE;
6816 #endif
6818 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6819 return NULL_TREE;
6821 if (TREE_CODE (arg1) != INTEGER_CST
6822 && !(CONVERT_EXPR_P (arg1)
6823 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6824 return NULL_TREE;
6826 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6827 && code != NE_EXPR
6828 && code != EQ_EXPR)
6829 return NULL_TREE;
6831 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6832 return NULL_TREE;
6834 if (TREE_CODE (arg1) == INTEGER_CST)
6835 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6836 TREE_OVERFLOW (arg1));
6837 else
6838 arg1 = fold_convert_loc (loc, inner_type, arg1);
6840 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
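/* For instance -- a sketch: with int x, the comparison

     (unsigned) x == 5u

   changes only the signedness of x, and equality is insensitive to
   that, so it folds to  x == 5.  Relational codes are only handled
   when the signedness actually matches.  */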
6844 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6845 means A >= Y && A != MAX, but in this case we know that
6846 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6848 static tree
6849 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6851 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6853 if (TREE_CODE (bound) == LT_EXPR)
6854 a = TREE_OPERAND (bound, 0);
6855 else if (TREE_CODE (bound) == GT_EXPR)
6856 a = TREE_OPERAND (bound, 1);
6857 else
6858 return NULL_TREE;
6860 typea = TREE_TYPE (a);
6861 if (!INTEGRAL_TYPE_P (typea)
6862 && !POINTER_TYPE_P (typea))
6863 return NULL_TREE;
6865 if (TREE_CODE (ineq) == LT_EXPR)
6867 a1 = TREE_OPERAND (ineq, 1);
6868 y = TREE_OPERAND (ineq, 0);
6870 else if (TREE_CODE (ineq) == GT_EXPR)
6872 a1 = TREE_OPERAND (ineq, 0);
6873 y = TREE_OPERAND (ineq, 1);
6875 else
6876 return NULL_TREE;
6878 if (TREE_TYPE (a1) != typea)
6879 return NULL_TREE;
6881 if (POINTER_TYPE_P (typea))
6883 /* Convert the pointer types to integers before taking the difference. */
6884 tree ta = fold_convert_loc (loc, ssizetype, a);
6885 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6886 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6888 else
6889 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6891 if (!diff || !integer_onep (diff))
6892 return NULL_TREE;
6894 return fold_build2_loc (loc, GE_EXPR, type, a, y);
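/* A sketch of the intended use: in

     a < n && a + 1 > y

   BOUND is "a < n" and INEQ is "a + 1 > y"; the difference
   (a + 1) - a folds to 1, so the second conjunct becomes a >= y
   without worrying about a + 1 wrapping at the maximum value,
   which a < n already rules out.  */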
6897 /* Fold a sum or difference in which at least one operand is a multiplication.
6898 Returns the folded tree or NULL if no simplification could be made. */
6900 static tree
6901 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6902 tree arg0, tree arg1)
6904 tree arg00, arg01, arg10, arg11;
6905 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6907 /* (A * C) +- (B * C) -> (A+-B) * C.
6908 (A * C) +- A -> A * (C+-1).
6909 We are most concerned about the case where C is a constant,
6910 but other combinations show up during loop reduction. Since
6911 it is not difficult, try all four possibilities. */
6913 if (TREE_CODE (arg0) == MULT_EXPR)
6915 arg00 = TREE_OPERAND (arg0, 0);
6916 arg01 = TREE_OPERAND (arg0, 1);
6918 else if (TREE_CODE (arg0) == INTEGER_CST)
6920 arg00 = build_one_cst (type);
6921 arg01 = arg0;
6923 else
6925 /* We cannot generate constant 1 for fract. */
6926 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6927 return NULL_TREE;
6928 arg00 = arg0;
6929 arg01 = build_one_cst (type);
6931 if (TREE_CODE (arg1) == MULT_EXPR)
6933 arg10 = TREE_OPERAND (arg1, 0);
6934 arg11 = TREE_OPERAND (arg1, 1);
6936 else if (TREE_CODE (arg1) == INTEGER_CST)
6938 arg10 = build_one_cst (type);
6939 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6940 the purpose of this canonicalization. */
6941 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6942 && negate_expr_p (arg1)
6943 && code == PLUS_EXPR)
6945 arg11 = negate_expr (arg1);
6946 code = MINUS_EXPR;
6948 else
6949 arg11 = arg1;
6951 else
6953 /* We cannot generate constant 1 for fract. */
6954 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6955 return NULL_TREE;
6956 arg10 = arg1;
6957 arg11 = build_one_cst (type);
6959 same = NULL_TREE;
6961 if (operand_equal_p (arg01, arg11, 0))
6962 same = arg01, alt0 = arg00, alt1 = arg10;
6963 else if (operand_equal_p (arg00, arg10, 0))
6964 same = arg00, alt0 = arg01, alt1 = arg11;
6965 else if (operand_equal_p (arg00, arg11, 0))
6966 same = arg00, alt0 = arg01, alt1 = arg10;
6967 else if (operand_equal_p (arg01, arg10, 0))
6968 same = arg01, alt0 = arg00, alt1 = arg11;
6970 /* No identical multiplicands; see if we can find a common
6971 power-of-two factor in non-power-of-two multiplies. This
6972 can help in multi-dimensional array access. */
6973 else if (tree_fits_shwi_p (arg01)
6974 && tree_fits_shwi_p (arg11))
6976 HOST_WIDE_INT int01, int11, tmp;
6977 bool swap = false;
6978 tree maybe_same;
6979 int01 = tree_to_shwi (arg01);
6980 int11 = tree_to_shwi (arg11);
6982 /* Move min of absolute values to int11. */
6983 if (absu_hwi (int01) < absu_hwi (int11))
6985 tmp = int01, int01 = int11, int11 = tmp;
6986 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6987 maybe_same = arg01;
6988 swap = true;
6990 else
6991 maybe_same = arg11;
6993 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6994 /* The remainder should not be a constant, otherwise we
6995 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6996 increased the number of multiplications necessary. */
6997 && TREE_CODE (arg10) != INTEGER_CST)
6999 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7000 build_int_cst (TREE_TYPE (arg00),
7001 int01 / int11));
7002 alt1 = arg10;
7003 same = maybe_same;
7004 if (swap)
7005 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7009 if (same)
7010 return fold_build2_loc (loc, MULT_EXPR, type,
7011 fold_build2_loc (loc, code, type,
7012 fold_convert_loc (loc, type, alt0),
7013 fold_convert_loc (loc, type, alt1)),
7014 fold_convert_loc (loc, type, same));
7016 return NULL_TREE;
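/* A sketch of the power-of-two path above: in  a * 12 + b * 4  the
   multiplicands differ, but 4 is a power of two dividing 12, so
   arg00 is rewritten as a * 3 and the sum refactors to

     (a * 3 + b) * 4

   trading two multiplications for one plus a cheap scaling, which
   helps multi-dimensional array indexing.  */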
7019 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7020 specified by EXPR into the buffer PTR of length LEN bytes.
7021 Return the number of bytes placed in the buffer, or zero
7022 upon failure. */
7024 static int
7025 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7027 tree type = TREE_TYPE (expr);
7028 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7029 int byte, offset, word, words;
7030 unsigned char value;
7032 if ((off == -1 && total_bytes > len)
7033 || off >= total_bytes)
7034 return 0;
7035 if (off == -1)
7036 off = 0;
7037 words = total_bytes / UNITS_PER_WORD;
7039 for (byte = 0; byte < total_bytes; byte++)
7041 int bitpos = byte * BITS_PER_UNIT;
7042 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7043 number of bytes. */
7044 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7046 if (total_bytes > UNITS_PER_WORD)
7048 word = byte / UNITS_PER_WORD;
7049 if (WORDS_BIG_ENDIAN)
7050 word = (words - 1) - word;
7051 offset = word * UNITS_PER_WORD;
7052 if (BYTES_BIG_ENDIAN)
7053 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7054 else
7055 offset += byte % UNITS_PER_WORD;
7057 else
7058 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7059 if (offset >= off
7060 && offset - off < len)
7061 ptr[offset - off] = value;
7063 return MIN (len, total_bytes - off);
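/* Byte-level sketch of the loop above, assuming a 32-bit int and
   8-bit units: encoding 0x01020304 for a little-endian target fills

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   while a big-endian target reverses the order; the word shuffling
   only matters once total_bytes exceeds UNITS_PER_WORD.  */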
7067 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7068 specified by EXPR into the buffer PTR of length LEN bytes.
7069 Return the number of bytes placed in the buffer, or zero
7070 upon failure. */
7072 static int
7073 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7075 tree type = TREE_TYPE (expr);
7076 machine_mode mode = TYPE_MODE (type);
7077 int total_bytes = GET_MODE_SIZE (mode);
7078 FIXED_VALUE_TYPE value;
7079 tree i_value, i_type;
7081 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7082 return 0;
7084 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7086 if (NULL_TREE == i_type
7087 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7088 return 0;
7090 value = TREE_FIXED_CST (expr);
7091 i_value = double_int_to_tree (i_type, value.data);
7093 return native_encode_int (i_value, ptr, len, off);
7097 /* Subroutine of native_encode_expr. Encode the REAL_CST
7098 specified by EXPR into the buffer PTR of length LEN bytes.
7099 Return the number of bytes placed in the buffer, or zero
7100 upon failure. */
7102 static int
7103 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7105 tree type = TREE_TYPE (expr);
7106 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7107 int byte, offset, word, words, bitpos;
7108 unsigned char value;
7110 /* There are always 32 bits in each long, no matter the size of
7111 the host's long. We handle floating point representations with
7112 up to 192 bits. */
7113 long tmp[6];
7115 if ((off == -1 && total_bytes > len)
7116 || off >= total_bytes)
7117 return 0;
7118 if (off == -1)
7119 off = 0;
7120 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7122 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7124 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7125 bitpos += BITS_PER_UNIT)
7127 byte = (bitpos / BITS_PER_UNIT) & 3;
7128 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7130 if (UNITS_PER_WORD < 4)
7132 word = byte / UNITS_PER_WORD;
7133 if (WORDS_BIG_ENDIAN)
7134 word = (words - 1) - word;
7135 offset = word * UNITS_PER_WORD;
7136 if (BYTES_BIG_ENDIAN)
7137 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7138 else
7139 offset += byte % UNITS_PER_WORD;
7141 else
7142 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7143 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7144 if (offset >= off
7145 && offset - off < len)
7146 ptr[offset - off] = value;
7148 return MIN (len, total_bytes - off);
7151 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7152 specified by EXPR into the buffer PTR of length LEN bytes.
7153 Return the number of bytes placed in the buffer, or zero
7154 upon failure. */
7156 static int
7157 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7159 int rsize, isize;
7160 tree part;
7162 part = TREE_REALPART (expr);
7163 rsize = native_encode_expr (part, ptr, len, off);
7164 if (off == -1
7165 && rsize == 0)
7166 return 0;
7167 part = TREE_IMAGPART (expr);
7168 if (off != -1)
7169 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7170 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7171 if (off == -1
7172 && isize != rsize)
7173 return 0;
7174 return rsize + isize;
7178 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7179 specified by EXPR into the buffer PTR of length LEN bytes.
7180 Return the number of bytes placed in the buffer, or zero
7181 upon failure. */
7183 static int
7184 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7186 unsigned i, count;
7187 int size, offset;
7188 tree itype, elem;
7190 offset = 0;
7191 count = VECTOR_CST_NELTS (expr);
7192 itype = TREE_TYPE (TREE_TYPE (expr));
7193 size = GET_MODE_SIZE (TYPE_MODE (itype));
7194 for (i = 0; i < count; i++)
7196 if (off >= size)
7198 off -= size;
7199 continue;
7201 elem = VECTOR_CST_ELT (expr, i);
7202 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7203 if ((off == -1 && res != size)
7204 || res == 0)
7205 return 0;
7206 offset += res;
7207 if (offset >= len)
7208 return offset;
7209 if (off != -1)
7210 off = 0;
7212 return offset;
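/* For illustration: encoding a V4SI vector with OFF == 6 skips the
   first 4-byte element entirely (OFF drops to 2), emits the last two
   bytes of the second element, and continues with the remaining
   elements at OFF == 0.  */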
7216 /* Subroutine of native_encode_expr. Encode the STRING_CST
7217 specified by EXPR into the buffer PTR of length LEN bytes.
7218 Return the number of bytes placed in the buffer, or zero
7219 upon failure. */
7221 static int
7222 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7224 tree type = TREE_TYPE (expr);
7225 HOST_WIDE_INT total_bytes;
7227 if (TREE_CODE (type) != ARRAY_TYPE
7228 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7229 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7230 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7231 return 0;
7232 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7233 if ((off == -1 && total_bytes > len)
7234 || off >= total_bytes)
7235 return 0;
7236 if (off == -1)
7237 off = 0;
7238 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7240 int written = 0;
7241 if (off < TREE_STRING_LENGTH (expr))
7243 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7244 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7246 memset (ptr + written, 0,
7247 MIN (total_bytes - written, len - written));
7249 else
7250 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7251 return MIN (total_bytes - off, len);
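/* For illustration (assuming LEN >= 8): a STRING_CST "ab" of three
   bytes including the terminating NUL, with type char[8], copies
   bytes 0..2 from the string, zero-fills bytes 3..7, and returns 8.  */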
7255 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7256 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7257 buffer PTR of length LEN bytes. If OFF is not -1 then start
7258 the encoding at byte offset OFF and encode at most LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero upon failure. */
7261 int
7262 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7264 switch (TREE_CODE (expr))
7266 case INTEGER_CST:
7267 return native_encode_int (expr, ptr, len, off);
7269 case REAL_CST:
7270 return native_encode_real (expr, ptr, len, off);
7272 case FIXED_CST:
7273 return native_encode_fixed (expr, ptr, len, off);
7275 case COMPLEX_CST:
7276 return native_encode_complex (expr, ptr, len, off);
7278 case VECTOR_CST:
7279 return native_encode_vector (expr, ptr, len, off);
7281 case STRING_CST:
7282 return native_encode_string (expr, ptr, len, off);
7284 default:
7285 return 0;
7290 /* Subroutine of native_interpret_expr. Interpret the contents of
7291 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7292 If the buffer cannot be interpreted, return NULL_TREE. */
7294 static tree
7295 native_interpret_int (tree type, const unsigned char *ptr, int len)
7297 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7299 if (total_bytes > len
7300 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7301 return NULL_TREE;
7303 wide_int result = wi::from_buffer (ptr, total_bytes);
7305 return wide_int_to_tree (type, result);
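/* For illustration (assuming a little-endian target): interpreting
   the bytes { 0x01, 0x02, 0x03, 0x04 } as a 32-bit int yields the
   INTEGER_CST 0x04030201, since wi::from_buffer reads the buffer in
   target byte order.  */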
7309 /* Subroutine of native_interpret_expr. Interpret the contents of
7310 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7311 If the buffer cannot be interpreted, return NULL_TREE. */
7313 static tree
7314 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7316 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7317 double_int result;
7318 FIXED_VALUE_TYPE fixed_value;
7320 if (total_bytes > len
7321 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7322 return NULL_TREE;
7324 result = double_int::from_buffer (ptr, total_bytes);
7325 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7327 return build_fixed (type, fixed_value);
7331 /* Subroutine of native_interpret_expr. Interpret the contents of
7332 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7333 If the buffer cannot be interpreted, return NULL_TREE. */
7335 static tree
7336 native_interpret_real (tree type, const unsigned char *ptr, int len)
7338 machine_mode mode = TYPE_MODE (type);
7339 int total_bytes = GET_MODE_SIZE (mode);
7340 int byte, offset, word, words, bitpos;
7341 unsigned char value;
7342 /* There are always 32 bits in each long, no matter the size of
7343 the host's long. We handle floating point representations with
7344 up to 192 bits. */
7345 REAL_VALUE_TYPE r;
7346 long tmp[6];
7348 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7349 if (total_bytes > len || total_bytes > 24)
7350 return NULL_TREE;
7351 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7353 memset (tmp, 0, sizeof (tmp));
7354 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7355 bitpos += BITS_PER_UNIT)
7357 byte = (bitpos / BITS_PER_UNIT) & 3;
7358 if (UNITS_PER_WORD < 4)
7360 word = byte / UNITS_PER_WORD;
7361 if (WORDS_BIG_ENDIAN)
7362 word = (words - 1) - word;
7363 offset = word * UNITS_PER_WORD;
7364 if (BYTES_BIG_ENDIAN)
7365 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7366 else
7367 offset += byte % UNITS_PER_WORD;
7369 else
7370 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7371 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7373 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7376 real_from_target (&r, tmp, mode);
7377 return build_real (type, r);
7381 /* Subroutine of native_interpret_expr. Interpret the contents of
7382 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7383 If the buffer cannot be interpreted, return NULL_TREE. */
7385 static tree
7386 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7388 tree etype, rpart, ipart;
7389 int size;
7391 etype = TREE_TYPE (type);
7392 size = GET_MODE_SIZE (TYPE_MODE (etype));
7393 if (size * 2 > len)
7394 return NULL_TREE;
7395 rpart = native_interpret_expr (etype, ptr, size);
7396 if (!rpart)
7397 return NULL_TREE;
7398 ipart = native_interpret_expr (etype, ptr+size, size);
7399 if (!ipart)
7400 return NULL_TREE;
7401 return build_complex (type, rpart, ipart);
7405 /* Subroutine of native_interpret_expr. Interpret the contents of
7406 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7407 If the buffer cannot be interpreted, return NULL_TREE. */
7409 static tree
7410 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7412 tree etype, elem;
7413 int i, size, count;
7414 tree *elements;
7416 etype = TREE_TYPE (type);
7417 size = GET_MODE_SIZE (TYPE_MODE (etype));
7418 count = TYPE_VECTOR_SUBPARTS (type);
7419 if (size * count > len)
7420 return NULL_TREE;
7422 elements = XALLOCAVEC (tree, count);
7423 for (i = count - 1; i >= 0; i--)
7425 elem = native_interpret_expr (etype, ptr+(i*size), size);
7426 if (!elem)
7427 return NULL_TREE;
7428 elements[i] = elem;
7430 return build_vector (type, elements);
7434 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7435 the buffer PTR of length LEN as a constant of type TYPE. For
7436 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7437 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7438 return NULL_TREE. */
7440 tree
7441 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7443 switch (TREE_CODE (type))
7445 case INTEGER_TYPE:
7446 case ENUMERAL_TYPE:
7447 case BOOLEAN_TYPE:
7448 case POINTER_TYPE:
7449 case REFERENCE_TYPE:
7450 return native_interpret_int (type, ptr, len);
7452 case REAL_TYPE:
7453 return native_interpret_real (type, ptr, len);
7455 case FIXED_POINT_TYPE:
7456 return native_interpret_fixed (type, ptr, len);
7458 case COMPLEX_TYPE:
7459 return native_interpret_complex (type, ptr, len);
7461 case VECTOR_TYPE:
7462 return native_interpret_vector (type, ptr, len);
7464 default:
7465 return NULL_TREE;
7469 /* Returns true if we can interpret the contents of a native encoding
7470 as TYPE. */
7472 static bool
7473 can_native_interpret_type_p (tree type)
7475 switch (TREE_CODE (type))
7477 case INTEGER_TYPE:
7478 case ENUMERAL_TYPE:
7479 case BOOLEAN_TYPE:
7480 case POINTER_TYPE:
7481 case REFERENCE_TYPE:
7482 case FIXED_POINT_TYPE:
7483 case REAL_TYPE:
7484 case COMPLEX_TYPE:
7485 case VECTOR_TYPE:
7486 return true;
7487 default:
7488 return false;
7492 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7493 TYPE at compile-time. If we're unable to perform the conversion
7494 return NULL_TREE. */
7496 static tree
7497 fold_view_convert_expr (tree type, tree expr)
7499 /* We support up to 512-bit values (for V8DFmode). */
7500 unsigned char buffer[64];
7501 int len;
7503 /* Check that the host and target are sane. */
7504 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7505 return NULL_TREE;
7507 len = native_encode_expr (expr, buffer, sizeof (buffer));
7508 if (len == 0)
7509 return NULL_TREE;
7511 return native_interpret_expr (type, buffer, len);
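/* For illustration (assuming a little-endian target with IEEE single
   precision float): fold_view_convert_expr turns
   VIEW_CONVERT_EXPR <int> (1.0f) into the INTEGER_CST 0x3f800000 by
   round-tripping the constant through its target byte image with
   native_encode_expr and native_interpret_expr.  */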
7514 /* Build an expression for the address of T. Folds away INDIRECT_REF
7515 to avoid confusing the gimplify process. */
7517 tree
7518 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7520 /* The size of the object is not relevant when talking about its address. */
7521 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7522 t = TREE_OPERAND (t, 0);
7524 if (TREE_CODE (t) == INDIRECT_REF)
7526 t = TREE_OPERAND (t, 0);
7528 if (TREE_TYPE (t) != ptrtype)
7529 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7531 else if (TREE_CODE (t) == MEM_REF
7532 && integer_zerop (TREE_OPERAND (t, 1)))
7533 return TREE_OPERAND (t, 0);
7534 else if (TREE_CODE (t) == MEM_REF
7535 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7536 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7537 TREE_OPERAND (t, 0),
7538 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7539 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7541 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7543 if (TREE_TYPE (t) != ptrtype)
7544 t = fold_convert_loc (loc, ptrtype, t);
7546 else
7547 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7549 return t;
7552 /* Build an expression for the address of T. */
7554 tree
7555 build_fold_addr_expr_loc (location_t loc, tree t)
7557 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7559 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
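/* For illustration: build_fold_addr_expr_loc applied to *p folds back
   to p (with a NOP_EXPR cast when the pointer types differ), and the
   address of MEM_REF [p, 0] likewise folds to p, so the gimplifier
   never sees an ADDR_EXPR wrapped around such dereferences.  */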
7562 static bool vec_cst_ctor_to_array (tree, tree *);
7564 /* Fold a unary expression of code CODE and type TYPE with operand
7565 OP0. Return the folded expression if folding is successful.
7566 Otherwise, return NULL_TREE. */
7568 tree
7569 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7571 tree tem;
7572 tree arg0;
7573 enum tree_code_class kind = TREE_CODE_CLASS (code);
7575 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7576 && TREE_CODE_LENGTH (code) == 1);
7578 tem = generic_simplify (loc, code, type, op0);
7579 if (tem)
7580 return tem;
7582 arg0 = op0;
7583 if (arg0)
7585 if (CONVERT_EXPR_CODE_P (code)
7586 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7588 /* Don't use STRIP_NOPS, because signedness of argument type
7589 matters. */
7590 STRIP_SIGN_NOPS (arg0);
7592 else
7594 /* Strip any conversions that don't change the mode. This
7595 is safe for every expression, except for a comparison
7596 expression because its signedness is derived from its
7597 operands.
7599 Note that this is done as an internal manipulation within
7600 the constant folder, in order to find the simplest
7601 representation of the arguments so that their form can be
7602 studied. In any case, the appropriate type conversions
7603 should be put back in the tree that will get out of the
7604 constant folder. */
7605 STRIP_NOPS (arg0);
7609 if (TREE_CODE_CLASS (code) == tcc_unary)
7611 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7612 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7613 fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc, TREE_TYPE (op0),
7615 TREE_OPERAND (arg0, 1))));
7616 else if (TREE_CODE (arg0) == COND_EXPR)
7618 tree arg01 = TREE_OPERAND (arg0, 1);
7619 tree arg02 = TREE_OPERAND (arg0, 2);
7620 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7621 arg01 = fold_build1_loc (loc, code, type,
7622 fold_convert_loc (loc,
7623 TREE_TYPE (op0), arg01));
7624 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7625 arg02 = fold_build1_loc (loc, code, type,
7626 fold_convert_loc (loc,
7627 TREE_TYPE (op0), arg02));
7628 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7629 arg01, arg02);
7631 /* If this was a conversion, and all we did was to move into
7632 inside the COND_EXPR, bring it back out. But leave it if
7633 it is a conversion from integer to integer and the
7634 result precision is no wider than a word since such a
7635 conversion is cheap and may be optimized away by combine,
7636 while it couldn't if it were outside the COND_EXPR. Then return
7637 so we don't get into an infinite recursion loop taking the
7638 conversion out and then back in. */
7640 if ((CONVERT_EXPR_CODE_P (code)
7641 || code == NON_LVALUE_EXPR)
7642 && TREE_CODE (tem) == COND_EXPR
7643 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7644 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7646 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7647 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7648 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7649 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7650 && (INTEGRAL_TYPE_P
7651 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7652 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7653 || flag_syntax_only))
7654 tem = build1_loc (loc, code, type,
7655 build3 (COND_EXPR,
7656 TREE_TYPE (TREE_OPERAND
7657 (TREE_OPERAND (tem, 1), 0)),
7658 TREE_OPERAND (tem, 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7660 TREE_OPERAND (TREE_OPERAND (tem, 2),
7661 0)));
7662 return tem;
7666 switch (code)
7668 case NON_LVALUE_EXPR:
7669 if (!maybe_lvalue_p (op0))
7670 return fold_convert_loc (loc, type, op0);
7671 return NULL_TREE;
7673 CASE_CONVERT:
7674 case FLOAT_EXPR:
7675 case FIX_TRUNC_EXPR:
7676 if (COMPARISON_CLASS_P (op0))
7678 /* If we have (type) (a CMP b) and type is an integral type, return
7679 new expression involving the new type. Canonicalize
7680 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7681 non-integral type.
7682 Do not fold the result as that would not simplify further, also
7683 folding again results in recursions. */
7684 if (TREE_CODE (type) == BOOLEAN_TYPE)
7685 return build2_loc (loc, TREE_CODE (op0), type,
7686 TREE_OPERAND (op0, 0),
7687 TREE_OPERAND (op0, 1));
7688 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7689 && TREE_CODE (type) != VECTOR_TYPE)
7690 return build3_loc (loc, COND_EXPR, type, op0,
7691 constant_boolean_node (true, type),
7692 constant_boolean_node (false, type));
7695 /* Handle cases of two conversions in a row. */
7696 if (CONVERT_EXPR_P (op0))
7698 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7699 tree inter_type = TREE_TYPE (op0);
7700 int inside_int = INTEGRAL_TYPE_P (inside_type);
7701 int inside_ptr = POINTER_TYPE_P (inside_type);
7702 int inside_float = FLOAT_TYPE_P (inside_type);
7703 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7704 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7705 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7706 int inter_int = INTEGRAL_TYPE_P (inter_type);
7707 int inter_ptr = POINTER_TYPE_P (inter_type);
7708 int inter_float = FLOAT_TYPE_P (inter_type);
7709 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7710 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7711 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7712 int final_int = INTEGRAL_TYPE_P (type);
7713 int final_ptr = POINTER_TYPE_P (type);
7714 int final_float = FLOAT_TYPE_P (type);
7715 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7716 unsigned int final_prec = TYPE_PRECISION (type);
7717 int final_unsignedp = TYPE_UNSIGNED (type);
7719 /* In addition to the cases of two conversions in a row
7720 handled below, if we are converting something to its own
7721 type via an object of identical or wider precision, neither
7722 conversion is needed. */
7723 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7724 && (((inter_int || inter_ptr) && final_int)
7725 || (inter_float && final_float))
7726 && inter_prec >= final_prec)
7727 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7729 /* Likewise, if the intermediate and initial types are either both
7730 float or both integer, we don't need the middle conversion if the
7731 former is wider than the latter and doesn't change the signedness
7732 (for integers). Avoid this if the final type is a pointer since
7733 then we sometimes need the middle conversion. Likewise if the
7734 final type has a precision not equal to the size of its mode. */
7735 if (((inter_int && inside_int)
7736 || (inter_float && inside_float)
7737 || (inter_vec && inside_vec))
7738 && inter_prec >= inside_prec
7739 && (inter_float || inter_vec
7740 || inter_unsignedp == inside_unsignedp)
7741 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7742 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7743 && ! final_ptr
7744 && (! final_vec || inter_prec == inside_prec))
7745 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7747 /* If we have a sign-extension of a zero-extended value, we can
7748 replace that by a single zero-extension. Likewise if the
7749 final conversion does not change precision we can drop the
7750 intermediate conversion. */
7751 if (inside_int && inter_int && final_int
7752 && ((inside_prec < inter_prec && inter_prec < final_prec
7753 && inside_unsignedp && !inter_unsignedp)
7754 || final_prec == inter_prec))
7755 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7757 /* Two conversions in a row are not needed unless:
7758 - some conversion is floating-point (overstrict for now), or
7759 - some conversion is a vector (overstrict for now), or
7760 - the intermediate type is narrower than both initial and
7761 final, or
7762 - the intermediate type and innermost type differ in signedness,
7763 and the outermost type is wider than the intermediate, or
7764 - the initial type is a pointer type and the precisions of the
7765 intermediate and final types differ, or
7766 - the final type is a pointer type and the precisions of the
7767 initial and intermediate types differ. */
7768 if (! inside_float && ! inter_float && ! final_float
7769 && ! inside_vec && ! inter_vec && ! final_vec
7770 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7771 && ! (inside_int && inter_int
7772 && inter_unsignedp != inside_unsignedp
7773 && inter_prec < final_prec)
7774 && ((inter_unsignedp && inter_prec > inside_prec)
7775 == (final_unsignedp && final_prec > inter_prec))
7776 && ! (inside_ptr && inter_prec != final_prec)
7777 && ! (final_ptr && inside_prec != inter_prec)
7778 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7779 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7780 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7783 /* Handle (T *)&A.B.C for A being of type T and B and C
7784 living at offset zero. This occurs frequently in
7785 C++ upcasting and then accessing the base. */
7786 if (TREE_CODE (op0) == ADDR_EXPR
7787 && POINTER_TYPE_P (type)
7788 && handled_component_p (TREE_OPERAND (op0, 0)))
7790 HOST_WIDE_INT bitsize, bitpos;
7791 tree offset;
7792 machine_mode mode;
7793 int unsignedp, volatilep;
7794 tree base = TREE_OPERAND (op0, 0);
7795 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7796 &mode, &unsignedp, &volatilep, false);
7797 /* If the reference was to a (constant) zero offset, we can use
7798 the address of the base if it has the same base type
7799 as the result type and the pointer type is unqualified. */
7800 if (! offset && bitpos == 0
7801 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7802 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7803 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7804 return fold_convert_loc (loc, type,
7805 build_fold_addr_expr_loc (loc, base));
7808 if (TREE_CODE (op0) == MODIFY_EXPR
7809 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7810 /* Detect assigning a bitfield. */
7811 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7812 && DECL_BIT_FIELD
7813 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7815 /* Don't leave an assignment inside a conversion
7816 unless assigning a bitfield. */
7817 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7818 /* First do the assignment, then return converted constant. */
7819 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7820 TREE_NO_WARNING (tem) = 1;
7821 TREE_USED (tem) = 1;
7822 return tem;
7825 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7826 constants (if x has signed type, the sign bit cannot be set
7827 in c). This folds extension into the BIT_AND_EXPR.
7828 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7829 very likely don't have maximal range for their precision and this
7830 transformation effectively doesn't preserve non-maximal ranges. */
7831 if (TREE_CODE (type) == INTEGER_TYPE
7832 && TREE_CODE (op0) == BIT_AND_EXPR
7833 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7835 tree and_expr = op0;
7836 tree and0 = TREE_OPERAND (and_expr, 0);
7837 tree and1 = TREE_OPERAND (and_expr, 1);
7838 int change = 0;
7840 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7841 || (TYPE_PRECISION (type)
7842 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7843 change = 1;
7844 else if (TYPE_PRECISION (TREE_TYPE (and1))
7845 <= HOST_BITS_PER_WIDE_INT
7846 && tree_fits_uhwi_p (and1))
7848 unsigned HOST_WIDE_INT cst;
7850 cst = tree_to_uhwi (and1);
7851 cst &= HOST_WIDE_INT_M1U
7852 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7853 change = (cst == 0);
7854 #ifdef LOAD_EXTEND_OP
7855 if (change
7856 && !flag_syntax_only
7857 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7858 == ZERO_EXTEND))
7860 tree uns = unsigned_type_for (TREE_TYPE (and0));
7861 and0 = fold_convert_loc (loc, uns, and0);
7862 and1 = fold_convert_loc (loc, uns, and1);
7864 #endif
7866 if (change)
7868 tem = force_fit_type (type, wi::to_widest (and1), 0,
7869 TREE_OVERFLOW (and1));
7870 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7871 fold_convert_loc (loc, type, and0), tem);
7875 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7876 when one of the new casts will fold away. Conservatively we assume
7877 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7878 if (POINTER_TYPE_P (type)
7879 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7880 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7881 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7882 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7883 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7885 tree arg00 = TREE_OPERAND (arg0, 0);
7886 tree arg01 = TREE_OPERAND (arg0, 1);
7888 return fold_build_pointer_plus_loc
7889 (loc, fold_convert_loc (loc, type, arg00), arg01);
7892 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7893 of the same precision, and X is an integer type not narrower than
7894 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7895 if (INTEGRAL_TYPE_P (type)
7896 && TREE_CODE (op0) == BIT_NOT_EXPR
7897 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7898 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7899 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7901 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7902 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7903 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7904 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7905 fold_convert_loc (loc, type, tem));
7908 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7909 type of X and Y (integer types only). */
7910 if (INTEGRAL_TYPE_P (type)
7911 && TREE_CODE (op0) == MULT_EXPR
7912 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7913 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7915 /* Be careful not to introduce new overflows. */
7916 tree mult_type;
7917 if (TYPE_OVERFLOW_WRAPS (type))
7918 mult_type = type;
7919 else
7920 mult_type = unsigned_type_for (type);
7922 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7924 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7925 fold_convert_loc (loc, mult_type,
7926 TREE_OPERAND (op0, 0)),
7927 fold_convert_loc (loc, mult_type,
7928 TREE_OPERAND (op0, 1)));
7929 return fold_convert_loc (loc, type, tem);
7933 tem = fold_convert_const (code, type, arg0);
7934 return tem ? tem : NULL_TREE;
7936 case ADDR_SPACE_CONVERT_EXPR:
7937 if (integer_zerop (arg0))
7938 return fold_convert_const (code, type, arg0);
7939 return NULL_TREE;
7941 case FIXED_CONVERT_EXPR:
7942 tem = fold_convert_const (code, type, arg0);
7943 return tem ? tem : NULL_TREE;
7945 case VIEW_CONVERT_EXPR:
7946 if (TREE_CODE (op0) == MEM_REF)
7947 return fold_build2_loc (loc, MEM_REF, type,
7948 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7950 return fold_view_convert_expr (type, op0);
7952 case NEGATE_EXPR:
7953 tem = fold_negate_expr (loc, arg0);
7954 if (tem)
7955 return fold_convert_loc (loc, type, tem);
7956 return NULL_TREE;
7958 case ABS_EXPR:
7959 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7960 return fold_abs_const (arg0, type);
7961 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7962 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7963 /* Convert fabs((double)float) into (double)fabsf(float). */
7964 else if (TREE_CODE (arg0) == NOP_EXPR
7965 && TREE_CODE (type) == REAL_TYPE)
7967 tree targ0 = strip_float_extensions (arg0);
7968 if (targ0 != arg0)
7969 return fold_convert_loc (loc, type,
7970 fold_build1_loc (loc, ABS_EXPR,
7971 TREE_TYPE (targ0),
7972 targ0));
7974 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7975 else if (TREE_CODE (arg0) == ABS_EXPR)
7976 return arg0;
7977 else if (tree_expr_nonnegative_p (arg0))
7978 return arg0;
7980 /* Strip sign ops from argument. */
7981 if (TREE_CODE (type) == REAL_TYPE)
7983 tem = fold_strip_sign_ops (arg0);
7984 if (tem)
7985 return fold_build1_loc (loc, ABS_EXPR, type,
7986 fold_convert_loc (loc, type, tem));
7988 return NULL_TREE;
7990 case CONJ_EXPR:
7991 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7992 return fold_convert_loc (loc, type, arg0);
7993 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7995 tree itype = TREE_TYPE (type);
7996 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7997 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7998 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7999 negate_expr (ipart));
8001 if (TREE_CODE (arg0) == COMPLEX_CST)
8003 tree itype = TREE_TYPE (type);
8004 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8005 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8006 return build_complex (type, rpart, negate_expr (ipart));
8008 if (TREE_CODE (arg0) == CONJ_EXPR)
8009 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8010 return NULL_TREE;
8012 case BIT_NOT_EXPR:
8013 if (TREE_CODE (arg0) == INTEGER_CST)
8014 return fold_not_const (arg0, type);
8015 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8016 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8017 /* Convert ~ (-A) to A - 1. */
8018 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8019 return fold_build2_loc (loc, MINUS_EXPR, type,
8020 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8021 build_int_cst (type, 1));
8022 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8023 else if (INTEGRAL_TYPE_P (type)
8024 && ((TREE_CODE (arg0) == MINUS_EXPR
8025 && integer_onep (TREE_OPERAND (arg0, 1)))
8026 || (TREE_CODE (arg0) == PLUS_EXPR
8027 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8028 return fold_build1_loc (loc, NEGATE_EXPR, type,
8029 fold_convert_loc (loc, type,
8030 TREE_OPERAND (arg0, 0)));
8031 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8032 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8033 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8034 fold_convert_loc (loc, type,
8035 TREE_OPERAND (arg0, 0)))))
8036 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8037 fold_convert_loc (loc, type,
8038 TREE_OPERAND (arg0, 1)));
8039 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8040 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8041 fold_convert_loc (loc, type,
8042 TREE_OPERAND (arg0, 1)))))
8043 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8044 fold_convert_loc (loc, type,
8045 TREE_OPERAND (arg0, 0)), tem);
8046 /* Perform BIT_NOT_EXPR on each element individually. */
8047 else if (TREE_CODE (arg0) == VECTOR_CST)
8049 tree *elements;
8050 tree elem;
8051 unsigned count = VECTOR_CST_NELTS (arg0), i;
8053 elements = XALLOCAVEC (tree, count);
8054 for (i = 0; i < count; i++)
8056 elem = VECTOR_CST_ELT (arg0, i);
8057 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8058 if (elem == NULL_TREE)
8059 break;
8060 elements[i] = elem;
8062 if (i == count)
8063 return build_vector (type, elements);
8065 else if (COMPARISON_CLASS_P (arg0)
8066 && (VECTOR_TYPE_P (type)
8067 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8069 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8070 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8071 HONOR_NANS (TYPE_MODE (op_type)));
8072 if (subcode != ERROR_MARK)
8073 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8074 TREE_OPERAND (arg0, 1));
8078 return NULL_TREE;
8080 case TRUTH_NOT_EXPR:
8081 /* Note that the operand of this must be an int
8082 and its values must be 0 or 1.
8083 ("true" is a fixed value perhaps depending on the language,
8084 but we don't handle values other than 1 correctly yet.) */
8085 tem = fold_truth_not_expr (loc, arg0);
8086 if (!tem)
8087 return NULL_TREE;
8088 return fold_convert_loc (loc, type, tem);
8090 case REALPART_EXPR:
8091 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8092 return fold_convert_loc (loc, type, arg0);
8093 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8094 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8095 TREE_OPERAND (arg0, 1));
8096 if (TREE_CODE (arg0) == COMPLEX_CST)
8097 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8098 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8100 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8101 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8102 fold_build1_loc (loc, REALPART_EXPR, itype,
8103 TREE_OPERAND (arg0, 0)),
8104 fold_build1_loc (loc, REALPART_EXPR, itype,
8105 TREE_OPERAND (arg0, 1)));
8106 return fold_convert_loc (loc, type, tem);
8108 if (TREE_CODE (arg0) == CONJ_EXPR)
8110 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8111 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8112 TREE_OPERAND (arg0, 0));
8113 return fold_convert_loc (loc, type, tem);
8115 if (TREE_CODE (arg0) == CALL_EXPR)
8117 tree fn = get_callee_fndecl (arg0);
8118 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8119 switch (DECL_FUNCTION_CODE (fn))
8121 CASE_FLT_FN (BUILT_IN_CEXPI):
8122 fn = mathfn_built_in (type, BUILT_IN_COS);
8123 if (fn)
8124 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8125 break;
8127 default:
8128 break;
8131 return NULL_TREE;
8133 case IMAGPART_EXPR:
8134 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8135 return build_zero_cst (type);
8136 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8137 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8138 TREE_OPERAND (arg0, 0));
8139 if (TREE_CODE (arg0) == COMPLEX_CST)
8140 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8141 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8143 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8144 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8145 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8146 TREE_OPERAND (arg0, 0)),
8147 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8148 TREE_OPERAND (arg0, 1)));
8149 return fold_convert_loc (loc, type, tem);
8151 if (TREE_CODE (arg0) == CONJ_EXPR)
8153 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8154 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8155 return fold_convert_loc (loc, type, negate_expr (tem));
8157 if (TREE_CODE (arg0) == CALL_EXPR)
8159 tree fn = get_callee_fndecl (arg0);
8160 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8161 switch (DECL_FUNCTION_CODE (fn))
8163 CASE_FLT_FN (BUILT_IN_CEXPI):
8164 fn = mathfn_built_in (type, BUILT_IN_SIN);
8165 if (fn)
8166 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8167 break;
8169 default:
8170 break;
8173 return NULL_TREE;
8175 case INDIRECT_REF:
8176 /* Fold *&X to X if X is an lvalue. */
8177 if (TREE_CODE (op0) == ADDR_EXPR)
8179 tree op00 = TREE_OPERAND (op0, 0);
8180 if ((TREE_CODE (op00) == VAR_DECL
8181 || TREE_CODE (op00) == PARM_DECL
8182 || TREE_CODE (op00) == RESULT_DECL)
8183 && !TREE_READONLY (op00))
8184 return op00;
8186 return NULL_TREE;
8188 case VEC_UNPACK_LO_EXPR:
8189 case VEC_UNPACK_HI_EXPR:
8190 case VEC_UNPACK_FLOAT_LO_EXPR:
8191 case VEC_UNPACK_FLOAT_HI_EXPR:
8193 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8194 tree *elts;
8195 enum tree_code subcode;
8197 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8198 if (TREE_CODE (arg0) != VECTOR_CST)
8199 return NULL_TREE;
8201 elts = XALLOCAVEC (tree, nelts * 2);
8202 if (!vec_cst_ctor_to_array (arg0, elts))
8203 return NULL_TREE;
8205 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8206 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8207 elts += nelts;
8209 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8210 subcode = NOP_EXPR;
8211 else
8212 subcode = FLOAT_EXPR;
8214 for (i = 0; i < nelts; i++)
8216 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8217 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8218 return NULL_TREE;
8221 return build_vector (type, elts);
8224 case REDUC_MIN_EXPR:
8225 case REDUC_MAX_EXPR:
8226 case REDUC_PLUS_EXPR:
8228 unsigned int nelts, i;
8229 tree *elts;
8230 enum tree_code subcode;
8232 if (TREE_CODE (op0) != VECTOR_CST)
8233 return NULL_TREE;
8234 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8236 elts = XALLOCAVEC (tree, nelts);
8237 if (!vec_cst_ctor_to_array (op0, elts))
8238 return NULL_TREE;
8240 switch (code)
8242 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8243 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8244 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8245 default: gcc_unreachable ();
8248 for (i = 1; i < nelts; i++)
8250 elts[0] = const_binop (subcode, elts[0], elts[i]);
8251 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8252 return NULL_TREE;
8255 return elts[0];
8258 default:
8259 return NULL_TREE;
8260 } /* switch (code) */
8264 /* If the operation was a conversion do _not_ mark a resulting constant
8265 with TREE_OVERFLOW if the original constant was not. These conversions
8266 have implementation defined behavior and retaining the TREE_OVERFLOW
8267 flag here would confuse later passes such as VRP. */
8268 tree
8269 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8270 tree type, tree op0)
8272 tree res = fold_unary_loc (loc, code, type, op0);
8273 if (res
8274 && TREE_CODE (res) == INTEGER_CST
8275 && TREE_CODE (op0) == INTEGER_CST
8276 && CONVERT_EXPR_CODE_P (code))
8277 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8279 return res;
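/* For illustration: folding (signed char) 300 yields 44 with
   TREE_OVERFLOW set by the folder; because the conversion is
   implementation-defined rather than undefined, the wrapper above
   copies the clear flag from the operand so later passes such as VRP
   do not treat the result as overflowed.  */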
8282 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8283 operands OP0 and OP1. LOC is the location of the resulting expression.
8284 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8285 Return the folded expression if folding is successful. Otherwise,
8286 return NULL_TREE. */
8287 static tree
8288 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8289 tree arg0, tree arg1, tree op0, tree op1)
8291 tree tem;
8293 /* We only do these simplifications if we are optimizing. */
8294 if (!optimize)
8295 return NULL_TREE;
8297 /* Check for things like (A || B) && (A || C). We can convert this
8298 to A || (B && C). Note that either operator can be any of the four
8299 truth and/or operations and the transformation will still be
8300 valid. Also note that we only care about order for the
8301 ANDIF and ORIF operators. If B contains side effects, this
8302 might change the truth-value of A. */
8303 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8304 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8305 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8306 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8307 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8308 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8310 tree a00 = TREE_OPERAND (arg0, 0);
8311 tree a01 = TREE_OPERAND (arg0, 1);
8312 tree a10 = TREE_OPERAND (arg1, 0);
8313 tree a11 = TREE_OPERAND (arg1, 1);
8314 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8315 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8316 && (code == TRUTH_AND_EXPR
8317 || code == TRUTH_OR_EXPR));
8319 if (operand_equal_p (a00, a10, 0))
8320 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8321 fold_build2_loc (loc, code, type, a01, a11));
8322 else if (commutative && operand_equal_p (a00, a11, 0))
8323 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8324 fold_build2_loc (loc, code, type, a01, a10));
8325 else if (commutative && operand_equal_p (a01, a10, 0))
8326 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8327 fold_build2_loc (loc, code, type, a00, a11));
8329 /* This case is tricky because we must either have commutative
8330 operators or else A10 must not have side-effects. */
8332 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8333 && operand_equal_p (a01, a11, 0))
8334 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8335 fold_build2_loc (loc, code, type, a00, a10),
8336 a01);
8339 /* See if we can build a range comparison. */
8340 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8341 return tem;
8343 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8344 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8346 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8347 if (tem)
8348 return fold_build2_loc (loc, code, type, tem, arg1);
8351 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8352 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8354 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8355 if (tem)
8356 return fold_build2_loc (loc, code, type, arg0, tem);
8359 /* Check for the possibility of merging component references. If our
8360 lhs is another similar operation, try to merge its rhs with our
8361 rhs. Then try to merge our lhs and rhs. */
8362 if (TREE_CODE (arg0) == code
8363 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8364 TREE_OPERAND (arg0, 1), arg1)))
8365 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8367 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8368 return tem;
8370 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8371 && (code == TRUTH_AND_EXPR
8372 || code == TRUTH_ANDIF_EXPR
8373 || code == TRUTH_OR_EXPR
8374 || code == TRUTH_ORIF_EXPR))
8376 enum tree_code ncode, icode;
8378 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8379 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8380 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8382 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8383 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8384 We don't want to pack more than two leaves into a non-IF AND/OR
8385 expression.
8386 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8387 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8388 If the inner right-hand side of the left-hand operand has
8389 side-effects, or isn't simple, then we can't add to it,
8390 as otherwise we might destroy the if-sequence. */
8391 if (TREE_CODE (arg0) == icode
8392 && simple_operand_p_2 (arg1)
8393 /* Needed for sequence points to handle trapping and
8394 side-effects. */
8395 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8397 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8398 arg1);
8399 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8400 tem);
8402 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8403 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8404 else if (TREE_CODE (arg1) == icode
8405 && simple_operand_p_2 (arg0)
8406 /* Needed for sequence points to handle trapping and
8407 side-effects. */
8408 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8410 tem = fold_build2_loc (loc, ncode, type,
8411 arg0, TREE_OPERAND (arg1, 0));
8412 return fold_build2_loc (loc, icode, type, tem,
8413 TREE_OPERAND (arg1, 1));
8415 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8416 into (A OR B).
8417 For sequence point consistency, we need to check for trapping
8418 and side-effects. */
8419 else if (code == icode && simple_operand_p_2 (arg0)
8420 && simple_operand_p_2 (arg1))
8421 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8424 return NULL_TREE;
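/* For illustration: when optimizing, (a || b) && (a || c) becomes
   a || (b && c) by the distribution rule above, provided b has no
   side effects; and on LOGICAL_OP_NON_SHORT_CIRCUIT targets
   (a && b) && c can be flattened to a && (b AND c), where the inner
   operator is the non-short-circuiting TRUTH_AND_EXPR.  */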
8427 /* Fold a binary expression of code CODE and type TYPE with operands
8428 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8429 Return the folded expression if folding is successful. Otherwise,
8430 return NULL_TREE. */
8432 static tree
8433 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8435 enum tree_code compl_code;
8437 if (code == MIN_EXPR)
8438 compl_code = MAX_EXPR;
8439 else if (code == MAX_EXPR)
8440 compl_code = MIN_EXPR;
8441 else
8442 gcc_unreachable ();
8444 /* MIN (MAX (a, b), b) == b. */
8445 if (TREE_CODE (op0) == compl_code
8446 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8447 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8449 /* MIN (MAX (b, a), b) == b. */
8450 if (TREE_CODE (op0) == compl_code
8451 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8452 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8453 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8455 /* MIN (a, MAX (a, b)) == a. */
8456 if (TREE_CODE (op1) == compl_code
8457 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8458 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8459 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8461 /* MIN (a, MAX (b, a)) == a. */
8462 if (TREE_CODE (op1) == compl_code
8463 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8464 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8465 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8467 return NULL_TREE;
8470 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8471 by changing CODE to reduce the magnitude of constants involved in
8472 ARG0 of the comparison.
8473 Returns a canonicalized comparison tree if a simplification was
8474 possible, otherwise returns NULL_TREE.
8475 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8476 valid if signed overflow is undefined. */
8478 static tree
8479 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8480 tree arg0, tree arg1,
8481 bool *strict_overflow_p)
8483 enum tree_code code0 = TREE_CODE (arg0);
8484 tree t, cst0 = NULL_TREE;
8485 int sgn0;
8486 bool swap = false;
8488 /* Match A +- CST code arg1 and CST code arg1. We can change the
8489 first form only if overflow is undefined. */
8490 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8491 /* In principle pointers also have undefined overflow behavior,
8492 but that causes problems elsewhere. */
8493 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8494 && (code0 == MINUS_EXPR
8495 || code0 == PLUS_EXPR)
8496 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8497 || code0 == INTEGER_CST))
8498 return NULL_TREE;
8500 /* Identify the constant in arg0 and its sign. */
8501 if (code0 == INTEGER_CST)
8502 cst0 = arg0;
8503 else
8504 cst0 = TREE_OPERAND (arg0, 1);
8505 sgn0 = tree_int_cst_sgn (cst0);
8507 /* Overflowed constants and zero will cause problems. */
8508 if (integer_zerop (cst0)
8509 || TREE_OVERFLOW (cst0))
8510 return NULL_TREE;
8512 /* See if we can reduce the magnitude of the constant in
8513 arg0 by changing the comparison code. */
8514 if (code0 == INTEGER_CST)
8516 /* CST <= arg1 -> CST-1 < arg1. */
8517 if (code == LE_EXPR && sgn0 == 1)
8518 code = LT_EXPR;
8519 /* -CST < arg1 -> -CST-1 <= arg1. */
8520 else if (code == LT_EXPR && sgn0 == -1)
8521 code = LE_EXPR;
8522 /* CST > arg1 -> CST-1 >= arg1. */
8523 else if (code == GT_EXPR && sgn0 == 1)
8524 code = GE_EXPR;
8525 /* -CST >= arg1 -> -CST-1 > arg1. */
8526 else if (code == GE_EXPR && sgn0 == -1)
8527 code = GT_EXPR;
8528 else
8529 return NULL_TREE;
8530 /* arg1 code' CST' might be more canonical. */
8531 swap = true;
8533 else
8535 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8536 if (code == LT_EXPR
8537 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8538 code = LE_EXPR;
8539 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8540 else if (code == GT_EXPR
8541 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8542 code = GE_EXPR;
8543 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8544 else if (code == LE_EXPR
8545 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8546 code = LT_EXPR;
8547 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8548 else if (code == GE_EXPR
8549 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8550 code = GT_EXPR;
8551 else
8552 return NULL_TREE;
8553 *strict_overflow_p = true;
8556 /* Now build the constant reduced in magnitude. But not if that
8557 would produce one outside of its type's range. */
8558 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8559 && ((sgn0 == 1
8560 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8561 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8562 || (sgn0 == -1
8563 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8564 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8565 /* We cannot swap the comparison here as that would cause us to
8566 endlessly recurse. */
8567 return NULL_TREE;
8569 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8570 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8571 if (code0 != INTEGER_CST)
8572 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8573 t = fold_convert (TREE_TYPE (arg1), t);
8575 /* If swapping might yield a more canonical form, do so. */
8576 if (swap)
8577 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8578 else
8579 return fold_build2_loc (loc, code, type, t, arg1);
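/* For illustration (assuming signed overflow is undefined):
   x + 2 > y canonicalizes to x + 1 >= y, shrinking the magnitude of
   the constant by one, and the sole-constant form 3 <= y becomes
   y > 2 after the final swap.  */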
8582 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8583 overflow further. Try to decrease the magnitude of constants involved
8584 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8585 and put sole constants at the second argument position.
8586 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8588 static tree
8589 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8590 tree arg0, tree arg1)
8592 tree t;
8593 bool strict_overflow_p;
8594 const char * const warnmsg = G_("assuming signed overflow does not occur "
8595 "when reducing constant in comparison");
8597 /* Try canonicalization by simplifying arg0. */
8598 strict_overflow_p = false;
8599 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8600 &strict_overflow_p);
8601 if (t)
8603 if (strict_overflow_p)
8604 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8605 return t;
8608 /* Try canonicalization by simplifying arg1 using the swapped
8609 comparison. */
8610 code = swap_tree_comparison (code);
8611 strict_overflow_p = false;
8612 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8613 &strict_overflow_p);
8614 if (t && strict_overflow_p)
8615 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8616 return t;
8619 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8620 space. This is used to avoid issuing overflow warnings for
8621 expressions like &p->x which cannot wrap. */
8623 static bool
8624 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8626 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8627 return true;
8629 if (bitpos < 0)
8630 return true;
8632 wide_int wi_offset;
8633 int precision = TYPE_PRECISION (TREE_TYPE (base));
8634 if (offset == NULL_TREE)
8635 wi_offset = wi::zero (precision);
8636 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8637 return true;
8638 else
8639 wi_offset = offset;
8641 bool overflow;
8642 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8643 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8644 if (overflow)
8645 return true;
8647 if (!wi::fits_uhwi_p (total))
8648 return true;
8650 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8651 if (size <= 0)
8652 return true;
8654 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8655 array. */
8656 if (TREE_CODE (base) == ADDR_EXPR)
8658 HOST_WIDE_INT base_size;
8660 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8661 if (base_size > 0 && size < base_size)
8662 size = base_size;
8665 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8668 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8669 of sizetype kind. This makes sure to properly sign-extend the
8670 constant. */
8672 static HOST_WIDE_INT
8673 size_low_cst (const_tree t)
8675 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8676 int prec = TYPE_PRECISION (TREE_TYPE (t));
8677 if (prec < HOST_BITS_PER_WIDE_INT)
8678 return sext_hwi (w, prec);
8679 return w;
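/* For illustration: with a 32-bit sizetype on a 64-bit host, the
   sizetype constant 0xffffffff is returned as the HOST_WIDE_INT -1
   (sign extension at precision 32), so the callers below compare such
   offsets as signed values.  */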
8682 /* Subroutine of fold_binary. This routine performs all of the
8683 transformations that are common to the equality/inequality
8684 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8685 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8686 fold_binary should call fold_binary. Fold a comparison with
8687 tree code CODE and type TYPE with operands OP0 and OP1. Return
8688 the folded comparison or NULL_TREE. */
8690 static tree
8691 fold_comparison (location_t loc, enum tree_code code, tree type,
8692 tree op0, tree op1)
8694 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8695 tree arg0, arg1, tem;
8697 arg0 = op0;
8698 arg1 = op1;
8700 STRIP_SIGN_NOPS (arg0);
8701 STRIP_SIGN_NOPS (arg1);
8703 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8704 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8705 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8707 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8708 && TREE_CODE (arg1) == INTEGER_CST
8709 && !TREE_OVERFLOW (arg1))
8711 const enum tree_code
8712 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8713 tree const1 = TREE_OPERAND (arg0, 1);
8714 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8715 tree variable = TREE_OPERAND (arg0, 0);
8716 tree new_const = int_const_binop (reverse_op, const2, const1);
8718 /* If the constant operation overflowed this can be
8719 simplified as a comparison against INT_MAX/INT_MIN. */
8720 if (TREE_OVERFLOW (new_const)
8721 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8723 int const1_sgn = tree_int_cst_sgn (const1);
8724 enum tree_code code2 = code;
8726 /* Get the sign of the constant on the lhs if the
8727 operation were VARIABLE + CONST1. */
8728 if (TREE_CODE (arg0) == MINUS_EXPR)
8729 const1_sgn = -const1_sgn;
8731 /* The sign of the constant determines if we overflowed
8732 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8733 Canonicalize to the INT_MIN overflow by swapping the comparison
8734 if necessary. */
8735 if (const1_sgn == -1)
8736 code2 = swap_tree_comparison (code);
8738 /* We now can look at the canonicalized case
8739 VARIABLE + 1 CODE2 INT_MIN
8740 and decide on the result. */
8741 switch (code2)
8743 case EQ_EXPR:
8744 case LT_EXPR:
8745 case LE_EXPR:
8746 return
8747 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8749 case NE_EXPR:
8750 case GE_EXPR:
8751 case GT_EXPR:
8752 return
8753 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8755 default:
8756 gcc_unreachable ();
8759 else
8761 if (!equality_code)
8762 fold_overflow_warning ("assuming signed overflow does not occur "
8763 "when changing X +- C1 cmp C2 to "
8764 "X cmp C2 -+ C1",
8765 WARN_STRICT_OVERFLOW_COMPARISON);
8766 return fold_build2_loc (loc, code, type, variable, new_const);
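/* For illustration: x + 1 < 5 becomes x < 4 here, while x + 1 <= INT_MIN
   overflows the constant folding and is decided outright as false
   (given undefined signed overflow), keeping x only for its side
   effects via omit_one_operand_loc.  */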
8770 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8771 if (TREE_CODE (arg0) == MINUS_EXPR
8772 && equality_code
8773 && integer_zerop (arg1))
8775 /* ??? The transformation is valid for the other operators if overflow
8776 is undefined for the type, but performing it here badly interacts
8777 with the transformation in fold_cond_expr_with_comparison which
8778 attempts to synthesize ABS_EXPR. */
8779 if (!equality_code)
8780 fold_overflow_warning ("assuming signed overflow does not occur "
8781 "when changing X - Y cmp 0 to X cmp Y",
8782 WARN_STRICT_OVERFLOW_COMPARISON);
8783 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8784 TREE_OPERAND (arg0, 1));
8787 /* For comparisons of pointers we can decompose it to a compile time
8788 comparison of the base objects and the offsets into the object.
8789 This requires at least one operand being an ADDR_EXPR or a
8790 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8791 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8792 && (TREE_CODE (arg0) == ADDR_EXPR
8793 || TREE_CODE (arg1) == ADDR_EXPR
8794 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8795 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8797 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8798 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8799 machine_mode mode;
8800 int volatilep, unsignedp;
8801 bool indirect_base0 = false, indirect_base1 = false;
8803 /* Get base and offset for the access. Strip ADDR_EXPR for
8804 get_inner_reference, but put it back by stripping INDIRECT_REF
8805 off the base object if possible. indirect_baseN will be true
8806 if baseN is not an address but refers to the object itself. */
8807 base0 = arg0;
8808 if (TREE_CODE (arg0) == ADDR_EXPR)
8810 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8811 &bitsize, &bitpos0, &offset0, &mode,
8812 &unsignedp, &volatilep, false);
8813 if (TREE_CODE (base0) == INDIRECT_REF)
8814 base0 = TREE_OPERAND (base0, 0);
8815 else
8816 indirect_base0 = true;
8818 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8820 base0 = TREE_OPERAND (arg0, 0);
8821 STRIP_SIGN_NOPS (base0);
8822 if (TREE_CODE (base0) == ADDR_EXPR)
8824 base0 = TREE_OPERAND (base0, 0);
8825 indirect_base0 = true;
8827 offset0 = TREE_OPERAND (arg0, 1);
8828 if (tree_fits_shwi_p (offset0))
8830 HOST_WIDE_INT off = size_low_cst (offset0);
8831 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8832 * BITS_PER_UNIT)
8833 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8835 bitpos0 = off * BITS_PER_UNIT;
8836 offset0 = NULL_TREE;
8841 base1 = arg1;
8842 if (TREE_CODE (arg1) == ADDR_EXPR)
8844 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8845 &bitsize, &bitpos1, &offset1, &mode,
8846 &unsignedp, &volatilep, false);
8847 if (TREE_CODE (base1) == INDIRECT_REF)
8848 base1 = TREE_OPERAND (base1, 0);
8849 else
8850 indirect_base1 = true;
8852 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8854 base1 = TREE_OPERAND (arg1, 0);
8855 STRIP_SIGN_NOPS (base1);
8856 if (TREE_CODE (base1) == ADDR_EXPR)
8858 base1 = TREE_OPERAND (base1, 0);
8859 indirect_base1 = true;
8861 offset1 = TREE_OPERAND (arg1, 1);
8862 if (tree_fits_shwi_p (offset1))
8864 HOST_WIDE_INT off = size_low_cst (offset1);
8865 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8866 * BITS_PER_UNIT)
8867 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8869 bitpos1 = off * BITS_PER_UNIT;
8870 offset1 = NULL_TREE;
8875 /* A local variable can never be pointed to by
8876 the default SSA name of an incoming parameter. */
8877 if ((TREE_CODE (arg0) == ADDR_EXPR
8878 && indirect_base0
8879 && TREE_CODE (base0) == VAR_DECL
8880 && auto_var_in_fn_p (base0, current_function_decl)
8881 && !indirect_base1
8882 && TREE_CODE (base1) == SSA_NAME
8883 && SSA_NAME_IS_DEFAULT_DEF (base1)
8884 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8885 || (TREE_CODE (arg1) == ADDR_EXPR
8886 && indirect_base1
8887 && TREE_CODE (base1) == VAR_DECL
8888 && auto_var_in_fn_p (base1, current_function_decl)
8889 && !indirect_base0
8890 && TREE_CODE (base0) == SSA_NAME
8891 && SSA_NAME_IS_DEFAULT_DEF (base0)
8892 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8894 if (code == NE_EXPR)
8895 return constant_boolean_node (1, type);
8896 else if (code == EQ_EXPR)
8897 return constant_boolean_node (0, type);
8899 /* If we have equivalent bases we might be able to simplify. */
8900 else if (indirect_base0 == indirect_base1
8901 && operand_equal_p (base0, base1, 0))
8903 /* We can fold this expression to a constant if the non-constant
8904 offset parts are equal. */
8905 if ((offset0 == offset1
8906 || (offset0 && offset1
8907 && operand_equal_p (offset0, offset1, 0)))
8908 && (code == EQ_EXPR
8909 || code == NE_EXPR
8910 || (indirect_base0 && DECL_P (base0))
8911 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8914 if (!equality_code
8915 && bitpos0 != bitpos1
8916 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8917 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8918 fold_overflow_warning (("assuming pointer wraparound does not "
8919 "occur when comparing P +- C1 with "
8920 "P +- C2"),
8921 WARN_STRICT_OVERFLOW_CONDITIONAL);
8923 switch (code)
8925 case EQ_EXPR:
8926 return constant_boolean_node (bitpos0 == bitpos1, type);
8927 case NE_EXPR:
8928 return constant_boolean_node (bitpos0 != bitpos1, type);
8929 case LT_EXPR:
8930 return constant_boolean_node (bitpos0 < bitpos1, type);
8931 case LE_EXPR:
8932 return constant_boolean_node (bitpos0 <= bitpos1, type);
8933 case GE_EXPR:
8934 return constant_boolean_node (bitpos0 >= bitpos1, type);
8935 case GT_EXPR:
8936 return constant_boolean_node (bitpos0 > bitpos1, type);
8937 default:;
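/* Illustrative example, not part of the original source: given
   struct S { int a; int b; } s; the comparison &s.a < &s.b has
   equal bases and constant bit positions bitpos0 == 0 and
   bitpos1 == 32 (assuming a 32-bit int), so the LT_EXPR case
   above folds it to constant true. */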
8940 /* We can simplify the comparison to a comparison of the variable
8941 offset parts if the constant offset parts are equal.
8942 Be careful to use signed sizetype here because otherwise we
8943 mess with array offsets in the wrong way. This is possible
8944 because pointer arithmetic is restricted to remain within an
8945 object and overflow on pointer differences is undefined as of
8946 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8947 else if (bitpos0 == bitpos1
8948 && (equality_code
8949 || (indirect_base0 && DECL_P (base0))
8950 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8952 /* By converting to signed sizetype we cover middle-end pointer
8953 arithmetic, which operates on unsigned pointer types of the
8954 same size as sizetype, and ARRAY_REF offsets, which are
8955 properly sign- or zero-extended from their type in case it is
8956 narrower than sizetype. */
8957 if (offset0 == NULL_TREE)
8958 offset0 = build_int_cst (ssizetype, 0);
8959 else
8960 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8961 if (offset1 == NULL_TREE)
8962 offset1 = build_int_cst (ssizetype, 0);
8963 else
8964 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8966 if (!equality_code
8967 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8968 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8969 fold_overflow_warning (("assuming pointer wraparound does not "
8970 "occur when comparing P +- C1 with "
8971 "P +- C2"),
8972 WARN_STRICT_OVERFLOW_COMPARISON);
8974 return fold_build2_loc (loc, code, type, offset0, offset1);
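/* Illustrative example, not part of the original source: for
   char *p and indices i and j, p + i < p + j has equal bases and
   equal constant parts, so it reduces here to a comparison of the
   variable offsets, (ssizetype) i < (ssizetype) j. */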
8977 /* For non-equal bases we can simplify if they are addresses
8978 of local binding decls or constants. */
8979 else if (indirect_base0 && indirect_base1
8980 /* We know that !operand_equal_p (base0, base1, 0)
8981 because the if condition was false. But make
8982 sure the two decls are not the same. */
8983 && base0 != base1
8984 && TREE_CODE (arg0) == ADDR_EXPR
8985 && TREE_CODE (arg1) == ADDR_EXPR
8986 && (((TREE_CODE (base0) == VAR_DECL
8987 || TREE_CODE (base0) == PARM_DECL)
8988 && (targetm.binds_local_p (base0)
8989 || CONSTANT_CLASS_P (base1)))
8990 || CONSTANT_CLASS_P (base0))
8991 && (((TREE_CODE (base1) == VAR_DECL
8992 || TREE_CODE (base1) == PARM_DECL)
8993 && (targetm.binds_local_p (base1)
8994 || CONSTANT_CLASS_P (base0)))
8995 || CONSTANT_CLASS_P (base1)))
8997 if (code == EQ_EXPR)
8998 return omit_two_operands_loc (loc, type, boolean_false_node,
8999 arg0, arg1);
9000 else if (code == NE_EXPR)
9001 return omit_two_operands_loc (loc, type, boolean_true_node,
9002 arg0, arg1);
9004 /* For equal offsets we can simplify to a comparison of the
9005 base addresses. */
9006 else if (bitpos0 == bitpos1
9007 && (indirect_base0
9008 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9009 && (indirect_base1
9010 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9011 && ((offset0 == offset1)
9012 || (offset0 && offset1
9013 && operand_equal_p (offset0, offset1, 0))))
9015 if (indirect_base0)
9016 base0 = build_fold_addr_expr_loc (loc, base0);
9017 if (indirect_base1)
9018 base1 = build_fold_addr_expr_loc (loc, base1);
9019 return fold_build2_loc (loc, code, type, base0, base1);
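/* Illustrative example, not part of the original source: with
   struct S { int x; } *p, *q; the comparison &p->x == &q->x has
   equal bit positions and offsets on both sides, so it simplifies
   here to the base address comparison p == q. */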
9023 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9024 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9025 the resulting offset is smaller in absolute value than the
9026 original one and has the same sign. */
9027 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9028 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9029 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9030 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9031 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9032 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9033 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9035 tree const1 = TREE_OPERAND (arg0, 1);
9036 tree const2 = TREE_OPERAND (arg1, 1);
9037 tree variable1 = TREE_OPERAND (arg0, 0);
9038 tree variable2 = TREE_OPERAND (arg1, 0);
9039 tree cst;
9040 const char * const warnmsg = G_("assuming signed overflow does not "
9041 "occur when combining constants around "
9042 "a comparison");
9044 /* Put the constant on the side where it doesn't overflow and is
9045 of lower absolute value and of the same sign as before. */
9046 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9047 ? MINUS_EXPR : PLUS_EXPR,
9048 const2, const1);
9049 if (!TREE_OVERFLOW (cst)
9050 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9051 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9053 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9054 return fold_build2_loc (loc, code, type,
9055 variable1,
9056 fold_build2_loc (loc, TREE_CODE (arg1),
9057 TREE_TYPE (arg1),
9058 variable2, cst));
9061 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9062 ? MINUS_EXPR : PLUS_EXPR,
9063 const1, const2);
9064 if (!TREE_OVERFLOW (cst)
9065 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9066 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9068 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9069 return fold_build2_loc (loc, code, type,
9070 fold_build2_loc (loc, TREE_CODE (arg0),
9071 TREE_TYPE (arg0),
9072 variable1, cst),
9073 variable2);
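/* Illustrative example, not part of the original source: for
   signed int x and y, x + 3 < y + 10 is rewritten as x < y + 7;
   the combined constant 7 is smaller in absolute value than 10
   and has the same sign, so no new overflow is introduced. */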
9077 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9078 signed arithmetic case. That form is created by the compiler
9079 often enough for folding it to be of value. One example is in
9080 computing loop trip counts after Operator Strength Reduction. */
9081 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9082 && TREE_CODE (arg0) == MULT_EXPR
9083 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9084 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9085 && integer_zerop (arg1))
9087 tree const1 = TREE_OPERAND (arg0, 1);
9088 tree const2 = arg1; /* zero */
9089 tree variable1 = TREE_OPERAND (arg0, 0);
9090 enum tree_code cmp_code = code;
9092 /* Handle unfolded multiplication by zero. */
9093 if (integer_zerop (const1))
9094 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9096 fold_overflow_warning (("assuming signed overflow does not occur when "
9097 "eliminating multiplication in comparison "
9098 "with zero"),
9099 WARN_STRICT_OVERFLOW_COMPARISON);
9101 /* If const1 is negative we swap the sense of the comparison. */
9102 if (tree_int_cst_sgn (const1) < 0)
9103 cmp_code = swap_tree_comparison (cmp_code);
9105 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
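/* Illustrative example, not part of the original source: with
   signed overflow assumed undefined, x * 4 > 0 folds to x > 0,
   while x * -2 > 0 folds to x < 0 because a negative multiplier
   swaps the sense of the comparison. */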
9108 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9109 if (tem)
9110 return tem;
9112 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9114 tree targ0 = strip_float_extensions (arg0);
9115 tree targ1 = strip_float_extensions (arg1);
9116 tree newtype = TREE_TYPE (targ0);
9118 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9119 newtype = TREE_TYPE (targ1);
9121 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9122 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9123 return fold_build2_loc (loc, code, type,
9124 fold_convert_loc (loc, newtype, targ0),
9125 fold_convert_loc (loc, newtype, targ1));
9127 /* (-a) CMP (-b) -> b CMP a */
9128 if (TREE_CODE (arg0) == NEGATE_EXPR
9129 && TREE_CODE (arg1) == NEGATE_EXPR)
9130 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9131 TREE_OPERAND (arg0, 0));
9133 if (TREE_CODE (arg1) == REAL_CST)
9135 REAL_VALUE_TYPE cst;
9136 cst = TREE_REAL_CST (arg1);
9138 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9139 if (TREE_CODE (arg0) == NEGATE_EXPR)
9140 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9141 TREE_OPERAND (arg0, 0),
9142 build_real (TREE_TYPE (arg1),
9143 real_value_negate (&cst)));
9145 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9146 /* a CMP (-0) -> a CMP 0 */
9147 if (REAL_VALUE_MINUS_ZERO (cst))
9148 return fold_build2_loc (loc, code, type, arg0,
9149 build_real (TREE_TYPE (arg1), dconst0));
9151 /* x != NaN is always true, other ops are always false. */
9152 if (REAL_VALUE_ISNAN (cst)
9153 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9155 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9156 return omit_one_operand_loc (loc, type, tem, arg0);
9159 /* Fold comparisons against infinity. */
9160 if (REAL_VALUE_ISINF (cst)
9161 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9163 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9164 if (tem != NULL_TREE)
9165 return tem;
9169 /* If this is a comparison of a real constant with a PLUS_EXPR
9170 or a MINUS_EXPR of a real constant, we can convert it into a
9171 comparison with a revised real constant as long as no overflow
9172 occurs when unsafe_math_optimizations are enabled. */
9173 if (flag_unsafe_math_optimizations
9174 && TREE_CODE (arg1) == REAL_CST
9175 && (TREE_CODE (arg0) == PLUS_EXPR
9176 || TREE_CODE (arg0) == MINUS_EXPR)
9177 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9178 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9179 ? MINUS_EXPR : PLUS_EXPR,
9180 arg1, TREE_OPERAND (arg0, 1)))
9181 && !TREE_OVERFLOW (tem))
9182 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
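/* Illustrative example, not part of the original source: with
   -funsafe-math-optimizations, x + 1.0 < 3.0 is rewritten as
   x < 2.0, since 3.0 - 1.0 is computable without overflow. */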
9184 /* Likewise, we can simplify a comparison of a real constant with
9185 a MINUS_EXPR whose first operand is also a real constant, i.e.
9186 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9187 floating-point types only if -fassociative-math is set. */
9188 if (flag_associative_math
9189 && TREE_CODE (arg1) == REAL_CST
9190 && TREE_CODE (arg0) == MINUS_EXPR
9191 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9192 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9193 arg1))
9194 && !TREE_OVERFLOW (tem))
9195 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9196 TREE_OPERAND (arg0, 1), tem);
9198 /* Fold comparisons against built-in math functions. */
9199 if (TREE_CODE (arg1) == REAL_CST
9200 && flag_unsafe_math_optimizations
9201 && ! flag_errno_math)
9203 enum built_in_function fcode = builtin_mathfn_code (arg0);
9205 if (fcode != END_BUILTINS)
9207 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9208 if (tem != NULL_TREE)
9209 return tem;
9214 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9215 && CONVERT_EXPR_P (arg0))
9217 /* If we are widening one operand of an integer comparison,
9218 see if the other operand is similarly being widened. Perhaps we
9219 can do the comparison in the narrower type. */
9220 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9221 if (tem)
9222 return tem;
9224 /* Or if we are changing signedness. */
9225 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9226 if (tem)
9227 return tem;
9230 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9231 constant, we can simplify it. */
9232 if (TREE_CODE (arg1) == INTEGER_CST
9233 && (TREE_CODE (arg0) == MIN_EXPR
9234 || TREE_CODE (arg0) == MAX_EXPR)
9235 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9237 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9238 if (tem)
9239 return tem;
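/* Illustrative example, not part of the original source:
   MIN_EXPR <x, 4> < 10 is always true because the minimum can
   never exceed 4, so optimize_minmax_comparison can fold the
   whole comparison to a constant. */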
9242 /* Simplify comparison of something with itself. (For IEEE
9243 floating-point, we can only do some of these simplifications.) */
9244 if (operand_equal_p (arg0, arg1, 0))
9246 switch (code)
9248 case EQ_EXPR:
9249 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9250 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9251 return constant_boolean_node (1, type);
9252 break;
9254 case GE_EXPR:
9255 case LE_EXPR:
9256 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9257 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9258 return constant_boolean_node (1, type);
9259 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9261 case NE_EXPR:
9262 /* For NE, we can only do this simplification if the type is
9263 integer or we don't honor IEEE floating point NaNs. */
9264 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9265 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9266 break;
9267 /* ... fall through ... */
9268 case GT_EXPR:
9269 case LT_EXPR:
9270 return constant_boolean_node (0, type);
9271 default:
9272 gcc_unreachable ();
9276 /* If we are comparing an expression that just has comparisons
9277 of two integer values, arithmetic expressions of those comparisons,
9278 and constants, we can simplify it. There are only three cases
9279 to check: the two values can either be equal, the first can be
9280 greater, or the second can be greater. Fold the expression for
9281 those three values. Since each value must be 0 or 1, we have
9282 eight possibilities, each of which corresponds to the constant 0
9283 or 1 or one of the six possible comparisons.
9285 This handles common cases like (a > b) == 0 but also handles
9286 expressions like ((x > y) - (y > x)) > 0, which supposedly
9287 occur in macroized code. */
9289 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9291 tree cval1 = 0, cval2 = 0;
9292 int save_p = 0;
9294 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9295 /* Don't handle degenerate cases here; they should already
9296 have been handled anyway. */
9297 && cval1 != 0 && cval2 != 0
9298 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9299 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9300 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9301 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9302 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9303 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9304 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9306 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9307 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9309 /* We can't just pass T to eval_subst in case cval1 or cval2
9310 was the same as ARG1. */
9312 tree high_result
9313 = fold_build2_loc (loc, code, type,
9314 eval_subst (loc, arg0, cval1, maxval,
9315 cval2, minval),
9316 arg1);
9317 tree equal_result
9318 = fold_build2_loc (loc, code, type,
9319 eval_subst (loc, arg0, cval1, maxval,
9320 cval2, maxval),
9321 arg1);
9322 tree low_result
9323 = fold_build2_loc (loc, code, type,
9324 eval_subst (loc, arg0, cval1, minval,
9325 cval2, maxval),
9326 arg1);
9328 /* All three of these results should be 0 or 1. Confirm they are.
9329 Then use those values to select the proper code to use. */
9331 if (TREE_CODE (high_result) == INTEGER_CST
9332 && TREE_CODE (equal_result) == INTEGER_CST
9333 && TREE_CODE (low_result) == INTEGER_CST)
9335 /* Make a 3-bit mask with the high-order bit being the
9336 value for `>', the next for `=', and the low for `<'. */
9337 switch ((integer_onep (high_result) * 4)
9338 + (integer_onep (equal_result) * 2)
9339 + integer_onep (low_result))
9341 case 0:
9342 /* Always false. */
9343 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9344 case 1:
9345 code = LT_EXPR;
9346 break;
9347 case 2:
9348 code = EQ_EXPR;
9349 break;
9350 case 3:
9351 code = LE_EXPR;
9352 break;
9353 case 4:
9354 code = GT_EXPR;
9355 break;
9356 case 5:
9357 code = NE_EXPR;
9358 break;
9359 case 6:
9360 code = GE_EXPR;
9361 break;
9362 case 7:
9363 /* Always true. */
9364 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9367 if (save_p)
9369 tem = save_expr (build2 (code, type, cval1, cval2));
9370 SET_EXPR_LOCATION (tem, loc);
9371 return tem;
9373 return fold_build2_loc (loc, code, type, cval1, cval2);
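/* Illustrative example, not part of the original source: for
   (a > b) == 0, the three trial substitutions give
   high_result == 0, equal_result == 1 and low_result == 1,
   i.e. mask value 3, so the expression folds to a <= b. */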
9378 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9379 into a single range test. */
9380 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9381 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9382 && TREE_CODE (arg1) == INTEGER_CST
9383 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9384 && !integer_zerop (TREE_OPERAND (arg0, 1))
9385 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9386 && !TREE_OVERFLOW (arg1))
9388 tem = fold_div_compare (loc, code, type, arg0, arg1);
9389 if (tem != NULL_TREE)
9390 return tem;
9393 /* Fold ~X op ~Y as Y op X. */
9394 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9395 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9397 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9398 return fold_build2_loc (loc, code, type,
9399 fold_convert_loc (loc, cmp_type,
9400 TREE_OPERAND (arg1, 0)),
9401 TREE_OPERAND (arg0, 0));
9404 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9405 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9406 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9408 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9409 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9410 TREE_OPERAND (arg0, 0),
9411 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9412 fold_convert_loc (loc, cmp_type, arg1)));
9415 return NULL_TREE;
9419 /* Subroutine of fold_binary. Optimize complex multiplications of the
9420 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9421 argument EXPR represents the expression "z" of type TYPE. */
9423 static tree
9424 fold_mult_zconjz (location_t loc, tree type, tree expr)
9426 tree itype = TREE_TYPE (type);
9427 tree rpart, ipart, tem;
9429 if (TREE_CODE (expr) == COMPLEX_EXPR)
9431 rpart = TREE_OPERAND (expr, 0);
9432 ipart = TREE_OPERAND (expr, 1);
9434 else if (TREE_CODE (expr) == COMPLEX_CST)
9436 rpart = TREE_REALPART (expr);
9437 ipart = TREE_IMAGPART (expr);
9439 else
9441 expr = save_expr (expr);
9442 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9443 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9446 rpart = save_expr (rpart);
9447 ipart = save_expr (ipart);
9448 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9449 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9450 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9451 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9452 build_zero_cst (itype));
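/* Illustrative example, not part of the original source: for
   _Complex double z = x + y*I, z * ~z (z times its conjugate)
   becomes COMPLEX_EXPR <x*x + y*y, 0.0>, with the parts wrapped
   in SAVE_EXPRs so each is evaluated only once. */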
9456 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9457 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9458 guarantees that P and N have the same least significant log2(M) bits.
9459 N is not otherwise constrained. In particular, N is not normalized to
9460 0 <= N < M as is common. In general, the precise value of P is unknown.
9461 M is chosen as large as possible such that constant N can be determined.
9463 Returns M and sets *RESIDUE to N.
9465 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9466 account. This is not always possible due to PR 35705.
9469 static unsigned HOST_WIDE_INT
9470 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9471 bool allow_func_align)
9473 enum tree_code code;
9475 *residue = 0;
9477 code = TREE_CODE (expr);
9478 if (code == ADDR_EXPR)
9480 unsigned int bitalign;
9481 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9482 *residue /= BITS_PER_UNIT;
9483 return bitalign / BITS_PER_UNIT;
9485 else if (code == POINTER_PLUS_EXPR)
9487 tree op0, op1;
9488 unsigned HOST_WIDE_INT modulus;
9489 enum tree_code inner_code;
9491 op0 = TREE_OPERAND (expr, 0);
9492 STRIP_NOPS (op0);
9493 modulus = get_pointer_modulus_and_residue (op0, residue,
9494 allow_func_align);
9496 op1 = TREE_OPERAND (expr, 1);
9497 STRIP_NOPS (op1);
9498 inner_code = TREE_CODE (op1);
9499 if (inner_code == INTEGER_CST)
9501 *residue += TREE_INT_CST_LOW (op1);
9502 return modulus;
9504 else if (inner_code == MULT_EXPR)
9506 op1 = TREE_OPERAND (op1, 1);
9507 if (TREE_CODE (op1) == INTEGER_CST)
9509 unsigned HOST_WIDE_INT align;
9511 /* Compute the greatest power-of-2 divisor of op1. */
9512 align = TREE_INT_CST_LOW (op1);
9513 align &= -align;
9515 /* If align is non-zero and less than modulus, replace
9516 modulus with align. If align is 0, then either op1 is 0
9517 or the greatest power-of-2 divisor of op1 doesn't fit in an
9518 unsigned HOST_WIDE_INT. In either case, no additional
9519 constraint is imposed. */
9520 if (align)
9521 modulus = MIN (modulus, align);
9523 return modulus;
9528 /* If we get here, we were unable to determine anything useful about the
9529 expression. */
9530 return 1;
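/* Illustrative example, not part of the original source: for
   char buf[32] __attribute__ ((aligned (16))); the address
   &buf[5] yields modulus 16 and *residue 5; adding i * 8 via
   POINTER_PLUS_EXPR clamps the modulus to 8 while the residue
   stays 5, i.e. the pointer is known to be 5 mod 8. */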
9533 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9534 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9536 static bool
9537 vec_cst_ctor_to_array (tree arg, tree *elts)
9539 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9541 if (TREE_CODE (arg) == VECTOR_CST)
9543 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9544 elts[i] = VECTOR_CST_ELT (arg, i);
9546 else if (TREE_CODE (arg) == CONSTRUCTOR)
9548 constructor_elt *elt;
9550 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9551 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9552 return false;
9553 else
9554 elts[i] = elt->value;
9556 else
9557 return false;
9558 for (; i < nelts; i++)
9559 elts[i]
9560 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9561 return true;
9564 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9565 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9566 NULL_TREE otherwise. */
9568 static tree
9569 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9571 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9572 tree *elts;
9573 bool need_ctor = false;
9575 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9576 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9577 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9578 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9579 return NULL_TREE;
9581 elts = XALLOCAVEC (tree, nelts * 3);
9582 if (!vec_cst_ctor_to_array (arg0, elts)
9583 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9584 return NULL_TREE;
9586 for (i = 0; i < nelts; i++)
9588 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9589 need_ctor = true;
9590 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9593 if (need_ctor)
9595 vec<constructor_elt, va_gc> *v;
9596 vec_alloc (v, nelts);
9597 for (i = 0; i < nelts; i++)
9598 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9599 return build_constructor (type, v);
9601 else
9602 return build_vector (type, &elts[2 * nelts]);
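/* Illustrative example, not part of the original source: with
   four-element constant vectors arg0 = {10, 11, 12, 13} and
   arg1 = {20, 21, 22, 23}, the selector {0, 4, 1, 5} indexes the
   concatenation of the two inputs and folds to the constant
   vector {10, 20, 11, 21}. */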
9605 /* Try to fold a pointer difference of type TYPE between two address
9606 expressions of array references AREF0 and AREF1 using location LOC. Return a
9607 simplified expression for the difference or NULL_TREE. */
9609 static tree
9610 fold_addr_of_array_ref_difference (location_t loc, tree type,
9611 tree aref0, tree aref1)
9613 tree base0 = TREE_OPERAND (aref0, 0);
9614 tree base1 = TREE_OPERAND (aref1, 0);
9615 tree base_offset = build_int_cst (type, 0);
9617 /* If the bases are array references as well, recurse. If the bases
9618 are pointer indirections compute the difference of the pointers.
9619 If the bases are equal, we are set. */
9620 if ((TREE_CODE (base0) == ARRAY_REF
9621 && TREE_CODE (base1) == ARRAY_REF
9622 && (base_offset
9623 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9624 || (INDIRECT_REF_P (base0)
9625 && INDIRECT_REF_P (base1)
9626 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9627 TREE_OPERAND (base0, 0),
9628 TREE_OPERAND (base1, 0))))
9629 || operand_equal_p (base0, base1, 0))
9631 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9632 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9633 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9634 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9635 return fold_build2_loc (loc, PLUS_EXPR, type,
9636 base_offset,
9637 fold_build2_loc (loc, MULT_EXPR, type,
9638 diff, esz));
9640 return NULL_TREE;
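/* Illustrative example, not part of the original source: for
   int a[10]; the difference &a[i] - &a[j] reaches this function
   as the ARRAY_REFs a[i] and a[j] with equal bases, and folds to
   0 + (i - j) * 4, assuming a 4-byte element size. */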
9643 /* If the real or vector real constant CST of type TYPE has an exact
9644 inverse, return it, else return NULL. */
9646 static tree
9647 exact_inverse (tree type, tree cst)
9649 REAL_VALUE_TYPE r;
9650 tree unit_type, *elts;
9651 machine_mode mode;
9652 unsigned vec_nelts, i;
9654 switch (TREE_CODE (cst))
9656 case REAL_CST:
9657 r = TREE_REAL_CST (cst);
9659 if (exact_real_inverse (TYPE_MODE (type), &r))
9660 return build_real (type, r);
9662 return NULL_TREE;
9664 case VECTOR_CST:
9665 vec_nelts = VECTOR_CST_NELTS (cst);
9666 elts = XALLOCAVEC (tree, vec_nelts);
9667 unit_type = TREE_TYPE (type);
9668 mode = TYPE_MODE (unit_type);
9670 for (i = 0; i < vec_nelts; i++)
9672 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9673 if (!exact_real_inverse (mode, &r))
9674 return NULL_TREE;
9675 elts[i] = build_real (unit_type, r);
9678 return build_vector (type, elts);
9680 default:
9681 return NULL_TREE;
9685 /* Mask out the tz least significant bits of X of type TYPE where
9686 tz is the number of trailing zeroes in Y. */
9687 static wide_int
9688 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9690 int tz = wi::ctz (y);
9691 if (tz > 0)
9692 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9693 return x;
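/* Illustrative example, not part of the original source: with
   y == 24 (three trailing zero bits) and x == 0b10111, the
   result is 0b10000, i.e. the low three bits of x are cleared. */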
9696 /* Return true when T is an address and is known to be nonzero.
9697 For floating point we further ensure that T is not denormal.
9698 Similar logic is present in nonzero_address in rtlanal.h.
9700 If the return value is based on the assumption that signed overflow
9701 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9702 change *STRICT_OVERFLOW_P. */
9704 static bool
9705 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9707 tree type = TREE_TYPE (t);
9708 enum tree_code code;
9710 /* Doing something useful for floating point would need more work. */
9711 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9712 return false;
9714 code = TREE_CODE (t);
9715 switch (TREE_CODE_CLASS (code))
9717 case tcc_unary:
9718 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9719 strict_overflow_p);
9720 case tcc_binary:
9721 case tcc_comparison:
9722 return tree_binary_nonzero_warnv_p (code, type,
9723 TREE_OPERAND (t, 0),
9724 TREE_OPERAND (t, 1),
9725 strict_overflow_p);
9726 case tcc_constant:
9727 case tcc_declaration:
9728 case tcc_reference:
9729 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9731 default:
9732 break;
9735 switch (code)
9737 case TRUTH_NOT_EXPR:
9738 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9739 strict_overflow_p);
9741 case TRUTH_AND_EXPR:
9742 case TRUTH_OR_EXPR:
9743 case TRUTH_XOR_EXPR:
9744 return tree_binary_nonzero_warnv_p (code, type,
9745 TREE_OPERAND (t, 0),
9746 TREE_OPERAND (t, 1),
9747 strict_overflow_p);
9749 case COND_EXPR:
9750 case CONSTRUCTOR:
9751 case OBJ_TYPE_REF:
9752 case ASSERT_EXPR:
9753 case ADDR_EXPR:
9754 case WITH_SIZE_EXPR:
9755 case SSA_NAME:
9756 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9758 case COMPOUND_EXPR:
9759 case MODIFY_EXPR:
9760 case BIND_EXPR:
9761 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9762 strict_overflow_p);
9764 case SAVE_EXPR:
9765 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9766 strict_overflow_p);
9768 case CALL_EXPR:
9770 tree fndecl = get_callee_fndecl (t);
9771 if (!fndecl) return false;
9772 if (flag_delete_null_pointer_checks && !flag_check_new
9773 && DECL_IS_OPERATOR_NEW (fndecl)
9774 && !TREE_NOTHROW (fndecl))
9775 return true;
9776 if (flag_delete_null_pointer_checks
9777 && lookup_attribute ("returns_nonnull",
9778 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9779 return true;
9780 return alloca_call_p (t);
9783 default:
9784 break;
9786 return false;
9789 /* Return true when T is an address and is known to be nonzero.
9790 Handle warnings about undefined signed overflow. */
9792 static bool
9793 tree_expr_nonzero_p (tree t)
9795 bool ret, strict_overflow_p;
9797 strict_overflow_p = false;
9798 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9799 if (strict_overflow_p)
9800 fold_overflow_warning (("assuming signed overflow does not occur when "
9801 "determining that expression is always "
9802 "non-zero"),
9803 WARN_STRICT_OVERFLOW_MISC);
9804 return ret;
9807 /* Fold a binary expression of code CODE and type TYPE with operands
9808 OP0 and OP1. LOC is the location of the resulting expression.
9809 Return the folded expression if folding is successful. Otherwise,
9810 return NULL_TREE. */
9812 tree
9813 fold_binary_loc (location_t loc,
9814 enum tree_code code, tree type, tree op0, tree op1)
9816 enum tree_code_class kind = TREE_CODE_CLASS (code);
9817 tree arg0, arg1, tem;
9818 tree t1 = NULL_TREE;
9819 bool strict_overflow_p;
9820 unsigned int prec;
9822 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9823 && TREE_CODE_LENGTH (code) == 2
9824 && op0 != NULL_TREE
9825 && op1 != NULL_TREE);
9827 arg0 = op0;
9828 arg1 = op1;
9830 /* Strip any conversions that don't change the mode. This is
9831 safe for every expression, except for a comparison expression
9832 because its signedness is derived from its operands. So, in
9833 the latter case, only strip conversions that don't change the
9834 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9835 preserved.
9837 Note that this is done as an internal manipulation within the
9838 constant folder, in order to find the simplest representation
9839 of the arguments so that their form can be studied. In any
9840 case, the appropriate type conversions should be put back in
9841 the tree that will get out of the constant folder. */
9843 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9845 STRIP_SIGN_NOPS (arg0);
9846 STRIP_SIGN_NOPS (arg1);
9848 else
9850 STRIP_NOPS (arg0);
9851 STRIP_NOPS (arg1);
9854 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9855 constant but we can't do arithmetic on them. */
9856 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9857 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9858 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9859 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9860 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9861 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9862 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9864 if (kind == tcc_binary)
9866 /* Make sure type and arg0 have the same saturating flag. */
9867 gcc_assert (TYPE_SATURATING (type)
9868 == TYPE_SATURATING (TREE_TYPE (arg0)));
9869 tem = const_binop (code, arg0, arg1);
9871 else if (kind == tcc_comparison)
9872 tem = fold_relational_const (code, type, arg0, arg1);
9873 else
9874 tem = NULL_TREE;
9876 if (tem != NULL_TREE)
9878 if (TREE_TYPE (tem) != type)
9879 tem = fold_convert_loc (loc, type, tem);
9880 return tem;
9884 /* If this is a commutative operation, and ARG0 is a constant, move it
9885 to ARG1 to reduce the number of tests below. */
9886 if (commutative_tree_code (code)
9887 && tree_swap_operands_p (arg0, arg1, true))
9888 return fold_build2_loc (loc, code, type, op1, op0);
9890 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9891 to ARG1 to reduce the number of tests below. */
9892 if (kind == tcc_comparison
9893 && tree_swap_operands_p (arg0, arg1, true))
9894 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9896 tem = generic_simplify (loc, code, type, op0, op1);
9897 if (tem)
9898 return tem;
9900 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9902 First check for cases where an arithmetic operation is applied to a
9903 compound, conditional, or comparison operation. Push the arithmetic
9904 operation inside the compound or conditional to see if any folding
9905 can then be done. Convert comparison to conditional for this purpose.
9906 This also optimizes non-constant cases that used to be done in
9907 expand_expr.
9909 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9910 one of the operands is a comparison and the other is a comparison, a
9911 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9912 code below would make the expression more complex. Change it to a
9913 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9914 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9916 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9917 || code == EQ_EXPR || code == NE_EXPR)
9918 && TREE_CODE (type) != VECTOR_TYPE
9919 && ((truth_value_p (TREE_CODE (arg0))
9920 && (truth_value_p (TREE_CODE (arg1))
9921 || (TREE_CODE (arg1) == BIT_AND_EXPR
9922 && integer_onep (TREE_OPERAND (arg1, 1)))))
9923 || (truth_value_p (TREE_CODE (arg1))
9924 && (truth_value_p (TREE_CODE (arg0))
9925 || (TREE_CODE (arg0) == BIT_AND_EXPR
9926 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9928 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9929 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9930 : TRUTH_XOR_EXPR,
9931 boolean_type_node,
9932 fold_convert_loc (loc, boolean_type_node, arg0),
9933 fold_convert_loc (loc, boolean_type_node, arg1));
9935 if (code == EQ_EXPR)
9936 tem = invert_truthvalue_loc (loc, tem);
9938 return fold_convert_loc (loc, type, tem);
9941 if (TREE_CODE_CLASS (code) == tcc_binary
9942 || TREE_CODE_CLASS (code) == tcc_comparison)
9944 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9946 tem = fold_build2_loc (loc, code, type,
9947 fold_convert_loc (loc, TREE_TYPE (op0),
9948 TREE_OPERAND (arg0, 1)), op1);
9949 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9950 tem);
9952 if (TREE_CODE (arg1) == COMPOUND_EXPR
9953 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9955 tem = fold_build2_loc (loc, code, type, op0,
9956 fold_convert_loc (loc, TREE_TYPE (op1),
9957 TREE_OPERAND (arg1, 1)));
9958 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9959 tem);
9962 if (TREE_CODE (arg0) == COND_EXPR
9963 || TREE_CODE (arg0) == VEC_COND_EXPR
9964 || COMPARISON_CLASS_P (arg0))
9966 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9967 arg0, arg1,
9968 /*cond_first_p=*/1);
9969 if (tem != NULL_TREE)
9970 return tem;
9973 if (TREE_CODE (arg1) == COND_EXPR
9974 || TREE_CODE (arg1) == VEC_COND_EXPR
9975 || COMPARISON_CLASS_P (arg1))
9977 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9978 arg1, arg0,
9979 /*cond_first_p=*/0);
9980 if (tem != NULL_TREE)
9981 return tem;
9985 switch (code)
9987 case MEM_REF:
9988 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9989 if (TREE_CODE (arg0) == ADDR_EXPR
9990 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9992 tree iref = TREE_OPERAND (arg0, 0);
9993 return fold_build2 (MEM_REF, type,
9994 TREE_OPERAND (iref, 0),
9995 int_const_binop (PLUS_EXPR, arg1,
9996 TREE_OPERAND (iref, 1)));
9999 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10000 if (TREE_CODE (arg0) == ADDR_EXPR
10001 && handled_component_p (TREE_OPERAND (arg0, 0)))
10003 tree base;
10004 HOST_WIDE_INT coffset;
10005 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10006 &coffset);
10007 if (!base)
10008 return NULL_TREE;
10009 return fold_build2 (MEM_REF, type,
10010 build_fold_addr_expr (base),
10011 int_const_binop (PLUS_EXPR, arg1,
10012 size_int (coffset)));
10015 return NULL_TREE;
10017 case POINTER_PLUS_EXPR:
10018 /* 0 +p index -> (type)index */
10019 if (integer_zerop (arg0))
10020 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10022 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10023 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10024 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10025 return fold_convert_loc (loc, type,
10026 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10027 fold_convert_loc (loc, sizetype,
10028 arg1),
10029 fold_convert_loc (loc, sizetype,
10030 arg0)));
10032 /* (PTR +p B) +p A -> PTR +p (B + A) */
10033 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10035 tree inner;
10036 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10037 tree arg00 = TREE_OPERAND (arg0, 0);
10038 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10039 arg01, fold_convert_loc (loc, sizetype, arg1));
10040 return fold_convert_loc (loc, type,
10041 fold_build_pointer_plus_loc (loc,
10042 arg00, inner));
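/* Illustrative example, not part of the original source: the
   re-association above turns (p p+ 4) p+ 8 into p p+ 12, with
   both offsets added in sizetype. */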
10045 /* PTR_CST +p CST -> CST1 */
10046 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10047 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10048 fold_convert_loc (loc, type, arg1));
10050 return NULL_TREE;
10052 case PLUS_EXPR:
10053 /* A + (-B) -> A - B */
10054 if (TREE_CODE (arg1) == NEGATE_EXPR
10055 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10056 return fold_build2_loc (loc, MINUS_EXPR, type,
10057 fold_convert_loc (loc, type, arg0),
10058 fold_convert_loc (loc, type,
10059 TREE_OPERAND (arg1, 0)));
10060 /* (-A) + B -> B - A */
10061 if (TREE_CODE (arg0) == NEGATE_EXPR
10062 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10063 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10064 return fold_build2_loc (loc, MINUS_EXPR, type,
10065 fold_convert_loc (loc, type, arg1),
10066 fold_convert_loc (loc, type,
10067 TREE_OPERAND (arg0, 0)));
10069 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10071 /* Convert ~A + 1 to -A. */
10072 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10073 && integer_each_onep (arg1))
10074 return fold_build1_loc (loc, NEGATE_EXPR, type,
10075 fold_convert_loc (loc, type,
10076 TREE_OPERAND (arg0, 0)));
10078 /* ~X + X is -1. */
10079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10080 && !TYPE_OVERFLOW_TRAPS (type))
10082 tree tem = TREE_OPERAND (arg0, 0);
10084 STRIP_NOPS (tem);
10085 if (operand_equal_p (tem, arg1, 0))
10087 t1 = build_all_ones_cst (type);
10088 return omit_one_operand_loc (loc, type, t1, arg1);
10092 /* X + ~X is -1. */
10093 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10094 && !TYPE_OVERFLOW_TRAPS (type))
10096 tree tem = TREE_OPERAND (arg1, 0);
10098 STRIP_NOPS (tem);
10099 if (operand_equal_p (arg0, tem, 0))
10101 t1 = build_all_ones_cst (type);
10102 return omit_one_operand_loc (loc, type, t1, arg0);
10106 /* X + (X / CST) * -CST is X % CST. */
10107 if (TREE_CODE (arg1) == MULT_EXPR
10108 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10109 && operand_equal_p (arg0,
10110 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10112 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10113 tree cst1 = TREE_OPERAND (arg1, 1);
10114 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10115 cst1, cst0);
10116 if (sum && integer_zerop (sum))
10117 return fold_convert_loc (loc, type,
10118 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10119 TREE_TYPE (arg0), arg0,
10120 cst0));
10124 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10125 one. Make sure the type is not saturating and has the signedness of
10126 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10127 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10128 if ((TREE_CODE (arg0) == MULT_EXPR
10129 || TREE_CODE (arg1) == MULT_EXPR)
10130 && !TYPE_SATURATING (type)
10131 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10132 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10133 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10135 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10136 if (tem)
10137 return tem;
10140 if (! FLOAT_TYPE_P (type))
10142 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10143 with a constant, and the two constants have no bits in common,
10144 we should treat this as a BIT_IOR_EXPR since this may produce more
10145 simplifications. */
10146 if (TREE_CODE (arg0) == BIT_AND_EXPR
10147 && TREE_CODE (arg1) == BIT_AND_EXPR
10148 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10149 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10150 && wi::bit_and (TREE_OPERAND (arg0, 1),
10151 TREE_OPERAND (arg1, 1)) == 0)
10153 code = BIT_IOR_EXPR;
10154 goto bit_ior;
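/* Illustrative example, not part of the original source:
   (x & 0xf0) + (y & 0x0f) has mask constants with no common
   bits, so it is treated as (x & 0xf0) | (y & 0x0f), which may
   enable further BIT_IOR_EXPR simplifications. */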
10157 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10158 (plus (plus (mult) (mult)) (foo)) so that we can
10159 take advantage of the factoring cases below. */
10160 if (TYPE_OVERFLOW_WRAPS (type)
10161 && (((TREE_CODE (arg0) == PLUS_EXPR
10162 || TREE_CODE (arg0) == MINUS_EXPR)
10163 && TREE_CODE (arg1) == MULT_EXPR)
10164 || ((TREE_CODE (arg1) == PLUS_EXPR
10165 || TREE_CODE (arg1) == MINUS_EXPR)
10166 && TREE_CODE (arg0) == MULT_EXPR)))
10168 tree parg0, parg1, parg, marg;
10169 enum tree_code pcode;
10171 if (TREE_CODE (arg1) == MULT_EXPR)
10172 parg = arg0, marg = arg1;
10173 else
10174 parg = arg1, marg = arg0;
10175 pcode = TREE_CODE (parg);
10176 parg0 = TREE_OPERAND (parg, 0);
10177 parg1 = TREE_OPERAND (parg, 1);
10178 STRIP_NOPS (parg0);
10179 STRIP_NOPS (parg1);
10181 if (TREE_CODE (parg0) == MULT_EXPR
10182 && TREE_CODE (parg1) != MULT_EXPR)
10183 return fold_build2_loc (loc, pcode, type,
10184 fold_build2_loc (loc, PLUS_EXPR, type,
10185 fold_convert_loc (loc, type,
10186 parg0),
10187 fold_convert_loc (loc, type,
10188 marg)),
10189 fold_convert_loc (loc, type, parg1));
10190 if (TREE_CODE (parg0) != MULT_EXPR
10191 && TREE_CODE (parg1) == MULT_EXPR)
10192 return
10193 fold_build2_loc (loc, PLUS_EXPR, type,
10194 fold_convert_loc (loc, type, parg0),
10195 fold_build2_loc (loc, pcode, type,
10196 fold_convert_loc (loc, type, marg),
10197 fold_convert_loc (loc, type,
10198 parg1)));
10201 else
10203 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10204 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10205 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10207 /* Likewise if the operands are reversed. */
10208 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10209 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10211 /* Convert X + -C into X - C. */
10212 if (TREE_CODE (arg1) == REAL_CST
10213 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10215 tem = fold_negate_const (arg1, type);
10216 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10217 return fold_build2_loc (loc, MINUS_EXPR, type,
10218 fold_convert_loc (loc, type, arg0),
10219 fold_convert_loc (loc, type, tem));
10222 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10223 to __complex__ ( x, y ). This is not the same for SNaNs or
10224 if signed zeros are involved. */
10225 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10226 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10227 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10229 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10230 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10231 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10232 bool arg0rz = false, arg0iz = false;
10233 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10234 || (arg0i && (arg0iz = real_zerop (arg0i))))
10236 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10237 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10238 if (arg0rz && arg1i && real_zerop (arg1i))
10240 tree rp = arg1r ? arg1r
10241 : build1 (REALPART_EXPR, rtype, arg1);
10242 tree ip = arg0i ? arg0i
10243 : build1 (IMAGPART_EXPR, rtype, arg0);
10244 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10246 else if (arg0iz && arg1r && real_zerop (arg1r))
10248 tree rp = arg0r ? arg0r
10249 : build1 (REALPART_EXPR, rtype, arg0);
10250 tree ip = arg1i ? arg1i
10251 : build1 (IMAGPART_EXPR, rtype, arg1);
10252 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10257 if (flag_unsafe_math_optimizations
10258 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10259 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10260 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10261 return tem;
10263 /* Convert x+x into x*2.0. */
10264 if (operand_equal_p (arg0, arg1, 0)
10265 && SCALAR_FLOAT_TYPE_P (type))
10266 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10267 build_real (type, dconst2));
10269 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10270 We associate floats only if the user has specified
10271 -fassociative-math. */
10272 if (flag_associative_math
10273 && TREE_CODE (arg1) == PLUS_EXPR
10274 && TREE_CODE (arg0) != MULT_EXPR)
10276 tree tree10 = TREE_OPERAND (arg1, 0);
10277 tree tree11 = TREE_OPERAND (arg1, 1);
10278 if (TREE_CODE (tree11) == MULT_EXPR
10279 && TREE_CODE (tree10) == MULT_EXPR)
10281 tree tree0;
10282 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10283 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10286 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10287 We associate floats only if the user has specified
10288 -fassociative-math. */
10289 if (flag_associative_math
10290 && TREE_CODE (arg0) == PLUS_EXPR
10291 && TREE_CODE (arg1) != MULT_EXPR)
10293 tree tree00 = TREE_OPERAND (arg0, 0);
10294 tree tree01 = TREE_OPERAND (arg0, 1);
10295 if (TREE_CODE (tree01) == MULT_EXPR
10296 && TREE_CODE (tree00) == MULT_EXPR)
10298 tree tree0;
10299 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10300 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10305 bit_rotate:
10306 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10307 is a rotate of A by C1 bits. */
10308 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10309 is a rotate of A by B bits. */
10311 enum tree_code code0, code1;
10312 tree rtype;
10313 code0 = TREE_CODE (arg0);
10314 code1 = TREE_CODE (arg1);
10315 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10316 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10317 && operand_equal_p (TREE_OPERAND (arg0, 0),
10318 TREE_OPERAND (arg1, 0), 0)
10319 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10320 TYPE_UNSIGNED (rtype))
10321 /* Only create rotates in complete modes. Other cases are not
10322 expanded properly. */
10323 && (element_precision (rtype)
10324 == element_precision (TYPE_MODE (rtype))))
10326 tree tree01, tree11;
10327 enum tree_code code01, code11;
10329 tree01 = TREE_OPERAND (arg0, 1);
10330 tree11 = TREE_OPERAND (arg1, 1);
10331 STRIP_NOPS (tree01);
10332 STRIP_NOPS (tree11);
10333 code01 = TREE_CODE (tree01);
10334 code11 = TREE_CODE (tree11);
10335 if (code01 == INTEGER_CST
10336 && code11 == INTEGER_CST
10337 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10338 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10340 tem = build2_loc (loc, LROTATE_EXPR,
10341 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10342 TREE_OPERAND (arg0, 0),
10343 code0 == LSHIFT_EXPR ? tree01 : tree11);
10344 return fold_convert_loc (loc, type, tem);
10346 else if (code11 == MINUS_EXPR)
10348 tree tree110, tree111;
10349 tree110 = TREE_OPERAND (tree11, 0);
10350 tree111 = TREE_OPERAND (tree11, 1);
10351 STRIP_NOPS (tree110);
10352 STRIP_NOPS (tree111);
10353 if (TREE_CODE (tree110) == INTEGER_CST
10354 && 0 == compare_tree_int (tree110,
10355 element_precision
10356 (TREE_TYPE (TREE_OPERAND
10357 (arg0, 0))))
10358 && operand_equal_p (tree01, tree111, 0))
10359 return
10360 fold_convert_loc (loc, type,
10361 build2 ((code0 == LSHIFT_EXPR
10362 ? LROTATE_EXPR
10363 : RROTATE_EXPR),
10364 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10365 TREE_OPERAND (arg0, 0), tree01));
10367 else if (code01 == MINUS_EXPR)
10369 tree tree010, tree011;
10370 tree010 = TREE_OPERAND (tree01, 0);
10371 tree011 = TREE_OPERAND (tree01, 1);
10372 STRIP_NOPS (tree010);
10373 STRIP_NOPS (tree011);
10374 if (TREE_CODE (tree010) == INTEGER_CST
10375 && 0 == compare_tree_int (tree010,
10376 element_precision
10377 (TREE_TYPE (TREE_OPERAND
10378 (arg0, 0))))
10379 && operand_equal_p (tree11, tree011, 0))
10380 return fold_convert_loc
10381 (loc, type,
10382 build2 ((code0 != LSHIFT_EXPR
10383 ? LROTATE_EXPR
10384 : RROTATE_EXPR),
10385 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10386 TREE_OPERAND (arg0, 0), tree11));
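/* Illustrative example, not part of the original source: for a
   32-bit unsigned int x, both (x << 3) + (x >> 29) and
   (x << n) + (x >> (32 - n)) are recognized above and rewritten
   as left rotates of x. */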
10391 associate:
10392 /* In most languages, we can't associate operations on floats through
10393 parentheses. Rather than remember where the parentheses were, we
10394 don't associate floats at all, unless the user has specified
10395 -fassociative-math.
10396 And, we need to make sure type is not saturating. */
10398 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10399 && !TYPE_SATURATING (type))
10401 tree var0, con0, lit0, minus_lit0;
10402 tree var1, con1, lit1, minus_lit1;
10403 tree atype = type;
10404 bool ok = true;
10406 /* Split both trees into variables, constants, and literals. Then
10407 associate each group together, the constants with literals,
10408 then the result with variables. This increases the chances of
10409 literals being recombined later and of generating relocatable
10410 expressions for the sum of a constant and literal. */
10411 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10412 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10413 code == MINUS_EXPR);
10415 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10416 if (code == MINUS_EXPR)
10417 code = PLUS_EXPR;
10419 /* With undefined overflow prefer doing association in a type
10420 which wraps on overflow, if that is one of the operand types. */
10421 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10422 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10424 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10425 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10426 atype = TREE_TYPE (arg0);
10427 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10428 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10429 atype = TREE_TYPE (arg1);
10430 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10433 /* With undefined overflow we can only associate constants with one
10434 variable, and constants whose association doesn't overflow. */
10435 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10436 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10438 if (var0 && var1)
10440 tree tmp0 = var0;
10441 tree tmp1 = var1;
10443 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10444 tmp0 = TREE_OPERAND (tmp0, 0);
10445 if (CONVERT_EXPR_P (tmp0)
10446 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10447 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10448 <= TYPE_PRECISION (atype)))
10449 tmp0 = TREE_OPERAND (tmp0, 0);
10450 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10451 tmp1 = TREE_OPERAND (tmp1, 0);
10452 if (CONVERT_EXPR_P (tmp1)
10453 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10454 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10455 <= TYPE_PRECISION (atype)))
10456 tmp1 = TREE_OPERAND (tmp1, 0);
10457 /* The only case we can still associate with two variables
10458 is if they are the same, modulo negation and bit-pattern
10459 preserving conversions. */
10460 if (!operand_equal_p (tmp0, tmp1, 0))
10461 ok = false;
10465 /* Only do something if we found more than two objects. Otherwise,
10466 nothing has changed and we risk infinite recursion. */
10467 if (ok
10468 && (2 < ((var0 != 0) + (var1 != 0)
10469 + (con0 != 0) + (con1 != 0)
10470 + (lit0 != 0) + (lit1 != 0)
10471 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10473 bool any_overflows = false;
10474 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10475 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10476 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10477 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10478 var0 = associate_trees (loc, var0, var1, code, atype);
10479 con0 = associate_trees (loc, con0, con1, code, atype);
10480 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10481 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10482 code, atype);
10484 /* Preserve the MINUS_EXPR if the negative part of the literal is
10485 greater than the positive part. Otherwise, the multiplicative
10486 folding code (i.e. extract_muldiv) may be fooled when
10487 unsigned constants are subtracted, like in the following
10488 example: ((X*2 + 4) - 8U)/2. */
10489 if (minus_lit0 && lit0)
10491 if (TREE_CODE (lit0) == INTEGER_CST
10492 && TREE_CODE (minus_lit0) == INTEGER_CST
10493 && tree_int_cst_lt (lit0, minus_lit0))
10495 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10496 MINUS_EXPR, atype);
10497 lit0 = 0;
10499 else
10501 lit0 = associate_trees (loc, lit0, minus_lit0,
10502 MINUS_EXPR, atype);
10503 minus_lit0 = 0;
10507 /* Don't introduce overflows through reassociation. */
10508 if (!any_overflows
10509 && ((lit0 && TREE_OVERFLOW (lit0))
10510 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10511 return NULL_TREE;
10513 if (minus_lit0)
10515 if (con0 == 0)
10516 return
10517 fold_convert_loc (loc, type,
10518 associate_trees (loc, var0, minus_lit0,
10519 MINUS_EXPR, atype));
10520 else
10522 con0 = associate_trees (loc, con0, minus_lit0,
10523 MINUS_EXPR, atype);
10524 return
10525 fold_convert_loc (loc, type,
10526 associate_trees (loc, var0, con0,
10527 PLUS_EXPR, atype));
10531 con0 = associate_trees (loc, con0, lit0, code, atype);
10532 return
10533 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10534 code, atype));
10538 return NULL_TREE;
10540 case MINUS_EXPR:
10541 /* Pointer simplifications for subtraction, simple reassociations. */
10542 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10544 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10545 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10546 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10548 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10549 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10550 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10551 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10552 return fold_build2_loc (loc, PLUS_EXPR, type,
10553 fold_build2_loc (loc, MINUS_EXPR, type,
10554 arg00, arg10),
10555 fold_build2_loc (loc, MINUS_EXPR, type,
10556 arg01, arg11));
10558 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10559 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10561 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10562 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10563 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10564 fold_convert_loc (loc, type, arg1));
10565 if (tmp)
10566 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10568 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10569 simplifies. */
10570 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10572 tree arg10 = fold_convert_loc (loc, type,
10573 TREE_OPERAND (arg1, 0));
10574 tree arg11 = fold_convert_loc (loc, type,
10575 TREE_OPERAND (arg1, 1));
10576 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10577 fold_convert_loc (loc, type, arg0),
10578 arg10);
10579 if (tmp)
10580 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10583 /* A - (-B) -> A + B */
10584 if (TREE_CODE (arg1) == NEGATE_EXPR)
10585 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10586 fold_convert_loc (loc, type,
10587 TREE_OPERAND (arg1, 0)));
10588 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10589 if (TREE_CODE (arg0) == NEGATE_EXPR
10590 && negate_expr_p (arg1)
10591 && reorder_operands_p (arg0, arg1))
10592 return fold_build2_loc (loc, MINUS_EXPR, type,
10593 fold_convert_loc (loc, type,
10594 negate_expr (arg1)),
10595 fold_convert_loc (loc, type,
10596 TREE_OPERAND (arg0, 0)));
10597 /* Convert -A - 1 to ~A. */
10598 if (TREE_CODE (arg0) == NEGATE_EXPR
10599 && integer_each_onep (arg1)
10600 && !TYPE_OVERFLOW_TRAPS (type))
10601 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10602 fold_convert_loc (loc, type,
10603 TREE_OPERAND (arg0, 0)));
10605 /* Convert -1 - A to ~A. */
10606 if (TREE_CODE (type) != COMPLEX_TYPE
10607 && integer_all_onesp (arg0))
10608 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10611 /* X - (X / Y) * Y is X % Y. */
10612 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10613 && TREE_CODE (arg1) == MULT_EXPR
10614 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10615 && operand_equal_p (arg0,
10616 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10617 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10618 TREE_OPERAND (arg1, 1), 0))
10619 return
10620 fold_convert_loc (loc, type,
10621 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10622 arg0, TREE_OPERAND (arg1, 1)));
10624 if (! FLOAT_TYPE_P (type))
10626 if (integer_zerop (arg0))
10627 return negate_expr (fold_convert_loc (loc, type, arg1));
10629 /* Fold A - (A & B) into ~B & A. */
10630 if (!TREE_SIDE_EFFECTS (arg0)
10631 && TREE_CODE (arg1) == BIT_AND_EXPR)
10633 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10635 tree arg10 = fold_convert_loc (loc, type,
10636 TREE_OPERAND (arg1, 0));
10637 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10638 fold_build1_loc (loc, BIT_NOT_EXPR,
10639 type, arg10),
10640 fold_convert_loc (loc, type, arg0));
10642 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10644 tree arg11 = fold_convert_loc (loc,
10645 type, TREE_OPERAND (arg1, 1));
10646 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10647 fold_build1_loc (loc, BIT_NOT_EXPR,
10648 type, arg11),
10649 fold_convert_loc (loc, type, arg0));
10653 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10654 any power of 2 minus 1. */
10655 if (TREE_CODE (arg0) == BIT_AND_EXPR
10656 && TREE_CODE (arg1) == BIT_AND_EXPR
10657 && operand_equal_p (TREE_OPERAND (arg0, 0),
10658 TREE_OPERAND (arg1, 0), 0))
10660 tree mask0 = TREE_OPERAND (arg0, 1);
10661 tree mask1 = TREE_OPERAND (arg1, 1);
10662 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10664 if (operand_equal_p (tem, mask1, 0))
10666 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10667 TREE_OPERAND (arg0, 0), mask1);
10668 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
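/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   with B a power of 2 minus 1, the low bits of A are A & B and the high
   bits are A & ~B, so both sides below equal (high bits) - (low bits);
   unsigned wraparound keeps the identity exact for every A. */
#if 0
#include <assert.h>
static void
demo_mask_minus (unsigned a)
{
  unsigned b = 0xff;  /* any power of 2 minus 1 */
  assert ((a & ~b) - (a & b) == (a ^ b) - b);
}
#endif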
10673 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10674 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10675 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10677 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10678 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10679 (-ARG1 + ARG0) reduces to -ARG1. */
10680 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10681 return negate_expr (fold_convert_loc (loc, type, arg1));
10683 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10684 __complex__ ( x, -y ). This is not the same for SNaNs or if
10685 signed zeros are involved. */
10686 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10687 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10688 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10690 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10691 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10692 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10693 bool arg0rz = false, arg0iz = false;
10694 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10695 || (arg0i && (arg0iz = real_zerop (arg0i))))
10697 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10698 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10699 if (arg0rz && arg1i && real_zerop (arg1i))
10701 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10702 arg1r ? arg1r
10703 : build1 (REALPART_EXPR, rtype, arg1));
10704 tree ip = arg0i ? arg0i
10705 : build1 (IMAGPART_EXPR, rtype, arg0);
10706 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10708 else if (arg0iz && arg1r && real_zerop (arg1r))
10710 tree rp = arg0r ? arg0r
10711 : build1 (REALPART_EXPR, rtype, arg0);
10712 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10713 arg1i ? arg1i
10714 : build1 (IMAGPART_EXPR, rtype, arg1));
10715 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10720 /* A - B -> A + (-B) if B is easily negatable. */
10721 if (negate_expr_p (arg1)
10722 && ((FLOAT_TYPE_P (type)
10723 /* Avoid this transformation if B is a positive REAL_CST. */
10724 && (TREE_CODE (arg1) != REAL_CST
10725 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10726 || INTEGRAL_TYPE_P (type)))
10727 return fold_build2_loc (loc, PLUS_EXPR, type,
10728 fold_convert_loc (loc, type, arg0),
10729 fold_convert_loc (loc, type,
10730 negate_expr (arg1)));
10732 /* Try folding difference of addresses. */
10734 HOST_WIDE_INT diff;
10736 if ((TREE_CODE (arg0) == ADDR_EXPR
10737 || TREE_CODE (arg1) == ADDR_EXPR)
10738 && ptr_difference_const (arg0, arg1, &diff))
10739 return build_int_cst_type (type, diff);
10742 /* Fold &a[i] - &a[j] to i-j. */
10743 if (TREE_CODE (arg0) == ADDR_EXPR
10744 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10745 && TREE_CODE (arg1) == ADDR_EXPR
10746 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10748 tree tem = fold_addr_of_array_ref_difference (loc, type,
10749 TREE_OPERAND (arg0, 0),
10750 TREE_OPERAND (arg1, 0));
10751 if (tem)
10752 return tem;
10755 if (FLOAT_TYPE_P (type)
10756 && flag_unsafe_math_optimizations
10757 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10758 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10759 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10760 return tem;
10762 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same, or
10763 with one factor being an implicit 1. Make sure the type is not saturating
10764 and has the signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10765 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10766 if ((TREE_CODE (arg0) == MULT_EXPR
10767 || TREE_CODE (arg1) == MULT_EXPR)
10768 && !TYPE_SATURATING (type)
10769 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10770 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10771 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10773 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10774 if (tem)
10775 return tem;
10778 goto associate;
10780 case MULT_EXPR:
10781 /* (-A) * (-B) -> A * B */
10782 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10783 return fold_build2_loc (loc, MULT_EXPR, type,
10784 fold_convert_loc (loc, type,
10785 TREE_OPERAND (arg0, 0)),
10786 fold_convert_loc (loc, type,
10787 negate_expr (arg1)));
10788 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10789 return fold_build2_loc (loc, MULT_EXPR, type,
10790 fold_convert_loc (loc, type,
10791 negate_expr (arg0)),
10792 fold_convert_loc (loc, type,
10793 TREE_OPERAND (arg1, 0)));
10795 if (! FLOAT_TYPE_P (type))
10797 /* Transform x * -1 into -x. Make sure to do the negation
10798 on the original operand with conversions not stripped
10799 because we can only strip non-sign-changing conversions. */
10800 if (integer_minus_onep (arg1))
10801 return fold_convert_loc (loc, type, negate_expr (op0));
10802 /* Transform x * -C into -x * C if x is easily negatable. */
10803 if (TREE_CODE (arg1) == INTEGER_CST
10804 && tree_int_cst_sgn (arg1) == -1
10805 && negate_expr_p (arg0)
10806 && (tem = negate_expr (arg1)) != arg1
10807 && !TREE_OVERFLOW (tem))
10808 return fold_build2_loc (loc, MULT_EXPR, type,
10809 fold_convert_loc (loc, type,
10810 negate_expr (arg0)),
10811 tem);
10813 /* (a * (1 << b)) is (a << b) */
10814 if (TREE_CODE (arg1) == LSHIFT_EXPR
10815 && integer_onep (TREE_OPERAND (arg1, 0)))
10816 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10817 TREE_OPERAND (arg1, 1));
10818 if (TREE_CODE (arg0) == LSHIFT_EXPR
10819 && integer_onep (TREE_OPERAND (arg0, 0)))
10820 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10821 TREE_OPERAND (arg0, 1));
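/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   a * (1 << b) and a << b agree modulo 2^precision for unsigned types,
   provided the shift count stays below the operand's precision. */
#if 0
#include <assert.h>
#include <limits.h>
static void
demo_mul_to_shift (unsigned a, unsigned b)
{
  assert (b < sizeof (unsigned) * CHAR_BIT);  /* shift count in range */
  assert (a * (1u << b) == (a << b));
}
#endif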
10823 /* (A + A) * C -> A * 2 * C */
10824 if (TREE_CODE (arg0) == PLUS_EXPR
10825 && TREE_CODE (arg1) == INTEGER_CST
10826 && operand_equal_p (TREE_OPERAND (arg0, 0),
10827 TREE_OPERAND (arg0, 1), 0))
10828 return fold_build2_loc (loc, MULT_EXPR, type,
10829 omit_one_operand_loc (loc, type,
10830 TREE_OPERAND (arg0, 0),
10831 TREE_OPERAND (arg0, 1)),
10832 fold_build2_loc (loc, MULT_EXPR, type,
10832 build_int_cst (type, 2), arg1));
10835 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10836 sign-changing only. */
10837 if (TREE_CODE (arg1) == INTEGER_CST
10838 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10839 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10840 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10842 strict_overflow_p = false;
10843 if (TREE_CODE (arg1) == INTEGER_CST
10844 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10845 &strict_overflow_p)))
10847 if (strict_overflow_p)
10848 fold_overflow_warning (("assuming signed overflow does not "
10849 "occur when simplifying "
10850 "multiplication"),
10851 WARN_STRICT_OVERFLOW_MISC);
10852 return fold_convert_loc (loc, type, tem);
10855 /* Optimize z * conj(z) for integer complex numbers. */
10856 if (TREE_CODE (arg0) == CONJ_EXPR
10857 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10858 return fold_mult_zconjz (loc, type, arg1);
10859 if (TREE_CODE (arg1) == CONJ_EXPR
10860 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10861 return fold_mult_zconjz (loc, type, arg0);
10863 else
10865 /* Maybe fold x * 0 to 0. The expressions aren't the same
10866 when x is NaN, since x * 0 is also NaN. Nor are they the
10867 same in modes with signed zeros, since multiplying a
10868 negative value by 0 gives -0, not +0. */
10869 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10870 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10871 && real_zerop (arg1))
10872 return omit_one_operand_loc (loc, type, arg1, arg0);
10873 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10874 Likewise for complex arithmetic with signed zeros. */
10875 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10876 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10877 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10878 && real_onep (arg1))
10879 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10881 /* Transform x * -1.0 into -x. */
10882 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10883 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10884 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10885 && real_minus_onep (arg1))
10886 return fold_convert_loc (loc, type, negate_expr (arg0));
10888 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10889 the result for floating point types due to rounding so it is applied
10890 only if -fassociative-math was specified. */
10891 if (flag_associative_math
10892 && TREE_CODE (arg0) == RDIV_EXPR
10893 && TREE_CODE (arg1) == REAL_CST
10894 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10896 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10897 arg1);
10898 if (tem)
10899 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10900 TREE_OPERAND (arg0, 1));
10903 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10904 if (operand_equal_p (arg0, arg1, 0))
10906 tree tem = fold_strip_sign_ops (arg0);
10907 if (tem != NULL_TREE)
10909 tem = fold_convert_loc (loc, type, tem);
10910 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10914 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10915 This is not the same for NaNs or if signed zeros are
10916 involved. */
10917 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10918 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10919 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10920 && TREE_CODE (arg1) == COMPLEX_CST
10921 && real_zerop (TREE_REALPART (arg1)))
10923 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10924 if (real_onep (TREE_IMAGPART (arg1)))
10925 return
10926 fold_build2_loc (loc, COMPLEX_EXPR, type,
10927 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10928 rtype, arg0)),
10929 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10930 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10931 return
10932 fold_build2_loc (loc, COMPLEX_EXPR, type,
10933 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10934 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10935 rtype, arg0)));
10938 /* Optimize z * conj(z) for floating point complex numbers.
10939 Guarded by flag_unsafe_math_optimizations as non-finite
10940 imaginary components don't produce scalar results. */
10941 if (flag_unsafe_math_optimizations
10942 && TREE_CODE (arg0) == CONJ_EXPR
10943 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10944 return fold_mult_zconjz (loc, type, arg1);
10945 if (flag_unsafe_math_optimizations
10946 && TREE_CODE (arg1) == CONJ_EXPR
10947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10948 return fold_mult_zconjz (loc, type, arg0);
10950 if (flag_unsafe_math_optimizations)
10952 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10953 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10955 /* Optimizations of root(...)*root(...). */
10956 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10958 tree rootfn, arg;
10959 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10960 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10962 /* Optimize sqrt(x)*sqrt(x) as x. */
10963 if (BUILTIN_SQRT_P (fcode0)
10964 && operand_equal_p (arg00, arg10, 0)
10965 && ! HONOR_SNANS (TYPE_MODE (type)))
10966 return arg00;
10968 /* Optimize root(x)*root(y) as root(x*y). */
10969 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10970 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10971 return build_call_expr_loc (loc, rootfn, 1, arg);
10974 /* Optimize expN(x)*expN(y) as expN(x+y). */
10975 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10977 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10978 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10979 CALL_EXPR_ARG (arg0, 0),
10980 CALL_EXPR_ARG (arg1, 0));
10981 return build_call_expr_loc (loc, expfn, 1, arg);
10984 /* Optimizations of pow(...)*pow(...). */
10985 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10986 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10987 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10989 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10990 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10991 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10992 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10994 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10995 if (operand_equal_p (arg01, arg11, 0))
10997 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10998 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10999 arg00, arg10);
11000 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11003 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11004 if (operand_equal_p (arg00, arg10, 0))
11006 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11007 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11008 arg01, arg11);
11009 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
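/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   pow(x,y)*pow(x,z) == pow(x,y+z) is exact only in real arithmetic; in
   floating point the two sides can round differently, which is why this
   fold sits under flag_unsafe_math_optimizations. Assumes x > 0. */
#if 0
#include <assert.h>
#include <math.h>
static void
demo_pow_merge (double x, double y, double z)
{
  assert (fabs (pow (x, y) * pow (x, z) - pow (x, y + z))
          <= 1e-9 * fabs (pow (x, y + z)));
}
#endif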
11013 /* Optimize tan(x)*cos(x) as sin(x). */
11014 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11015 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11016 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11017 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11018 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11019 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11020 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11021 CALL_EXPR_ARG (arg1, 0), 0))
11023 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11025 if (sinfn != NULL_TREE)
11026 return build_call_expr_loc (loc, sinfn, 1,
11027 CALL_EXPR_ARG (arg0, 0));
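/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   tan(x)*cos(x) == sin(x) away from odd multiples of pi/2, where tan
   overflows; the comparison uses a tolerance since the identity is only
   approximate in floating point, hence the unsafe-math guard. */
#if 0
#include <assert.h>
#include <math.h>
static void
demo_tan_cos (double x)
{
  assert (fabs (tan (x) * cos (x) - sin (x)) < 1e-12);
}
#endif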
11030 /* Optimize x*pow(x,c) as pow(x,c+1). */
11031 if (fcode1 == BUILT_IN_POW
11032 || fcode1 == BUILT_IN_POWF
11033 || fcode1 == BUILT_IN_POWL)
11035 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11036 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11037 if (TREE_CODE (arg11) == REAL_CST
11038 && !TREE_OVERFLOW (arg11)
11039 && operand_equal_p (arg0, arg10, 0))
11041 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11042 REAL_VALUE_TYPE c;
11043 tree arg;
11045 c = TREE_REAL_CST (arg11);
11046 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11047 arg = build_real (type, c);
11048 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11052 /* Optimize pow(x,c)*x as pow(x,c+1). */
11053 if (fcode0 == BUILT_IN_POW
11054 || fcode0 == BUILT_IN_POWF
11055 || fcode0 == BUILT_IN_POWL)
11057 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11058 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11059 if (TREE_CODE (arg01) == REAL_CST
11060 && !TREE_OVERFLOW (arg01)
11061 && operand_equal_p (arg1, arg00, 0))
11063 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11064 REAL_VALUE_TYPE c;
11065 tree arg;
11067 c = TREE_REAL_CST (arg01);
11068 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11069 arg = build_real (type, c);
11070 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11074 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11075 if (!in_gimple_form
11076 && optimize
11077 && operand_equal_p (arg0, arg1, 0))
11079 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11081 if (powfn)
11083 tree arg = build_real (type, dconst2);
11084 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11089 goto associate;
11091 case BIT_IOR_EXPR:
11092 bit_ior:
11093 /* ~X | X is -1. */
11094 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11095 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11097 t1 = build_zero_cst (type);
11098 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11099 return omit_one_operand_loc (loc, type, t1, arg1);
11102 /* X | ~X is -1. */
11103 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11104 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11106 t1 = build_zero_cst (type);
11107 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11108 return omit_one_operand_loc (loc, type, t1, arg0);
11111 /* Canonicalize (X & C1) | C2. */
11112 if (TREE_CODE (arg0) == BIT_AND_EXPR
11113 && TREE_CODE (arg1) == INTEGER_CST
11114 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11116 int width = TYPE_PRECISION (type), w;
11117 wide_int c1 = TREE_OPERAND (arg0, 1);
11118 wide_int c2 = arg1;
11120 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11121 if ((c1 & c2) == c1)
11122 return omit_one_operand_loc (loc, type, arg1,
11123 TREE_OPERAND (arg0, 0));
11125 wide_int msk = wi::mask (width, false,
11126 TYPE_PRECISION (TREE_TYPE (arg1)));
11128 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11129 if (msk.and_not (c1 | c2) == 0)
11130 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11131 TREE_OPERAND (arg0, 0), arg1);
11133 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11134 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11135 mode which allows further optimizations. */
11136 c1 &= msk;
11137 c2 &= msk;
11138 wide_int c3 = c1.and_not (c2);
11139 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11141 wide_int mask = wi::mask (w, false,
11142 TYPE_PRECISION (type));
11143 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11145 c3 = mask;
11146 break;
11150 if (c3 != c1)
11151 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11152 fold_build2_loc (loc, BIT_AND_EXPR, type,
11153 TREE_OPERAND (arg0, 0),
11154 wide_int_to_tree (type,
11155 c3)),
11156 arg1);
11159 /* (X & Y) | Y is (X, Y). */
11160 if (TREE_CODE (arg0) == BIT_AND_EXPR
11161 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11162 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11163 /* (X & Y) | X is (Y, X). */
11164 if (TREE_CODE (arg0) == BIT_AND_EXPR
11165 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11166 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11167 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11168 /* X | (X & Y) is (Y, X). */
11169 if (TREE_CODE (arg1) == BIT_AND_EXPR
11170 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11171 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11172 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11173 /* X | (Y & X) is (Y, X). */
11174 if (TREE_CODE (arg1) == BIT_AND_EXPR
11175 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11176 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11177 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11179 /* (X & ~Y) | (~X & Y) is X ^ Y */
11180 if (TREE_CODE (arg0) == BIT_AND_EXPR
11181 && TREE_CODE (arg1) == BIT_AND_EXPR)
11183 tree a0, a1, l0, l1, n0, n1;
11185 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11186 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11188 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11189 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11191 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11192 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11194 if ((operand_equal_p (n0, a0, 0)
11195 && operand_equal_p (n1, a1, 0))
11196 || (operand_equal_p (n0, a1, 0)
11197 && operand_equal_p (n1, a0, 0)))
11198 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
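/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   each output bit of (X & ~Y) | (~X & Y) is set exactly when the
   corresponding bits of X and Y differ, i.e. it is X ^ Y bitwise. */
#if 0
#include <assert.h>
static void
demo_xor_from_and_ior (unsigned x, unsigned y)
{
  assert (((x & ~y) | (~x & y)) == (x ^ y));
}
#endif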
11201 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11202 if (t1 != NULL_TREE)
11203 return t1;
11205 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11207 This results in more efficient code for machines without a NAND
11208 instruction. Combine will canonicalize to the first form
11209 which will allow use of NAND instructions provided by the
11210 backend if they exist. */
11211 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11212 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11214 return
11215 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11216 build2 (BIT_AND_EXPR, type,
11217 fold_convert_loc (loc, type,
11218 TREE_OPERAND (arg0, 0)),
11219 fold_convert_loc (loc, type,
11220 TREE_OPERAND (arg1, 0))));
11223 /* See if this can be simplified into a rotate first. If that
11224 is unsuccessful continue in the association code. */
11225 goto bit_rotate;
11227 case BIT_XOR_EXPR:
11228 /* ~X ^ X is -1. */
11229 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11230 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11232 t1 = build_zero_cst (type);
11233 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11234 return omit_one_operand_loc (loc, type, t1, arg1);
11237 /* X ^ ~X is -1. */
11238 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11239 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11241 t1 = build_zero_cst (type);
11242 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11243 return omit_one_operand_loc (loc, type, t1, arg0);
11246 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11247 with a constant, and the two constants have no bits in common,
11248 we should treat this as a BIT_IOR_EXPR since this may produce more
11249 simplifications. */
11250 if (TREE_CODE (arg0) == BIT_AND_EXPR
11251 && TREE_CODE (arg1) == BIT_AND_EXPR
11252 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11253 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11254 && wi::bit_and (TREE_OPERAND (arg0, 1),
11255 TREE_OPERAND (arg1, 1)) == 0)
11257 code = BIT_IOR_EXPR;
11258 goto bit_ior;
11261 /* (X | Y) ^ X -> Y & ~X. */
11262 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11263 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11265 tree t2 = TREE_OPERAND (arg0, 1);
11266 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11267 arg1);
11268 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11269 fold_convert_loc (loc, type, t2),
11270 fold_convert_loc (loc, type, t1));
11271 return t1;
11274 /* (Y | X) ^ X -> Y & ~X. */
11275 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11276 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11278 tree t2 = TREE_OPERAND (arg0, 0);
11279 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11280 arg1);
11281 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11282 fold_convert_loc (loc, type, t2),
11283 fold_convert_loc (loc, type, t1));
11284 return t1;
11287 /* X ^ (X | Y) -> Y & ~X. */
11288 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11289 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11291 tree t2 = TREE_OPERAND (arg1, 1);
11292 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11293 arg0);
11294 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11295 fold_convert_loc (loc, type, t2),
11296 fold_convert_loc (loc, type, t1));
11297 return t1;
11300 /* X ^ (Y | X) -> Y & ~X. */
11301 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11302 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11304 tree t2 = TREE_OPERAND (arg1, 0);
11305 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11306 arg0);
11307 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11308 fold_convert_loc (loc, type, t2),
11309 fold_convert_loc (loc, type, t1));
11310 return t1;
11313 /* Convert ~X ^ ~Y to X ^ Y. */
11314 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11315 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11316 return fold_build2_loc (loc, code, type,
11317 fold_convert_loc (loc, type,
11318 TREE_OPERAND (arg0, 0)),
11319 fold_convert_loc (loc, type,
11320 TREE_OPERAND (arg1, 0)));
11322 /* Convert ~X ^ C to X ^ ~C. */
11323 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11324 && TREE_CODE (arg1) == INTEGER_CST)
11325 return fold_build2_loc (loc, code, type,
11326 fold_convert_loc (loc, type,
11327 TREE_OPERAND (arg0, 0)),
11328 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11330 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11331 if (TREE_CODE (arg0) == BIT_AND_EXPR
11332 && INTEGRAL_TYPE_P (type)
11333 && integer_onep (TREE_OPERAND (arg0, 1))
11334 && integer_onep (arg1))
11335 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11336 build_zero_cst (TREE_TYPE (arg0)));
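/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   X & 1 is 0 or 1, so XORing it with 1 flips it, which is the same 0/1
   value C's == comparison against zero produces. */
#if 0
#include <assert.h>
static void
demo_low_bit_flip (unsigned x)
{
  assert (((x & 1) ^ 1) == ((x & 1) == 0));
}
#endif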
11338 /* Fold (X & Y) ^ Y as ~X & Y. */
11339 if (TREE_CODE (arg0) == BIT_AND_EXPR
11340 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11342 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11343 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11344 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11345 fold_convert_loc (loc, type, arg1));
11347 /* Fold (X & Y) ^ X as ~Y & X. */
11348 if (TREE_CODE (arg0) == BIT_AND_EXPR
11349 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11350 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11352 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11353 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11354 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11355 fold_convert_loc (loc, type, arg1));
11357 /* Fold X ^ (X & Y) as X & ~Y. */
11358 if (TREE_CODE (arg1) == BIT_AND_EXPR
11359 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11361 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11362 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11363 fold_convert_loc (loc, type, arg0),
11364 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11366 /* Fold X ^ (Y & X) as ~Y & X. */
11367 if (TREE_CODE (arg1) == BIT_AND_EXPR
11368 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11369 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11371 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11372 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11373 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11374 fold_convert_loc (loc, type, arg0));
11377 /* See if this can be simplified into a rotate first. If that
11378 is unsuccessful continue in the association code. */
11379 goto bit_rotate;
11381 case BIT_AND_EXPR:
11382 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11383 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11384 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11385 || (TREE_CODE (arg0) == EQ_EXPR
11386 && integer_zerop (TREE_OPERAND (arg0, 1))))
11387 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11388 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11390 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11391 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11392 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11393 || (TREE_CODE (arg1) == EQ_EXPR
11394 && integer_zerop (TREE_OPERAND (arg1, 1))))
11395 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11396 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11398 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11399 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11400 && TREE_CODE (arg1) == INTEGER_CST
11401 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11403 tree tmp1 = fold_convert_loc (loc, type, arg1);
11404 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11405 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11406 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11407 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11408 return
11409 fold_convert_loc (loc, type,
11410 fold_build2_loc (loc, BIT_IOR_EXPR,
11411 type, tmp2, tmp3));
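/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   the canonicalization above is just AND distributing over IOR. */
#if 0
#include <assert.h>
static void
demo_and_over_ior (unsigned x, unsigned c1, unsigned c2)
{
  assert (((x | c1) & c2) == ((x & c2) | (c1 & c2)));
}
#endif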
11414 /* (X | Y) & Y is (X, Y). */
11415 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11416 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11417 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11418 /* (X | Y) & X is (Y, X). */
11419 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11420 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11421 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11422 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11423 /* X & (X | Y) is (Y, X). */
11424 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11425 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11426 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11427 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11428 /* X & (Y | X) is (Y, X). */
11429 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11430 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11431 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11432 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11434 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11435 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11436 && INTEGRAL_TYPE_P (type)
11437 && integer_onep (TREE_OPERAND (arg0, 1))
11438 && integer_onep (arg1))
11440 tree tem2;
11441 tem = TREE_OPERAND (arg0, 0);
11442 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11443 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11444 tem, tem2);
11445 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11446 build_zero_cst (TREE_TYPE (tem)));
11448 /* Fold ~X & 1 as (X & 1) == 0. */
11449 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11450 && INTEGRAL_TYPE_P (type)
11451 && integer_onep (arg1))
11453 tree tem2;
11454 tem = TREE_OPERAND (arg0, 0);
11455 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11456 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11457 tem, tem2);
11458 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11459 build_zero_cst (TREE_TYPE (tem)));
11461 /* Fold !X & 1 as X == 0. */
11462 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11463 && integer_onep (arg1))
11465 tem = TREE_OPERAND (arg0, 0);
11466 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11467 build_zero_cst (TREE_TYPE (tem)));
11470 /* Fold (X ^ Y) & Y as ~X & Y. */
11471 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11472 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11474 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11475 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11476 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11477 fold_convert_loc (loc, type, arg1));
11479 /* Fold (X ^ Y) & X as ~Y & X. */
11480 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11481 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11482 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11484 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11485 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11486 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11487 fold_convert_loc (loc, type, arg1));
11489 /* Fold X & (X ^ Y) as X & ~Y. */
11490 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11491 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11493 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11494 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11495 fold_convert_loc (loc, type, arg0),
11496 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11498 /* Fold X & (Y ^ X) as ~Y & X. */
11499 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11500 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11501 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11503 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11504 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11505 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11506 fold_convert_loc (loc, type, arg0));
11509 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11510 multiple of 1 << CST. */
11511 if (TREE_CODE (arg1) == INTEGER_CST)
11513 wide_int cst1 = arg1;
11514 wide_int ncst1 = -cst1;
11515 if ((cst1 & ncst1) == ncst1
11516 && multiple_of_p (type, arg0,
11517 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11518 return fold_convert_loc (loc, type, arg0);
11521 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11522 bits from CST2. */
11523 if (TREE_CODE (arg1) == INTEGER_CST
11524 && TREE_CODE (arg0) == MULT_EXPR
11525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11527 wide_int warg1 = arg1;
11528 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11530 if (masked == 0)
11531 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11532 arg0, arg1);
11533 else if (masked != warg1)
11535 /* Avoid the transform if arg1 is a mask of some
11536 mode which allows further optimizations. */
11537 int pop = wi::popcount (warg1);
11538 if (!(pop >= BITS_PER_UNIT
11539 && exact_log2 (pop) != -1
11540 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11541 return fold_build2_loc (loc, code, type, op0,
11542 wide_int_to_tree (type, masked));
11546 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11547 ((A & N) + B) & M -> (A + B) & M
11548 Similarly if (N & M) == 0,
11549 ((A | N) + B) & M -> (A + B) & M
11550 and for - instead of + (or unary - instead of +)
11551 and/or ^ instead of |.
11552 If B is constant and (B & M) == 0, fold into A & M. */
11553 if (TREE_CODE (arg1) == INTEGER_CST)
11555 wide_int cst1 = arg1;
11556 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11557 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11558 && (TREE_CODE (arg0) == PLUS_EXPR
11559 || TREE_CODE (arg0) == MINUS_EXPR
11560 || TREE_CODE (arg0) == NEGATE_EXPR)
11561 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11562 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11564 tree pmop[2];
11565 int which = 0;
11566 wide_int cst0;
11568 /* Now we know that arg0 is (C + D) or (C - D) or
11569 -C and arg1 (M) is (1LL << cst) - 1.
11570 Store C into PMOP[0] and D into PMOP[1]. */
11571 pmop[0] = TREE_OPERAND (arg0, 0);
11572 pmop[1] = NULL;
11573 if (TREE_CODE (arg0) != NEGATE_EXPR)
11575 pmop[1] = TREE_OPERAND (arg0, 1);
11576 which = 1;
11579 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11580 which = -1;
11582 for (; which >= 0; which--)
11583 switch (TREE_CODE (pmop[which]))
11585 case BIT_AND_EXPR:
11586 case BIT_IOR_EXPR:
11587 case BIT_XOR_EXPR:
11588 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11589 != INTEGER_CST)
11590 break;
11591 cst0 = TREE_OPERAND (pmop[which], 1);
11592 cst0 &= cst1;
11593 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11595 if (cst0 != cst1)
11596 break;
11598 else if (cst0 != 0)
11599 break;
11600 /* If C or D is of the form (A & N) where
11601 (N & M) == M, or of the form (A | N) or
11602 (A ^ N) where (N & M) == 0, replace it with A. */
11603 pmop[which] = TREE_OPERAND (pmop[which], 0);
11604 break;
11605 case INTEGER_CST:
11606 /* If C or D is a constant N where (N & M) == 0, it can be
11607 omitted (assumed 0). */
11608 if ((TREE_CODE (arg0) == PLUS_EXPR
11609 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11610 && (cst1 & pmop[which]) == 0)
11611 pmop[which] = NULL;
11612 break;
11613 default:
11614 break;
11617 /* Only build anything new if we optimized one or both arguments
11618 above. */
11619 if (pmop[0] != TREE_OPERAND (arg0, 0)
11620 || (TREE_CODE (arg0) != NEGATE_EXPR
11621 && pmop[1] != TREE_OPERAND (arg0, 1)))
11623 tree utype = TREE_TYPE (arg0);
11624 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11626 /* Perform the operations in a type that has defined
11627 overflow behavior. */
11628 utype = unsigned_type_for (TREE_TYPE (arg0));
11629 if (pmop[0] != NULL)
11630 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11631 if (pmop[1] != NULL)
11632 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11635 if (TREE_CODE (arg0) == NEGATE_EXPR)
11636 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11637 else if (TREE_CODE (arg0) == PLUS_EXPR)
11639 if (pmop[0] != NULL && pmop[1] != NULL)
11640 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11641 pmop[0], pmop[1]);
11642 else if (pmop[0] != NULL)
11643 tem = pmop[0];
11644 else if (pmop[1] != NULL)
11645 tem = pmop[1];
11646 else
11647 return build_int_cst (type, 0);
11649 else if (pmop[0] == NULL)
11650 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11651 else
11652 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11653 pmop[0], pmop[1]);
11654 /* TEM is now the new binary +, - or unary - replacement. */
11655 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11656 fold_convert_loc (loc, utype, arg1));
11657 return fold_convert_loc (loc, type, tem);
11662 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11663 if (t1 != NULL_TREE)
11664 return t1;
11665 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11666 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11667 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11669 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11671 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11672 if (mask == -1)
11673 return
11674 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11677 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11679 This results in more efficient code for machines without a NOR
11680 instruction. Combine will canonicalize to the first form
11681 which will allow use of NOR instructions provided by the
11682 backend if they exist. */
11683 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11684 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11686 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11687 build2 (BIT_IOR_EXPR, type,
11688 fold_convert_loc (loc, type,
11689 TREE_OPERAND (arg0, 0)),
11690 fold_convert_loc (loc, type,
11691 TREE_OPERAND (arg1, 0))));
11694 /* If arg0 is derived from the address of an object or function, we may
11695 be able to fold this expression using the object or function's
11696 alignment. */
11697 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11699 unsigned HOST_WIDE_INT modulus, residue;
11700 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11702 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11703 integer_onep (arg1));
11705 /* This works because modulus is a power of 2. If this weren't the
11706 case, we'd have to replace it by its greatest power-of-2
11707 divisor: modulus & -modulus. */
11708 if (low < modulus)
11709 return build_int_cst (type, residue & low);
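/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   if ADDR == RESIDUE (mod MODULUS) with MODULUS a power of 2, then any
   mask LOW < MODULUS selects only bits below log2(MODULUS), so masking
   the address and masking the residue give the same value. */
#if 0
#include <assert.h>
static void
demo_align_mask (unsigned addr)
{
  const unsigned modulus = 16;        /* known power-of-2 alignment */
  unsigned residue = addr % modulus;  /* addr == residue (mod modulus) */
  const unsigned low = 7;             /* low < modulus */
  assert ((addr & low) == (residue & low));
}
#endif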
11712 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11713 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11714 if the new mask might be further optimized. */
11715 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11716 || TREE_CODE (arg0) == RSHIFT_EXPR)
11717 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11718 && TREE_CODE (arg1) == INTEGER_CST
11719 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11720 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11721 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11722 < TYPE_PRECISION (TREE_TYPE (arg0))))
11724 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11725 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11726 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11727 tree shift_type = TREE_TYPE (arg0);
11729 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11730 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11731 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11732 && TYPE_PRECISION (TREE_TYPE (arg0))
11733 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11735 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11736 tree arg00 = TREE_OPERAND (arg0, 0);
11737 /* See if more bits can be proven to be zero because of
11738 zero extension. */
11739 if (TREE_CODE (arg00) == NOP_EXPR
11740 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11742 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11743 if (TYPE_PRECISION (inner_type)
11744 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11745 && TYPE_PRECISION (inner_type) < prec)
11747 prec = TYPE_PRECISION (inner_type);
11748 /* See if we can shorten the right shift. */
11749 if (shiftc < prec)
11750 shift_type = inner_type;
11751 /* Otherwise X >> C1 is all zeros, so we'll optimize
11752 it into (X, 0) later on by making sure zerobits
11753 is all ones. */
11756 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11757 if (shiftc < prec)
11759 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11760 zerobits <<= prec - shiftc;
11762 /* For an arithmetic shift, if the sign bit could be set, zerobits
11763 can actually contain sign bits, so no transformation is
11764 possible, unless MASK masks them all away. In that
11765 case the shift needs to be converted into a logical shift. */
11766 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11767 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11769 if ((mask & zerobits) == 0)
11770 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11771 else
11772 zerobits = 0;
11776 /* ((X << 16) & 0xff00) is (X, 0). */
11777 if ((mask & zerobits) == mask)
11778 return omit_one_operand_loc (loc, type,
11779 build_int_cst (type, 0), arg0);
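/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   the (X, 0) case above, assuming 32-bit unsigned: after X << 16 the low
   16 bits are zero, so a mask like 0xff00 that lives entirely in those
   bits always yields 0. */
#if 0
#include <assert.h>
static void
demo_shift_mask_zero (unsigned x)
{
  assert (((x << 16) & 0xff00) == 0);
}
#endif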
11781 newmask = mask | zerobits;
11782 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11784 /* Only do the transformation if NEWMASK is some integer
11785 mode's mask. */
11786 for (prec = BITS_PER_UNIT;
11787 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11788 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11789 break;
11790 if (prec < HOST_BITS_PER_WIDE_INT
11791 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11793 tree newmaskt;
11795 if (shift_type != TREE_TYPE (arg0))
11797 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11798 fold_convert_loc (loc, shift_type,
11799 TREE_OPERAND (arg0, 0)),
11800 TREE_OPERAND (arg0, 1));
11801 tem = fold_convert_loc (loc, type, tem);
11803 else
11804 tem = op0;
11805 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11806 if (!tree_int_cst_equal (newmaskt, arg1))
11807 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11812 goto associate;
11814 case RDIV_EXPR:
11815 /* Don't touch a floating-point divide by zero unless the mode
11816 of the constant can represent infinity. */
11817 if (TREE_CODE (arg1) == REAL_CST
11818 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11819 && real_zerop (arg1))
11820 return NULL_TREE;
11822 /* Optimize A / A to 1.0 if we don't care about
11823 NaNs or Infinities. Skip the transformation
11824 for non-real operands. */
11825 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11826 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11827 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11828 && operand_equal_p (arg0, arg1, 0))
11830 tree r = build_real (TREE_TYPE (arg0), dconst1);
11832 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11835 /* The complex version of the above A / A optimization. */
11836 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11837 && operand_equal_p (arg0, arg1, 0))
11839 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11840 if (! HONOR_NANS (TYPE_MODE (elem_type))
11841 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11843 tree r = build_real (elem_type, dconst1);
11844 /* omit_two_operands will call fold_convert for us. */
11845 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11849 /* (-A) / (-B) -> A / B */
11850 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11851 return fold_build2_loc (loc, RDIV_EXPR, type,
11852 TREE_OPERAND (arg0, 0),
11853 negate_expr (arg1));
11854 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11855 return fold_build2_loc (loc, RDIV_EXPR, type,
11856 negate_expr (arg0),
11857 TREE_OPERAND (arg1, 0));
11859 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11860 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11861 && real_onep (arg1))
11862 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11864 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11865 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11866 && real_minus_onep (arg1))
11867 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11868 negate_expr (arg0)));
11870 /* If ARG1 is a constant, we can convert this to a multiply by the
11871 reciprocal. This does not have the same rounding properties,
11872 so only do this if -freciprocal-math. We can actually
11873 always safely do it if ARG1 is a power of two, but it's hard to
11874 tell if it is or not in a portable manner. */
11875 if (optimize
11876 && (TREE_CODE (arg1) == REAL_CST
11877 || (TREE_CODE (arg1) == COMPLEX_CST
11878 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11879 || (TREE_CODE (arg1) == VECTOR_CST
11880 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11882 if (flag_reciprocal_math
11883 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11884 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11885 /* Find the reciprocal if optimizing and the result is exact.
11886 TODO: Complex reciprocal not implemented. */
11887 if (TREE_CODE (arg1) != COMPLEX_CST)
11889 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11891 if (inverse)
11892 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11895 /* Convert A/B/C to A/(B*C). */
11896 if (flag_reciprocal_math
11897 && TREE_CODE (arg0) == RDIV_EXPR)
11898 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11899 fold_build2_loc (loc, MULT_EXPR, type,
11900 TREE_OPERAND (arg0, 1), arg1));
11902 /* Convert A/(B/C) to (A/B)*C. */
11903 if (flag_reciprocal_math
11904 && TREE_CODE (arg1) == RDIV_EXPR)
11905 return fold_build2_loc (loc, MULT_EXPR, type,
11906 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11907 TREE_OPERAND (arg1, 0)),
11908 TREE_OPERAND (arg1, 1));
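/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   A/(B/C) and (A/B)*C round differently in general, which is why the
   fold requires flag_reciprocal_math; the check therefore compares with
   a relative tolerance rather than exactly. */
#if 0
#include <assert.h>
#include <math.h>
static void
demo_div_reassoc (double a, double b, double c)
{
  assert (fabs (a / (b / c) - (a / b) * c) <= 1e-9 * fabs ((a / b) * c));
}
#endif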
11910 /* Convert C1/(X*C2) into (C1/C2)/X. */
11911 if (flag_reciprocal_math
11912 && TREE_CODE (arg1) == MULT_EXPR
11913 && TREE_CODE (arg0) == REAL_CST
11914 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11916 tree tem = const_binop (RDIV_EXPR, arg0,
11917 TREE_OPERAND (arg1, 1));
11918 if (tem)
11919 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11920 TREE_OPERAND (arg1, 0));
11923 if (flag_unsafe_math_optimizations)
11925 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11926 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11928 /* Optimize sin(x)/cos(x) as tan(x). */
11929 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11930 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11931 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11932 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11933 CALL_EXPR_ARG (arg1, 0), 0))
11935 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11937 if (tanfn != NULL_TREE)
11938 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11941 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11942 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11943 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11944 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11945 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11946 CALL_EXPR_ARG (arg1, 0), 0))
11948 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11950 if (tanfn != NULL_TREE)
11952 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11953 CALL_EXPR_ARG (arg0, 0));
11954 return fold_build2_loc (loc, RDIV_EXPR, type,
11955 build_real (type, dconst1), tmp);
11959 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11960 NaNs or Infinities. */
11961 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11962 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11963 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11965 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11966 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11968 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11969 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11970 && operand_equal_p (arg00, arg01, 0))
11972 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11974 if (cosfn != NULL_TREE)
11975 return build_call_expr_loc (loc, cosfn, 1, arg00);
11979 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11980 NaNs or Infinities. */
11981 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11982 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11983 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11985 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11986 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11988 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11989 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11990 && operand_equal_p (arg00, arg01, 0))
11992 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11994 if (cosfn != NULL_TREE)
11996 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11997 return fold_build2_loc (loc, RDIV_EXPR, type,
11998 build_real (type, dconst1),
11999 tmp);
12004 /* Optimize pow(x,c)/x as pow(x,c-1). */
12005 if (fcode0 == BUILT_IN_POW
12006 || fcode0 == BUILT_IN_POWF
12007 || fcode0 == BUILT_IN_POWL)
12009 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12010 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12011 if (TREE_CODE (arg01) == REAL_CST
12012 && !TREE_OVERFLOW (arg01)
12013 && operand_equal_p (arg1, arg00, 0))
12015 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12016 REAL_VALUE_TYPE c;
12017 tree arg;
12019 c = TREE_REAL_CST (arg01);
12020 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12021 arg = build_real (type, c);
12022 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12026 /* Optimize a/root(b/c) into a*root(c/b). */
12027 if (BUILTIN_ROOT_P (fcode1))
12029 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12031 if (TREE_CODE (rootarg) == RDIV_EXPR)
12033 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12034 tree b = TREE_OPERAND (rootarg, 0);
12035 tree c = TREE_OPERAND (rootarg, 1);
12037 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12039 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12040 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12044 /* Optimize x/expN(y) into x*expN(-y). */
12045 if (BUILTIN_EXPONENT_P (fcode1))
12047 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12048 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12049 arg1 = build_call_expr_loc (loc,
12050 expfn, 1,
12051 fold_convert_loc (loc, type, arg));
12052 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12055 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12056 if (fcode1 == BUILT_IN_POW
12057 || fcode1 == BUILT_IN_POWF
12058 || fcode1 == BUILT_IN_POWL)
12060 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12061 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12062 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12063 tree neg11 = fold_convert_loc (loc, type,
12064 negate_expr (arg11));
12065 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12066 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12069 return NULL_TREE;
12071 case TRUNC_DIV_EXPR:
12072 /* Optimize (X & (-A)) / A where A is a power of 2,
12073 to X >> log2(A) */
12074 if (TREE_CODE (arg0) == BIT_AND_EXPR
12075 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12076 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12078 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12079 arg1, TREE_OPERAND (arg0, 1));
12080 if (sum && integer_zerop (sum)) {
12081 tree pow2 = build_int_cst (integer_type_node,
12082 wi::exact_log2 (arg1));
12083 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12084 TREE_OPERAND (arg0, 0), pow2);
12088 /* Fall through */
12090 case FLOOR_DIV_EXPR:
12091 /* Simplify A / (B << N) where A and B are positive and B is
12092 a power of 2, to A >> (N + log2(B)). */
12093 strict_overflow_p = false;
12094 if (TREE_CODE (arg1) == LSHIFT_EXPR
12095 && (TYPE_UNSIGNED (type)
12096 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12098 tree sval = TREE_OPERAND (arg1, 0);
12099 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12101 tree sh_cnt = TREE_OPERAND (arg1, 1);
12102 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12103 wi::exact_log2 (sval));
12105 if (strict_overflow_p)
12106 fold_overflow_warning (("assuming signed overflow does not "
12107 "occur when simplifying A / (B << N)"),
12108 WARN_STRICT_OVERFLOW_MISC);
12110 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12111 sh_cnt, pow2);
12112 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12113 fold_convert_loc (loc, type, arg0), sh_cnt);
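/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   with B a power of 2, dividing by B << N is dividing by 2^(N+log2(B)),
   i.e. an unsigned right shift by N + log2(B). */
#if 0
#include <assert.h>
static void
demo_div_by_shifted_pow2 (unsigned a, unsigned n)
{
  const unsigned b = 4;  /* power of 2, log2 (b) == 2 */
  assert (n < 16);       /* keep b << n and n + 2 in range */
  assert (a / (b << n) == a >> (n + 2));
}
#endif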
12117 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12118 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12119 if (INTEGRAL_TYPE_P (type)
12120 && TYPE_UNSIGNED (type)
12121 && code == FLOOR_DIV_EXPR)
12122 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12124 /* Fall through */
12126 case ROUND_DIV_EXPR:
12127 case CEIL_DIV_EXPR:
12128 case EXACT_DIV_EXPR:
12129 if (integer_zerop (arg1))
12130 return NULL_TREE;
12131 /* X / -1 is -X. */
12132 if (!TYPE_UNSIGNED (type)
12133 && TREE_CODE (arg1) == INTEGER_CST
12134 && wi::eq_p (arg1, -1))
12135 return fold_convert_loc (loc, type, negate_expr (arg0));
12137 /* Convert -A / -B to A / B when the type is signed and overflow is
12138 undefined. */
12139 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12140 && TREE_CODE (arg0) == NEGATE_EXPR
12141 && negate_expr_p (arg1))
12143 if (INTEGRAL_TYPE_P (type))
12144 fold_overflow_warning (("assuming signed overflow does not occur "
12145 "when distributing negation across "
12146 "division"),
12147 WARN_STRICT_OVERFLOW_MISC);
12148 return fold_build2_loc (loc, code, type,
12149 fold_convert_loc (loc, type,
12150 TREE_OPERAND (arg0, 0)),
12151 fold_convert_loc (loc, type,
12152 negate_expr (arg1)));
12154 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12155 && TREE_CODE (arg1) == NEGATE_EXPR
12156 && negate_expr_p (arg0))
12158 if (INTEGRAL_TYPE_P (type))
12159 fold_overflow_warning (("assuming signed overflow does not occur "
12160 "when distributing negation across "
12161 "division"),
12162 WARN_STRICT_OVERFLOW_MISC);
12163 return fold_build2_loc (loc, code, type,
12164 fold_convert_loc (loc, type,
12165 negate_expr (arg0)),
12166 fold_convert_loc (loc, type,
12167 TREE_OPERAND (arg1, 0)));
12170 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12171 operation, EXACT_DIV_EXPR.
12173 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12174 At one time others generated faster code; it's not clear whether they
12175 still do after the last round of changes to the DIV code in expmed.c. */
12176 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12177 && multiple_of_p (type, arg0, arg1))
12178 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12180 strict_overflow_p = false;
12181 if (TREE_CODE (arg1) == INTEGER_CST
12182 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12183 &strict_overflow_p)))
12185 if (strict_overflow_p)
12186 fold_overflow_warning (("assuming signed overflow does not occur "
12187 "when simplifying division"),
12188 WARN_STRICT_OVERFLOW_MISC);
12189 return fold_convert_loc (loc, type, tem);
12192 return NULL_TREE;
12194 case CEIL_MOD_EXPR:
12195 case FLOOR_MOD_EXPR:
12196 case ROUND_MOD_EXPR:
12197 case TRUNC_MOD_EXPR:
12198 /* X % -1 is zero. */
12199 if (!TYPE_UNSIGNED (type)
12200 && TREE_CODE (arg1) == INTEGER_CST
12201 && wi::eq_p (arg1, -1))
12202 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12204 /* X % -C is the same as X % C. */
12205 if (code == TRUNC_MOD_EXPR
12206 && TYPE_SIGN (type) == SIGNED
12207 && TREE_CODE (arg1) == INTEGER_CST
12208 && !TREE_OVERFLOW (arg1)
12209 && wi::neg_p (arg1)
12210 && !TYPE_OVERFLOW_TRAPS (type)
12211 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12212 && !sign_bit_p (arg1, arg1))
12213 return fold_build2_loc (loc, code, type,
12214 fold_convert_loc (loc, type, arg0),
12215 fold_convert_loc (loc, type,
12216 negate_expr (arg1)));
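/* Editorial sketch, not GCC source (hypothetical name, #if 0 guarded):
   C99 % truncates toward zero, so the divisor's sign never affects the
   result, matching the X % -C -> X % C fold above. */
#if 0
#include <assert.h>
static void
demo_mod_neg_divisor (int x)
{
  assert (x % -5 == x % 5);
}
#endif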
12218 /* X % -Y is the same as X % Y. */
12219 if (code == TRUNC_MOD_EXPR
12220 && !TYPE_UNSIGNED (type)
12221 && TREE_CODE (arg1) == NEGATE_EXPR
12222 && !TYPE_OVERFLOW_TRAPS (type))
12223 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12224 fold_convert_loc (loc, type,
12225 TREE_OPERAND (arg1, 0)));
12227 strict_overflow_p = false;
12228 if (TREE_CODE (arg1) == INTEGER_CST
12229 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12230 &strict_overflow_p)))
12232 if (strict_overflow_p)
12233 fold_overflow_warning (("assuming signed overflow does not occur "
12234 "when simplifying modulus"),
12235 WARN_STRICT_OVERFLOW_MISC);
12236 return fold_convert_loc (loc, type, tem);
12239 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12240 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12241 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12242 && (TYPE_UNSIGNED (type)
12243 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12245 tree c = arg1;
12246 /* Also optimize A % (C << N) where C is a power of 2,
12247 to A & ((C << N) - 1). */
12248 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12249 c = TREE_OPERAND (arg1, 0);
12251 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12253 tree mask
12254 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12255 build_int_cst (TREE_TYPE (arg1), 1));
12256 if (strict_overflow_p)
12257 fold_overflow_warning (("assuming signed overflow does not "
12258 "occur when simplifying "
12259 "X % (power of two)"),
12260 WARN_STRICT_OVERFLOW_MISC);
12261 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12262 fold_convert_loc (loc, type, arg0),
12263 fold_convert_loc (loc, type, mask));
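	 /* Sketch of the power-of-two rewrite, assuming unsigned x:

	      x % 8          ==>   x & 7
	      x % (4 << n)   ==>   x & ((4 << n) - 1)

	    valid because for a non-negative dividend the remainder modulo
	    2**k is just the low k bits.  */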
12267 return NULL_TREE;
12269 case LROTATE_EXPR:
12270 case RROTATE_EXPR:
12271 if (integer_all_onesp (arg0))
12272 return omit_one_operand_loc (loc, type, arg0, arg1);
12273 goto shift;
12275 case RSHIFT_EXPR:
12276 /* Optimize -1 >> x for arithmetic right shifts. */
12277 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12278 && tree_expr_nonnegative_p (arg1))
12279 return omit_one_operand_loc (loc, type, arg0, arg1);
12280 /* ... fall through ... */
12282 case LSHIFT_EXPR:
12283 shift:
12284 if (integer_zerop (arg1))
12285 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12286 if (integer_zerop (arg0))
12287 return omit_one_operand_loc (loc, type, arg0, arg1);
12289 /* Prefer vector1 << scalar to vector1 << vector2
12290 if vector2 is uniform. */
12291 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12292 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12293 return fold_build2_loc (loc, code, type, op0, tem);
12295 /* Since a negative shift count is not well-defined,
12296 don't try to compute it in the compiler. */
12297 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12298 return NULL_TREE;
12300 prec = element_precision (type);
12302 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12303 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12304 && tree_to_uhwi (arg1) < prec
12305 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12306 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12308 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12309 + tree_to_uhwi (arg1));
12311 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12312 being well defined. */
12313 if (low >= prec)
12315 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12316 low = low % prec;
12317 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12318 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12319 TREE_OPERAND (arg0, 0));
12320 else
12321 low = prec - 1;
12324 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12325 build_int_cst (TREE_TYPE (arg1), low));
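	 /* Sketch of the shift-count combining above, assuming 32-bit int:

	      (x << 3) << 4    ==>   x << 7
	      (x << 20) << 20  ==>   0          (unsigned/left: count >= prec)
	      (x >> 31) >> 5   ==>   x >> 31    (signed right: clamped to prec-1)

	    and for rotates the combined count is reduced modulo prec.  */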
12328 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12329 into x & ((unsigned)-1 >> c) for unsigned types. */
12330 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12331 || (TYPE_UNSIGNED (type)
12332 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12333 && tree_fits_uhwi_p (arg1)
12334 && tree_to_uhwi (arg1) < prec
12335 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12336 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12338 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12339 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12340 tree lshift;
12341 tree arg00;
12343 if (low0 == low1)
12345 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12347 lshift = build_minus_one_cst (type);
12348 lshift = const_binop (code, lshift, arg1);
12350 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
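	 /* Sketch, assuming 32-bit unsigned x:

	      (x >> 4) << 4   ==>   x & 0xfffffff0    (i.e. x & (-1 << 4))
	      (x << 4) >> 4   ==>   x & 0x0fffffff    (i.e. x & ((unsigned) -1 >> 4))

	    both compositions simply clear the bits that were shifted out.  */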
12354 /* Rewrite an LROTATE_EXPR by a constant into an
12355 RROTATE_EXPR by a new constant. */
12356 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12358 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12359 tem = const_binop (MINUS_EXPR, tem, arg1);
12360 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
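	 /* Sketch: left rotates are canonicalized to right rotates, e.g.
	    for 32-bit x

	      x lrotate 8   ==>   x rrotate 24

	    so later code only has to handle one rotate direction.  */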
12363 /* If we have a rotate of a bit operation with the rotate count and
12364 the second operand of the bit operation both constant,
12365 permute the two operations. */
12366 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12367 && (TREE_CODE (arg0) == BIT_AND_EXPR
12368 || TREE_CODE (arg0) == BIT_IOR_EXPR
12369 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12371 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12372 fold_build2_loc (loc, code, type,
12373 TREE_OPERAND (arg0, 0), arg1),
12374 fold_build2_loc (loc, code, type,
12375 TREE_OPERAND (arg0, 1), arg1));
12377 /* Two consecutive rotates adding up to some integer
12378 multiple of the precision of the type can be ignored. */
12379 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12380 && TREE_CODE (arg0) == RROTATE_EXPR
12381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12382 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12383 prec) == 0)
12384 return TREE_OPERAND (arg0, 0);
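	 /* Sketch of the two rotate rules above, for 32-bit x:

	      rrotate (x & 0xff00, 8)        ==>   rrotate (x, 8) & 0xff
	      rrotate (rrotate (x, 12), 20)  ==>   x     (12 + 20 == 32)
	  */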
12386 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12387 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12388 if the latter can be further optimized. */
12389 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12390 && TREE_CODE (arg0) == BIT_AND_EXPR
12391 && TREE_CODE (arg1) == INTEGER_CST
12392 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12394 tree mask = fold_build2_loc (loc, code, type,
12395 fold_convert_loc (loc, type,
12396 TREE_OPERAND (arg0, 1)),
12397 arg1);
12398 tree shift = fold_build2_loc (loc, code, type,
12399 fold_convert_loc (loc, type,
12400 TREE_OPERAND (arg0, 0)),
12401 arg1);
12402 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12403 if (tem)
12404 return tem;
12407 return NULL_TREE;
12409 case MIN_EXPR:
12410 if (operand_equal_p (arg0, arg1, 0))
12411 return omit_one_operand_loc (loc, type, arg0, arg1);
12412 if (INTEGRAL_TYPE_P (type)
12413 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12414 return omit_one_operand_loc (loc, type, arg1, arg0);
12415 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12416 if (tem)
12417 return tem;
12418 goto associate;
12420 case MAX_EXPR:
12421 if (operand_equal_p (arg0, arg1, 0))
12422 return omit_one_operand_loc (loc, type, arg0, arg1);
12423 if (INTEGRAL_TYPE_P (type)
12424 && TYPE_MAX_VALUE (type)
12425 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12426 return omit_one_operand_loc (loc, type, arg1, arg0);
12427 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12428 if (tem)
12429 return tem;
12430 goto associate;
12432 case TRUTH_ANDIF_EXPR:
12433 /* Note that the operands of this must be ints
12434 and their values must be 0 or 1.
12435 ("true" is a fixed value perhaps depending on the language.) */
12436 /* If first arg is constant zero, return it. */
12437 if (integer_zerop (arg0))
12438 return fold_convert_loc (loc, type, arg0);
12439 case TRUTH_AND_EXPR:
12440 /* If either arg is constant true, drop it. */
12441 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12442 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12443 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12444 /* Preserve sequence points. */
12445 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12446 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12447 /* If second arg is constant zero, result is zero, but first arg
12448 must be evaluated. */
12449 if (integer_zerop (arg1))
12450 return omit_one_operand_loc (loc, type, arg1, arg0);
12451 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12452 case will be handled here. */
12453 if (integer_zerop (arg0))
12454 return omit_one_operand_loc (loc, type, arg0, arg1);
12456 /* !X && X is always false. */
12457 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12459 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12460 /* X && !X is always false. */
12461 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12462 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12463 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12465 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12466 means A >= Y && A != MAX, but in this case we know that
12467 A < X <= MAX. */
12469 if (!TREE_SIDE_EFFECTS (arg0)
12470 && !TREE_SIDE_EFFECTS (arg1))
12472 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12473 if (tem && !operand_equal_p (tem, arg0, 0))
12474 return fold_build2_loc (loc, code, type, tem, arg1);
12476 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12477 if (tem && !operand_equal_p (tem, arg1, 0))
12478 return fold_build2_loc (loc, code, type, arg0, tem);
12481 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12482 != NULL_TREE)
12483 return tem;
12485 return NULL_TREE;
12487 case TRUTH_ORIF_EXPR:
12488 /* Note that the operands of this must be ints
12489 and their values must be 0 or true.
12490 ("true" is a fixed value perhaps depending on the language.) */
12491 /* If first arg is constant true, return it. */
12492 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12493 return fold_convert_loc (loc, type, arg0);
12494 case TRUTH_OR_EXPR:
12495 /* If either arg is constant zero, drop it. */
12496 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12497 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12498 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12499 /* Preserve sequence points. */
12500 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12501 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12502 /* If second arg is constant true, result is true, but we must
12503 evaluate first arg. */
12504 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12505 return omit_one_operand_loc (loc, type, arg1, arg0);
12506 /* Likewise for first arg, but note this only occurs here for
12507 TRUTH_OR_EXPR. */
12508 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12509 return omit_one_operand_loc (loc, type, arg0, arg1);
12511 /* !X || X is always true. */
12512 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12514 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12515 /* X || !X is always true. */
12516 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12517 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12518 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12520 /* (X && !Y) || (!X && Y) is X ^ Y */
12521 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12522 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12524 tree a0, a1, l0, l1, n0, n1;
12526 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12527 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12529 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12530 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12532 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12533 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12535 if ((operand_equal_p (n0, a0, 0)
12536 && operand_equal_p (n1, a1, 0))
12537 || (operand_equal_p (n0, a1, 0)
12538 && operand_equal_p (n1, a0, 0)))
12539 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
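	 /* Sketch of the XOR recognition above, for truth values x, y:

	      (x && !y) || (!x && y)   ==>   x ^ y

	    the negated pairs are matched in either operand order.  */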
12542 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12543 != NULL_TREE)
12544 return tem;
12546 return NULL_TREE;
12548 case TRUTH_XOR_EXPR:
12549 /* If the second arg is constant zero, drop it. */
12550 if (integer_zerop (arg1))
12551 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12552 /* If the second arg is constant true, this is a logical inversion. */
12553 if (integer_onep (arg1))
12555 tem = invert_truthvalue_loc (loc, arg0);
12556 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12558 /* Identical arguments cancel to zero. */
12559 if (operand_equal_p (arg0, arg1, 0))
12560 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12562 /* !X ^ X is always true. */
12563 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12564 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12565 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12567 /* X ^ !X is always true. */
12568 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12569 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12570 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12572 return NULL_TREE;
12574 case EQ_EXPR:
12575 case NE_EXPR:
12576 STRIP_NOPS (arg0);
12577 STRIP_NOPS (arg1);
12579 tem = fold_comparison (loc, code, type, op0, op1);
12580 if (tem != NULL_TREE)
12581 return tem;
12583 /* bool_var != 0 becomes bool_var. */
12584 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12585 && code == NE_EXPR)
12586 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12588 /* bool_var == 1 becomes bool_var. */
12589 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12590 && code == EQ_EXPR)
12591 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12593 /* bool_var != 1 becomes !bool_var. */
12594 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12595 && code == NE_EXPR)
12596 return fold_convert_loc (loc, type,
12597 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12598 TREE_TYPE (arg0), arg0));
12600 /* bool_var == 0 becomes !bool_var. */
12601 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12602 && code == EQ_EXPR)
12603 return fold_convert_loc (loc, type,
12604 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12605 TREE_TYPE (arg0), arg0));
12607 /* !exp != 0 becomes !exp */
12608 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12609 && code == NE_EXPR)
12610 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12612 /* If this is an equality comparison of the address of two non-weak,
12613 unaliased symbols neither of which are extern (since we do not
12614 have access to attributes for externs), then we know the result. */
12615 if (TREE_CODE (arg0) == ADDR_EXPR
12616 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12617 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12618 && ! lookup_attribute ("alias",
12619 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12620 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12621 && TREE_CODE (arg1) == ADDR_EXPR
12622 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12623 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12624 && ! lookup_attribute ("alias",
12625 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12626 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12628 /* We know that we're looking at the address of two
12629 non-weak, unaliased, static _DECL nodes.
12631 It is both wasteful and incorrect to call operand_equal_p
12632 to compare the two ADDR_EXPR nodes. It is wasteful in that
12633 all we need to do is test pointer equality for the arguments
12634 to the two ADDR_EXPR nodes. It is incorrect to use
12635 operand_equal_p as that function is NOT equivalent to a
12636 C equality test. It can in fact return false for two
12637 objects which would test as equal using the C equality
12638 operator. */
12639 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12640 return constant_boolean_node (equal
12641 ? code == EQ_EXPR : code != EQ_EXPR,
12642 type);
12645 /* Similarly for a NEGATE_EXPR. */
12646 if (TREE_CODE (arg0) == NEGATE_EXPR
12647 && TREE_CODE (arg1) == INTEGER_CST
12648 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12649 arg1)))
12650 && TREE_CODE (tem) == INTEGER_CST
12651 && !TREE_OVERFLOW (tem))
12652 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12654 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12655 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12656 && TREE_CODE (arg1) == INTEGER_CST
12657 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12658 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12659 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12660 fold_convert_loc (loc,
12661 TREE_TYPE (arg0),
12662 arg1),
12663 TREE_OPERAND (arg0, 1)));
12665 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12666 if ((TREE_CODE (arg0) == PLUS_EXPR
12667 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12668 || TREE_CODE (arg0) == MINUS_EXPR)
12669 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12670 0)),
12671 arg1, 0)
12672 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12673 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12675 tree val = TREE_OPERAND (arg0, 1);
12676 return omit_two_operands_loc (loc, type,
12677 fold_build2_loc (loc, code, type,
12678 val,
12679 build_int_cst (TREE_TYPE (val),
12680 0)),
12681 TREE_OPERAND (arg0, 0), arg1);
12684 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12685 if (TREE_CODE (arg0) == MINUS_EXPR
12686 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12687 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12688 1)),
12689 arg1, 0)
12690 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12692 return omit_two_operands_loc (loc, type,
12693 code == NE_EXPR
12694 ? boolean_true_node : boolean_false_node,
12695 TREE_OPERAND (arg0, 1), arg1);
12698 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12699 if (TREE_CODE (arg0) == ABS_EXPR
12700 && (integer_zerop (arg1) || real_zerop (arg1)))
12701 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12703 /* If this is an EQ or NE comparison with zero and ARG0 is
12704 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12705 two operations, but the latter can be done in one less insn
12706 on machines that have only two-operand insns or on which a
12707 constant cannot be the first operand. */
12708 if (TREE_CODE (arg0) == BIT_AND_EXPR
12709 && integer_zerop (arg1))
12711 tree arg00 = TREE_OPERAND (arg0, 0);
12712 tree arg01 = TREE_OPERAND (arg0, 1);
12713 if (TREE_CODE (arg00) == LSHIFT_EXPR
12714 && integer_onep (TREE_OPERAND (arg00, 0)))
12716 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12717 arg01, TREE_OPERAND (arg00, 1));
12718 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12719 build_int_cst (TREE_TYPE (arg0), 1));
12720 return fold_build2_loc (loc, code, type,
12721 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12722 arg1);
12724 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12725 && integer_onep (TREE_OPERAND (arg01, 0)))
12727 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12728 arg00, TREE_OPERAND (arg01, 1));
12729 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12730 build_int_cst (TREE_TYPE (arg0), 1));
12731 return fold_build2_loc (loc, code, type,
12732 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12733 arg1);
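	 /* Sketch of the single-bit test canonicalization:

	      ((1 << n) & x) == 0   ==>   ((x >> n) & 1) == 0

	    Both need one shift and one AND, but the second form avoids a
	    constant in the shift's first operand, which some machines
	    cannot encode directly.  */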
12737 /* If this is an NE or EQ comparison of zero against the result of a
12738 signed MOD operation whose second operand is a power of 2, make
12739 the MOD operation unsigned since it is simpler and equivalent. */
12740 if (integer_zerop (arg1)
12741 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12742 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12743 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12744 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12745 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12746 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12748 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12749 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12750 fold_convert_loc (loc, newtype,
12751 TREE_OPERAND (arg0, 0)),
12752 fold_convert_loc (loc, newtype,
12753 TREE_OPERAND (arg0, 1)));
12755 return fold_build2_loc (loc, code, type, newmod,
12756 fold_convert_loc (loc, newtype, arg1));
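	 /* Sketch, assuming int x: only divisibility is observed, so

	      (x % 4) == 0   ==>   ((unsigned) x % 4u) == 0

	    which the power-of-two rule above then folds to a plain mask
	    test, (x & 3) == 0.  */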
12759 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12760 C1 is a valid shift constant, and C2 is a power of two, i.e.
12761 a single bit. */
12762 if (TREE_CODE (arg0) == BIT_AND_EXPR
12763 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12764 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12765 == INTEGER_CST
12766 && integer_pow2p (TREE_OPERAND (arg0, 1))
12767 && integer_zerop (arg1))
12769 tree itype = TREE_TYPE (arg0);
12770 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12771 prec = TYPE_PRECISION (itype);
12773 /* Check for a valid shift count. */
12774 if (wi::ltu_p (arg001, prec))
12776 tree arg01 = TREE_OPERAND (arg0, 1);
12777 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12778 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12779 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12780 can be rewritten as (X & (C2 << C1)) != 0. */
12781 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12783 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12784 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12785 return fold_build2_loc (loc, code, type, tem,
12786 fold_convert_loc (loc, itype, arg1));
12788 /* Otherwise, for signed (arithmetic) shifts,
12789 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12790 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12791 else if (!TYPE_UNSIGNED (itype))
12792 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12793 arg000, build_int_cst (itype, 0));
12794 /* Otherwise, for unsigned (logical) shifts,
12795 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12796 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12797 else
12798 return omit_one_operand_loc (loc, type,
12799 code == EQ_EXPR ? integer_one_node
12800 : integer_zero_node,
12801 arg000);
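	 /* Sketch of the three outcomes above, assuming 32-bit x:

	      ((x >> 3) & 4) != 0   ==>   (x & (4 << 3)) != 0  (shift absorbed)
	      signed x:   ((x >> 31) & 2) != 0   ==>   x < 0
	      unsigned x: ((x >> 31) & 2) != 0   ==>   false, x still evaluated
	  */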
12805 /* If we have (A & C) == C where C is a power of 2, convert this into
12806 (A & C) != 0. Similarly for NE_EXPR. */
12807 if (TREE_CODE (arg0) == BIT_AND_EXPR
12808 && integer_pow2p (TREE_OPERAND (arg0, 1))
12809 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12810 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12811 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12812 integer_zero_node));
12814 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12815 bit, then fold the expression into A < 0 or A >= 0. */
12816 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12817 if (tem)
12818 return tem;
12820 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12821 Similarly for NE_EXPR. */
12822 if (TREE_CODE (arg0) == BIT_AND_EXPR
12823 && TREE_CODE (arg1) == INTEGER_CST
12824 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12826 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12827 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12828 TREE_OPERAND (arg0, 1));
12829 tree dandnotc
12830 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12831 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12832 notc);
12833 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12834 if (integer_nonzerop (dandnotc))
12835 return omit_one_operand_loc (loc, type, rslt, arg0);
12838 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12839 Similarly for NE_EXPR. */
12840 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12841 && TREE_CODE (arg1) == INTEGER_CST
12842 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12844 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12845 tree candnotd
12846 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12847 TREE_OPERAND (arg0, 1),
12848 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12849 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12850 if (integer_nonzerop (candnotd))
12851 return omit_one_operand_loc (loc, type, rslt, arg0);
12854 /* If this is a comparison of a field, we may be able to simplify it. */
12855 if ((TREE_CODE (arg0) == COMPONENT_REF
12856 || TREE_CODE (arg0) == BIT_FIELD_REF)
12857 /* Handle the constant case even without -O
12858 to make sure the warnings are given. */
12859 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12861 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12862 if (t1)
12863 return t1;
12866 /* Optimize comparisons of strlen vs zero to a compare of the
12867 first character of the string vs zero. To wit,
12868 strlen(ptr) == 0 => *ptr == 0
12869 strlen(ptr) != 0 => *ptr != 0
12870 Other cases should reduce to one of these two (or a constant)
12871 due to the return value of strlen being unsigned. */
12872 if (TREE_CODE (arg0) == CALL_EXPR
12873 && integer_zerop (arg1))
12875 tree fndecl = get_callee_fndecl (arg0);
12877 if (fndecl
12878 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12879 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12880 && call_expr_nargs (arg0) == 1
12881 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12883 tree iref = build_fold_indirect_ref_loc (loc,
12884 CALL_EXPR_ARG (arg0, 0));
12885 return fold_build2_loc (loc, code, type, iref,
12886 build_int_cst (TREE_TYPE (iref), 0));
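	 /* Sketch of the strlen shortcut:

	      strlen (p) == 0   ==>   *p == 0

	    valid because strlen returns zero exactly when the first
	    character is the terminating NUL.  */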
12890 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12891 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12892 if (TREE_CODE (arg0) == RSHIFT_EXPR
12893 && integer_zerop (arg1)
12894 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12896 tree arg00 = TREE_OPERAND (arg0, 0);
12897 tree arg01 = TREE_OPERAND (arg0, 1);
12898 tree itype = TREE_TYPE (arg00);
12899 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12901 if (TYPE_UNSIGNED (itype))
12903 itype = signed_type_for (itype);
12904 arg00 = fold_convert_loc (loc, itype, arg00);
12906 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12907 type, arg00, build_zero_cst (itype));
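	 /* Sketch, assuming 32-bit int x:

	      (x >> 31) != 0   ==>   x < 0
	      (x >> 31) == 0   ==>   x >= 0

	    an unsigned x is first converted to the corresponding signed
	    type so that the sign test is meaningful.  */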
12911 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12912 if (integer_zerop (arg1)
12913 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12914 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12915 TREE_OPERAND (arg0, 1));
12917 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12918 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12919 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12920 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12921 build_zero_cst (TREE_TYPE (arg0)));
12922 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12923 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12924 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12925 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12926 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12927 build_zero_cst (TREE_TYPE (arg0)));
12929 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12930 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12931 && TREE_CODE (arg1) == INTEGER_CST
12932 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12933 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12934 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12935 TREE_OPERAND (arg0, 1), arg1));
12937 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12938 (X & C) == 0 when C is a single bit. */
12939 if (TREE_CODE (arg0) == BIT_AND_EXPR
12940 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12941 && integer_zerop (arg1)
12942 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12944 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12945 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12946 TREE_OPERAND (arg0, 1));
12947 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12948 type, tem,
12949 fold_convert_loc (loc, TREE_TYPE (arg0),
12950 arg1));
12953 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12954 constant C is a power of two, i.e. a single bit. */
12955 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12956 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12957 && integer_zerop (arg1)
12958 && integer_pow2p (TREE_OPERAND (arg0, 1))
12959 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12960 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12962 tree arg00 = TREE_OPERAND (arg0, 0);
12963 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12964 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12967 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12968 when C is a power of two, i.e. a single bit. */
12969 if (TREE_CODE (arg0) == BIT_AND_EXPR
12970 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12971 && integer_zerop (arg1)
12972 && integer_pow2p (TREE_OPERAND (arg0, 1))
12973 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12974 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12976 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12977 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12978 arg000, TREE_OPERAND (arg0, 1));
12979 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12980 tem, build_int_cst (TREE_TYPE (tem), 0));
12983 if (integer_zerop (arg1)
12984 && tree_expr_nonzero_p (arg0))
12986 tree res = constant_boolean_node (code==NE_EXPR, type);
12987 return omit_one_operand_loc (loc, type, res, arg0);
12990 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12991 if (TREE_CODE (arg0) == NEGATE_EXPR
12992 && TREE_CODE (arg1) == NEGATE_EXPR)
12993 return fold_build2_loc (loc, code, type,
12994 TREE_OPERAND (arg0, 0),
12995 fold_convert_loc (loc, TREE_TYPE (arg0),
12996 TREE_OPERAND (arg1, 0)));
12998 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
12999 if (TREE_CODE (arg0) == BIT_AND_EXPR
13000 && TREE_CODE (arg1) == BIT_AND_EXPR)
13002 tree arg00 = TREE_OPERAND (arg0, 0);
13003 tree arg01 = TREE_OPERAND (arg0, 1);
13004 tree arg10 = TREE_OPERAND (arg1, 0);
13005 tree arg11 = TREE_OPERAND (arg1, 1);
13006 tree itype = TREE_TYPE (arg0);
13008 if (operand_equal_p (arg01, arg11, 0))
13009 return fold_build2_loc (loc, code, type,
13010 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13011 fold_build2_loc (loc,
13012 BIT_XOR_EXPR, itype,
13013 arg00, arg10),
13014 arg01),
13015 build_zero_cst (itype));
13017 if (operand_equal_p (arg01, arg10, 0))
13018 return fold_build2_loc (loc, code, type,
13019 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13020 fold_build2_loc (loc,
13021 BIT_XOR_EXPR, itype,
13022 arg00, arg11),
13023 arg01),
13024 build_zero_cst (itype));
13026 if (operand_equal_p (arg00, arg11, 0))
13027 return fold_build2_loc (loc, code, type,
13028 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13029 fold_build2_loc (loc,
13030 BIT_XOR_EXPR, itype,
13031 arg01, arg10),
13032 arg00),
13033 build_zero_cst (itype));
13035 if (operand_equal_p (arg00, arg10, 0))
13036 return fold_build2_loc (loc, code, type,
13037 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13038 fold_build2_loc (loc,
13039 BIT_XOR_EXPR, itype,
13040 arg01, arg11),
13041 arg00),
13042 build_zero_cst (itype));
13045 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13046 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13048 tree arg00 = TREE_OPERAND (arg0, 0);
13049 tree arg01 = TREE_OPERAND (arg0, 1);
13050 tree arg10 = TREE_OPERAND (arg1, 0);
13051 tree arg11 = TREE_OPERAND (arg1, 1);
13052 tree itype = TREE_TYPE (arg0);
13054 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13055 operand_equal_p guarantees no side-effects so we don't need
13056 to use omit_one_operand on Z. */
13057 if (operand_equal_p (arg01, arg11, 0))
13058 return fold_build2_loc (loc, code, type, arg00,
13059 fold_convert_loc (loc, TREE_TYPE (arg00),
13060 arg10));
13061 if (operand_equal_p (arg01, arg10, 0))
13062 return fold_build2_loc (loc, code, type, arg00,
13063 fold_convert_loc (loc, TREE_TYPE (arg00),
13064 arg11));
13065 if (operand_equal_p (arg00, arg11, 0))
13066 return fold_build2_loc (loc, code, type, arg01,
13067 fold_convert_loc (loc, TREE_TYPE (arg01),
13068 arg10));
13069 if (operand_equal_p (arg00, arg10, 0))
13070 return fold_build2_loc (loc, code, type, arg01,
13071 fold_convert_loc (loc, TREE_TYPE (arg01),
13072 arg11));
13074 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13075 if (TREE_CODE (arg01) == INTEGER_CST
13076 && TREE_CODE (arg11) == INTEGER_CST)
13078 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13079 fold_convert_loc (loc, itype, arg11));
13080 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13081 return fold_build2_loc (loc, code, type, tem,
13082 fold_convert_loc (loc, itype, arg10));
13086 /* Attempt to simplify equality/inequality comparisons of complex
13087 values. Only lower the comparison if the result is known or
13088 can be simplified to a single scalar comparison. */
13089 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13090 || TREE_CODE (arg0) == COMPLEX_CST)
13091 && (TREE_CODE (arg1) == COMPLEX_EXPR
13092 || TREE_CODE (arg1) == COMPLEX_CST))
13094 tree real0, imag0, real1, imag1;
13095 tree rcond, icond;
13097 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13099 real0 = TREE_OPERAND (arg0, 0);
13100 imag0 = TREE_OPERAND (arg0, 1);
13102 else
13104 real0 = TREE_REALPART (arg0);
13105 imag0 = TREE_IMAGPART (arg0);
13108 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13110 real1 = TREE_OPERAND (arg1, 0);
13111 imag1 = TREE_OPERAND (arg1, 1);
13113 else
13115 real1 = TREE_REALPART (arg1);
13116 imag1 = TREE_IMAGPART (arg1);
13119 rcond = fold_binary_loc (loc, code, type, real0, real1);
13120 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13122 if (integer_zerop (rcond))
13124 if (code == EQ_EXPR)
13125 return omit_two_operands_loc (loc, type, boolean_false_node,
13126 imag0, imag1);
13127 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13129 else
13131 if (code == NE_EXPR)
13132 return omit_two_operands_loc (loc, type, boolean_true_node,
13133 imag0, imag1);
13134 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13138 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13139 if (icond && TREE_CODE (icond) == INTEGER_CST)
13141 if (integer_zerop (icond))
13143 if (code == EQ_EXPR)
13144 return omit_two_operands_loc (loc, type, boolean_false_node,
13145 real0, real1);
13146 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13148 else
13150 if (code == NE_EXPR)
13151 return omit_two_operands_loc (loc, type, boolean_true_node,
13152 real0, real1);
13153 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13158 return NULL_TREE;
13160 case LT_EXPR:
13161 case GT_EXPR:
13162 case LE_EXPR:
13163 case GE_EXPR:
13164 tem = fold_comparison (loc, code, type, op0, op1);
13165 if (tem != NULL_TREE)
13166 return tem;
13168 /* Transform comparisons of the form X +- C CMP X. */
13169 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13170 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13171 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13172 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13173 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13174 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13176 tree arg01 = TREE_OPERAND (arg0, 1);
13177 enum tree_code code0 = TREE_CODE (arg0);
13178 int is_positive;
13180 if (TREE_CODE (arg01) == REAL_CST)
13181 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13182 else
13183 is_positive = tree_int_cst_sgn (arg01);
13185 /* (X - c) > X becomes false. */
13186 if (code == GT_EXPR
13187 && ((code0 == MINUS_EXPR && is_positive >= 0)
13188 || (code0 == PLUS_EXPR && is_positive <= 0)))
13190 if (TREE_CODE (arg01) == INTEGER_CST
13191 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13192 fold_overflow_warning (("assuming signed overflow does not "
13193 "occur when assuming that (X - c) > X "
13194 "is always false"),
13195 WARN_STRICT_OVERFLOW_ALL);
13196 return constant_boolean_node (0, type);
13199 /* Likewise (X + c) < X becomes false. */
13200 if (code == LT_EXPR
13201 && ((code0 == PLUS_EXPR && is_positive >= 0)
13202 || (code0 == MINUS_EXPR && is_positive <= 0)))
13204 if (TREE_CODE (arg01) == INTEGER_CST
13205 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13206 fold_overflow_warning (("assuming signed overflow does not "
13207 "occur when assuming that "
13208 "(X + c) < X is always false"),
13209 WARN_STRICT_OVERFLOW_ALL);
13210 return constant_boolean_node (0, type);
13213 /* Convert (X - c) <= X to true. */
13214 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13215 && code == LE_EXPR
13216 && ((code0 == MINUS_EXPR && is_positive >= 0)
13217 || (code0 == PLUS_EXPR && is_positive <= 0)))
13219 if (TREE_CODE (arg01) == INTEGER_CST
13220 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13221 fold_overflow_warning (("assuming signed overflow does not "
13222 "occur when assuming that "
13223 "(X - c) <= X is always true"),
13224 WARN_STRICT_OVERFLOW_ALL);
13225 return constant_boolean_node (1, type);
13228 /* Convert (X + c) >= X to true. */
13229 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13230 && code == GE_EXPR
13231 && ((code0 == PLUS_EXPR && is_positive >= 0)
13232 || (code0 == MINUS_EXPR && is_positive <= 0)))
13234 if (TREE_CODE (arg01) == INTEGER_CST
13235 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13236 fold_overflow_warning (("assuming signed overflow does not "
13237 "occur when assuming that "
13238 "(X + c) >= X is always true"),
13239 WARN_STRICT_OVERFLOW_ALL);
13240 return constant_boolean_node (1, type);
13243 if (TREE_CODE (arg01) == INTEGER_CST)
13245 /* Convert X + c > X and X - c < X to true for integers. */
13246 if (code == GT_EXPR
13247 && ((code0 == PLUS_EXPR && is_positive > 0)
13248 || (code0 == MINUS_EXPR && is_positive < 0)))
13250 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13251 fold_overflow_warning (("assuming signed overflow does "
13252 "not occur when assuming that "
13253 "(X + c) > X is always true"),
13254 WARN_STRICT_OVERFLOW_ALL);
13255 return constant_boolean_node (1, type);
13258 if (code == LT_EXPR
13259 && ((code0 == MINUS_EXPR && is_positive > 0)
13260 || (code0 == PLUS_EXPR && is_positive < 0)))
13262 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13263 fold_overflow_warning (("assuming signed overflow does "
13264 "not occur when assuming that "
13265 "(X - c) < X is always true"),
13266 WARN_STRICT_OVERFLOW_ALL);
13267 return constant_boolean_node (1, type);
13270 /* Convert X + c <= X and X - c >= X to false for integers. */
13271 if (code == LE_EXPR
13272 && ((code0 == PLUS_EXPR && is_positive > 0)
13273 || (code0 == MINUS_EXPR && is_positive < 0)))
13275 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13276 fold_overflow_warning (("assuming signed overflow does "
13277 "not occur when assuming that "
13278 "(X + c) <= X is always false"),
13279 WARN_STRICT_OVERFLOW_ALL);
13280 return constant_boolean_node (0, type);
13283 if (code == GE_EXPR
13284 && ((code0 == MINUS_EXPR && is_positive > 0)
13285 || (code0 == PLUS_EXPR && is_positive < 0)))
13287 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13288 fold_overflow_warning (("assuming signed overflow does "
13289 "not occur when assuming that "
13290 "(X - c) >= X is always false"),
13291 WARN_STRICT_OVERFLOW_ALL);
13292 return constant_boolean_node (0, type);
13297 /* Comparisons with the highest or lowest possible integer of
13298 the specified precision will have known values. */
13300 tree arg1_type = TREE_TYPE (arg1);
13301 unsigned int prec = TYPE_PRECISION (arg1_type);
13303 if (TREE_CODE (arg1) == INTEGER_CST
13304 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13306 wide_int max = wi::max_value (arg1_type);
13307 wide_int signed_max = wi::max_value (prec, SIGNED);
13308 wide_int min = wi::min_value (arg1_type);
13310 if (wi::eq_p (arg1, max))
13311 switch (code)
13313 case GT_EXPR:
13314 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13316 case GE_EXPR:
13317 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13319 case LE_EXPR:
13320 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13322 case LT_EXPR:
13323 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13325 /* The GE_EXPR and LT_EXPR cases above are not normally
13326 reached because of previous transformations. */
13328 default:
13329 break;
13331 else if (wi::eq_p (arg1, max - 1))
13332 switch (code)
13334 case GT_EXPR:
13335 arg1 = const_binop (PLUS_EXPR, arg1,
13336 build_int_cst (TREE_TYPE (arg1), 1));
13337 return fold_build2_loc (loc, EQ_EXPR, type,
13338 fold_convert_loc (loc,
13339 TREE_TYPE (arg1), arg0),
13340 arg1);
13341 case LE_EXPR:
13342 arg1 = const_binop (PLUS_EXPR, arg1,
13343 build_int_cst (TREE_TYPE (arg1), 1));
13344 return fold_build2_loc (loc, NE_EXPR, type,
13345 fold_convert_loc (loc, TREE_TYPE (arg1),
13346 arg0),
13347 arg1);
13348 default:
13349 break;
13351 else if (wi::eq_p (arg1, min))
13352 switch (code)
13354 case LT_EXPR:
13355 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13357 case LE_EXPR:
13358 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13360 case GE_EXPR:
13361 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13363 case GT_EXPR:
13364 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13366 default:
13367 break;
13369 else if (wi::eq_p (arg1, min + 1))
13370 switch (code)
13372 case GE_EXPR:
13373 arg1 = const_binop (MINUS_EXPR, arg1,
13374 build_int_cst (TREE_TYPE (arg1), 1));
13375 return fold_build2_loc (loc, NE_EXPR, type,
13376 fold_convert_loc (loc,
13377 TREE_TYPE (arg1), arg0),
13378 arg1);
13379 case LT_EXPR:
13380 arg1 = const_binop (MINUS_EXPR, arg1,
13381 build_int_cst (TREE_TYPE (arg1), 1));
13382 return fold_build2_loc (loc, EQ_EXPR, type,
13383 fold_convert_loc (loc, TREE_TYPE (arg1),
13384 arg0),
13385 arg1);
13386 default:
13387 break;
13390 else if (wi::eq_p (arg1, signed_max)
13391 && TYPE_UNSIGNED (arg1_type)
13392 /* We will flip the signedness of the comparison operator
13393 associated with the mode of arg1, so the sign bit is
13394 specified by this mode. Check that arg1 is the signed
13395 max associated with this sign bit. */
13396 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13397 /* signed_type does not work on pointer types. */
13398 && INTEGRAL_TYPE_P (arg1_type))
13400 /* The following case also applies to X < signed_max+1
13401 and X >= signed_max+1 because of previous transformations. */
13402 if (code == LE_EXPR || code == GT_EXPR)
13404 tree st = signed_type_for (arg1_type);
13405 return fold_build2_loc (loc,
13406 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13407 type, fold_convert_loc (loc, st, arg0),
13408 build_int_cst (st, 0));
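	 /* Sketch of the known-value comparisons, taking unsigned char x
	    at the tree level (max 255, signed max 127):

	      x > 255    ==>   false (x still evaluated)
	      x >= 255   ==>   x == 255
	      x <= 254   ==>   x != 255
	      x <= 127   ==>   (signed char) x >= 0
	  */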
13414 /* If we are comparing an ABS_EXPR with a constant, we can
13415 convert all the cases into explicit comparisons, but they may
13416 well not be faster than doing the ABS and one comparison.
13417 But ABS (X) <= C is a range comparison, which becomes a subtraction
13418 and a comparison, and is probably faster. */
13419 if (code == LE_EXPR
13420 && TREE_CODE (arg1) == INTEGER_CST
13421 && TREE_CODE (arg0) == ABS_EXPR
13422 && ! TREE_SIDE_EFFECTS (arg0)
13423 && (0 != (tem = negate_expr (arg1)))
13424 && TREE_CODE (tem) == INTEGER_CST
13425 && !TREE_OVERFLOW (tem))
13426 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13427 build2 (GE_EXPR, type,
13428 TREE_OPERAND (arg0, 0), tem),
13429 build2 (LE_EXPR, type,
13430 TREE_OPERAND (arg0, 0), arg1));
13432 /* Convert ABS_EXPR<x> >= 0 to true. */
13433 strict_overflow_p = false;
13434 if (code == GE_EXPR
13435 && (integer_zerop (arg1)
13436 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13437 && real_zerop (arg1)))
13438 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13440 if (strict_overflow_p)
13441 fold_overflow_warning (("assuming signed overflow does not occur "
13442 "when simplifying comparison of "
13443 "absolute value and zero"),
13444 WARN_STRICT_OVERFLOW_CONDITIONAL);
13445 return omit_one_operand_loc (loc, type,
13446 constant_boolean_node (true, type),
13447 arg0);
13450 /* Convert ABS_EXPR<x> < 0 to false. */
13451 strict_overflow_p = false;
13452 if (code == LT_EXPR
13453 && (integer_zerop (arg1) || real_zerop (arg1))
13454 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13456 if (strict_overflow_p)
13457 fold_overflow_warning (("assuming signed overflow does not occur "
13458 "when simplifying comparison of "
13459 "absolute value and zero"),
13460 WARN_STRICT_OVERFLOW_CONDITIONAL);
13461 return omit_one_operand_loc (loc, type,
13462 constant_boolean_node (false, type),
13463 arg0);
13466 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13467 and similarly for >= into !=. */
13468 if ((code == LT_EXPR || code == GE_EXPR)
13469 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13470 && TREE_CODE (arg1) == LSHIFT_EXPR
13471 && integer_onep (TREE_OPERAND (arg1, 0)))
13472 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13473 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13474 TREE_OPERAND (arg1, 1)),
13475 build_zero_cst (TREE_TYPE (arg0)));
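	 /* Sketch, assuming unsigned x:

	      x < (1 << y)    ==>   (x >> y) == 0
	      x >= (1 << y)   ==>   (x >> y) != 0

	    x is below the y-th power of two iff no bit at or above
	    position y is set.  */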
13477 /* Similarly for X < (cast) (1 << Y). But the cast must not be narrowing,
13478 otherwise Y might be >= the number of bits in X's type and thus e.g.
13479 (unsigned char) (1 << Y) for Y == 15 might be 0.
13480 If the cast is widening, then 1 << Y should have unsigned type,
13481 otherwise if Y is the number of bits in the signed shift type minus 1,
13482 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
13483 might be 0xffffffff80000000. */
13484 if ((code == LT_EXPR || code == GE_EXPR)
13485 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13486 && CONVERT_EXPR_P (arg1)
13487 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13488 && (TYPE_PRECISION (TREE_TYPE (arg1))
13489 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13490 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13491 || (TYPE_PRECISION (TREE_TYPE (arg1))
13492 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13493 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13495 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13496 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13497 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13498 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13499 build_zero_cst (TREE_TYPE (arg0)));
13502 return NULL_TREE;
13504 case UNORDERED_EXPR:
13505 case ORDERED_EXPR:
13506 case UNLT_EXPR:
13507 case UNLE_EXPR:
13508 case UNGT_EXPR:
13509 case UNGE_EXPR:
13510 case UNEQ_EXPR:
13511 case LTGT_EXPR:
13512 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13514 t1 = fold_relational_const (code, type, arg0, arg1);
13515 if (t1 != NULL_TREE)
13516 return t1;
13519 /* If the first operand is NaN, the result is constant. */
13520 if (TREE_CODE (arg0) == REAL_CST
13521 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13522 && (code != LTGT_EXPR || ! flag_trapping_math))
13524 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13525 ? integer_zero_node
13526 : integer_one_node;
13527 return omit_one_operand_loc (loc, type, t1, arg1);
13530 /* If the second operand is NaN, the result is constant. */
13531 if (TREE_CODE (arg1) == REAL_CST
13532 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13533 && (code != LTGT_EXPR || ! flag_trapping_math))
13535 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13536 ? integer_zero_node
13537 : integer_one_node;
13538 return omit_one_operand_loc (loc, type, t1, arg0);
13541 /* Simplify unordered comparison of something with itself. */
13542 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13543 && operand_equal_p (arg0, arg1, 0))
13544 return constant_boolean_node (1, type);
13546 if (code == LTGT_EXPR
13547 && !flag_trapping_math
13548 && operand_equal_p (arg0, arg1, 0))
13549 return constant_boolean_node (0, type);
13551 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13553 tree targ0 = strip_float_extensions (arg0);
13554 tree targ1 = strip_float_extensions (arg1);
13555 tree newtype = TREE_TYPE (targ0);
13557 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13558 newtype = TREE_TYPE (targ1);
13560 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13561 return fold_build2_loc (loc, code, type,
13562 fold_convert_loc (loc, newtype, targ0),
13563 fold_convert_loc (loc, newtype, targ1));
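	 /* Sketch: the comparison is done in the narrowest type that holds
	    both operands exactly, e.g. for floats f1, f2

	      (double) f1 < (double) f2   ==>   f1 < f2

	    since widening a float to double is exact and order-preserving.  */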
13566 return NULL_TREE;
13568 case COMPOUND_EXPR:
13569 /* When pedantic, a compound expression can be neither an lvalue
13570 nor an integer constant expression. */
13571 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13572 return NULL_TREE;
13573 /* Don't let (0, 0) be a null pointer constant. */
13574 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13575 : fold_convert_loc (loc, type, arg1);
13576 return pedantic_non_lvalue_loc (loc, tem);
13578 case COMPLEX_EXPR:
13579 if ((TREE_CODE (arg0) == REAL_CST
13580 && TREE_CODE (arg1) == REAL_CST)
13581 || (TREE_CODE (arg0) == INTEGER_CST
13582 && TREE_CODE (arg1) == INTEGER_CST))
13583 return build_complex (type, arg0, arg1);
13584 if (TREE_CODE (arg0) == REALPART_EXPR
13585 && TREE_CODE (arg1) == IMAGPART_EXPR
13586 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13587 && operand_equal_p (TREE_OPERAND (arg0, 0),
13588 TREE_OPERAND (arg1, 0), 0))
13589 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13590 TREE_OPERAND (arg1, 0));
13591 return NULL_TREE;
13593 case ASSERT_EXPR:
13594 /* An ASSERT_EXPR should never be passed to fold_binary. */
13595 gcc_unreachable ();
13597 case VEC_PACK_TRUNC_EXPR:
13598 case VEC_PACK_FIX_TRUNC_EXPR:
13600 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13601 tree *elts;
13603 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13604 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13605 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13606 return NULL_TREE;
13608 elts = XALLOCAVEC (tree, nelts);
13609 if (!vec_cst_ctor_to_array (arg0, elts)
13610 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13611 return NULL_TREE;
13613 for (i = 0; i < nelts; i++)
13615 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13616 ? NOP_EXPR : FIX_TRUNC_EXPR,
13617 TREE_TYPE (type), elts[i]);
13618 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13619 return NULL_TREE;
13622 return build_vector (type, elts);
13625 case VEC_WIDEN_MULT_LO_EXPR:
13626 case VEC_WIDEN_MULT_HI_EXPR:
13627 case VEC_WIDEN_MULT_EVEN_EXPR:
13628 case VEC_WIDEN_MULT_ODD_EXPR:
13630 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13631 unsigned int out, ofs, scale;
13632 tree *elts;
13634 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13635 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13636 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13637 return NULL_TREE;
13639 elts = XALLOCAVEC (tree, nelts * 4);
13640 if (!vec_cst_ctor_to_array (arg0, elts)
13641 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13642 return NULL_TREE;
13644 if (code == VEC_WIDEN_MULT_LO_EXPR)
13645 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13646 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13647 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13648 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13649 scale = 1, ofs = 0;
13650 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13651 scale = 1, ofs = 1;
13653 for (out = 0; out < nelts; out++)
13655 unsigned int in1 = (out << scale) + ofs;
13656 unsigned int in2 = in1 + nelts * 2;
13657 tree t1, t2;
13659 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13660 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13662 if (t1 == NULL_TREE || t2 == NULL_TREE)
13663 return NULL_TREE;
13664 elts[out] = const_binop (MULT_EXPR, t1, t2);
13665 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13666 return NULL_TREE;
13669 return build_vector (type, elts);
13672 default:
13673 return NULL_TREE;
13674 } /* switch (code) */
13677 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13678 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13679 of GOTO_EXPR. */
13681 static tree
13682 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13684 switch (TREE_CODE (*tp))
13686 case LABEL_EXPR:
13687 return *tp;
13689 case GOTO_EXPR:
13690 *walk_subtrees = 0;
13692 /* ... fall through ... */
13694 default:
13695 return NULL_TREE;
13699 /* Return whether the sub-tree ST contains a label which is accessible from
13700 outside the sub-tree. */
13702 static bool
13703 contains_label_p (tree st)
13705 return
13706 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13709 /* Fold a ternary expression of code CODE and type TYPE with operands
13710 OP0, OP1, and OP2. Return the folded expression if folding is
13711 successful. Otherwise, return NULL_TREE. */
13713 tree
13714 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13715 tree op0, tree op1, tree op2)
13717 tree tem;
13718 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13719 enum tree_code_class kind = TREE_CODE_CLASS (code);
13721 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13722 && TREE_CODE_LENGTH (code) == 3);
13724 /* If this is a commutative operation, and OP0 is a constant, move it
13725 to OP1 to reduce the number of tests below. */
13726 if (commutative_ternary_tree_code (code)
13727 && tree_swap_operands_p (op0, op1, true))
13728 return fold_build3_loc (loc, code, type, op1, op0, op2);
13730 tem = generic_simplify (loc, code, type, op0, op1, op2);
13731 if (tem)
13732 return tem;
13734 /* Strip any conversions that don't change the mode. This is safe
13735 for every expression, except for a comparison expression because
13736 its signedness is derived from its operands. So, in the latter
13737 case, only strip conversions that don't change the signedness.
13739 Note that this is done as an internal manipulation within the
13740 constant folder, in order to find the simplest representation of
13741 the arguments so that their form can be studied. In any case,
13742 the appropriate type conversions should be put back in the tree
13743 that will get out of the constant folder. */
13744 if (op0)
13746 arg0 = op0;
13747 STRIP_NOPS (arg0);
13750 if (op1)
13752 arg1 = op1;
13753 STRIP_NOPS (arg1);
13756 if (op2)
13758 arg2 = op2;
13759 STRIP_NOPS (arg2);
13762 switch (code)
13764 case COMPONENT_REF:
13765 if (TREE_CODE (arg0) == CONSTRUCTOR
13766 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13768 unsigned HOST_WIDE_INT idx;
13769 tree field, value;
13770 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13771 if (field == arg1)
13772 return value;
13774 return NULL_TREE;
13776 case COND_EXPR:
13777 case VEC_COND_EXPR:
13778 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13779 so all simple results must be passed through pedantic_non_lvalue. */
13780 if (TREE_CODE (arg0) == INTEGER_CST)
13782 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13783 tem = integer_zerop (arg0) ? op2 : op1;
13784 /* Only optimize constant conditions when the selected branch
13785 has the same type as the COND_EXPR. This avoids optimizing
13786 away "c ? x : throw", where the throw has a void type.
13787 Avoid throwing away an operand that contains a label. */
13788 if ((!TREE_SIDE_EFFECTS (unused_op)
13789 || !contains_label_p (unused_op))
13790 && (! VOID_TYPE_P (TREE_TYPE (tem))
13791 || VOID_TYPE_P (type)))
13792 return pedantic_non_lvalue_loc (loc, tem);
13793 return NULL_TREE;
13795 else if (TREE_CODE (arg0) == VECTOR_CST)
13797 if (integer_all_onesp (arg0))
13798 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13799 if (integer_zerop (arg0))
13800 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13802 if ((TREE_CODE (arg1) == VECTOR_CST
13803 || TREE_CODE (arg1) == CONSTRUCTOR)
13804 && (TREE_CODE (arg2) == VECTOR_CST
13805 || TREE_CODE (arg2) == CONSTRUCTOR))
13807 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13808 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13809 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13810 for (i = 0; i < nelts; i++)
13812 tree val = VECTOR_CST_ELT (arg0, i);
13813 if (integer_all_onesp (val))
13814 sel[i] = i;
13815 else if (integer_zerop (val))
13816 sel[i] = nelts + i;
13817 else /* Currently unreachable. */
13818 return NULL_TREE;
13820 tree t = fold_vec_perm (type, arg1, arg2, sel);
13821 if (t != NULL_TREE)
13822 return t;
13826 if (operand_equal_p (arg1, op2, 0))
13827 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13829 /* If we have A op B ? A : C, we may be able to convert this to a
13830 simpler expression, depending on the operation and the values
13831 of B and C. Signed zeros prevent all of these transformations,
13832 for reasons given above each one.
13834 Also try swapping the arguments and inverting the conditional. */
13835 if (COMPARISON_CLASS_P (arg0)
13836 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13837 arg1, TREE_OPERAND (arg0, 1))
13838 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13840 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13841 if (tem)
13842 return tem;
13845 if (COMPARISON_CLASS_P (arg0)
13846 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13847 op2,
13848 TREE_OPERAND (arg0, 1))
13849 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13851 location_t loc0 = expr_location_or (arg0, loc);
13852 tem = fold_invert_truthvalue (loc0, arg0);
13853 if (tem && COMPARISON_CLASS_P (tem))
13855 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13856 if (tem)
13857 return tem;
13861 /* If the second operand is simpler than the third, swap them
13862 since that produces better jump optimization results. */
13863 if (truth_value_p (TREE_CODE (arg0))
13864 && tree_swap_operands_p (op1, op2, false))
13866 location_t loc0 = expr_location_or (arg0, loc);
13867 /* See if this can be inverted. If it can't, possibly because
13868 it was a floating-point inequality comparison, don't do
13869 anything. */
13870 tem = fold_invert_truthvalue (loc0, arg0);
13871 if (tem)
13872 return fold_build3_loc (loc, code, type, tem, op2, op1);
13875 /* Convert A ? 1 : 0 to simply A. */
13876 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13877 : (integer_onep (op1)
13878 && !VECTOR_TYPE_P (type)))
13879 && integer_zerop (op2)
13880 /* If we try to convert OP0 to our type, the
13881 call to fold will try to move the conversion inside
13882 a COND, which will recurse. In that case, the COND_EXPR
13883 is probably the best choice, so leave it alone. */
13884 && type == TREE_TYPE (arg0))
13885 return pedantic_non_lvalue_loc (loc, arg0);
13887 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13888 over COND_EXPR in cases such as floating point comparisons. */
13889 if (integer_zerop (op1)
13890 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13891 : (integer_onep (op2)
13892 && !VECTOR_TYPE_P (type)))
13893 && truth_value_p (TREE_CODE (arg0)))
13894 return pedantic_non_lvalue_loc (loc,
13895 fold_convert_loc (loc, type,
13896 invert_truthvalue_loc (loc,
13897 arg0)));
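/* Illustrative sketch (not part of GCC): the two conversions above
   amount to, at the source level,

     (a == b) ? 1 : 0    => a == b
     (a == b) ? 0 : 1    => a != b

   where the second form depends on the condition being cheaply
   invertible; a floating-point inequality may not be, in which case
   the COND_EXPR is kept.  */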
13899 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13900 if (TREE_CODE (arg0) == LT_EXPR
13901 && integer_zerop (TREE_OPERAND (arg0, 1))
13902 && integer_zerop (op2)
13903 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13905 /* sign_bit_p looks through both zero and sign extensions,
13906 but for this optimization only sign extensions are
13907 usable. */
13908 tree tem2 = TREE_OPERAND (arg0, 0);
13909 while (tem != tem2)
13911 if (TREE_CODE (tem2) != NOP_EXPR
13912 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13914 tem = NULL_TREE;
13915 break;
13917 tem2 = TREE_OPERAND (tem2, 0);
13919 /* sign_bit_p only checks ARG1 bits within A's precision.
13920 If <sign bit of A> has wider type than A, bits outside
13921 of A's precision in <sign bit of A> need to be checked.
13922 If they are all 0, this optimization must be done in
13923 A's unsigned type; if they are all 1, in A's signed type;
13924 otherwise it cannot be done. */
13925 if (tem
13926 && TYPE_PRECISION (TREE_TYPE (tem))
13927 < TYPE_PRECISION (TREE_TYPE (arg1))
13928 && TYPE_PRECISION (TREE_TYPE (tem))
13929 < TYPE_PRECISION (type))
13931 int inner_width, outer_width;
13932 tree tem_type;
13934 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13935 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13936 if (outer_width > TYPE_PRECISION (type))
13937 outer_width = TYPE_PRECISION (type);
13939 wide_int mask = wi::shifted_mask
13940 (inner_width, outer_width - inner_width, false,
13941 TYPE_PRECISION (TREE_TYPE (arg1)));
13943 wide_int common = mask & arg1;
13944 if (common == mask)
13946 tem_type = signed_type_for (TREE_TYPE (tem));
13947 tem = fold_convert_loc (loc, tem_type, tem);
13949 else if (common == 0)
13951 tem_type = unsigned_type_for (TREE_TYPE (tem));
13952 tem = fold_convert_loc (loc, tem_type, tem);
13954 else
13955 tem = NULL;
13958 if (tem)
13959 return
13960 fold_convert_loc (loc, type,
13961 fold_build2_loc (loc, BIT_AND_EXPR,
13962 TREE_TYPE (tem), tem,
13963 fold_convert_loc (loc,
13964 TREE_TYPE (tem),
13965 arg1)));
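/* Illustrative sketch (not part of GCC): for 32-bit int the
   transformation above rewrites

     a < 0 ? INT_MIN : 0    => a & INT_MIN

   replacing the comparison and selection with a single branchless AND
   of A with its sign bit, after adjusting A's type as described above
   when the sign-bit constant is wider than A.  */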
13968 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13969 already handled above. */
13970 if (TREE_CODE (arg0) == BIT_AND_EXPR
13971 && integer_onep (TREE_OPERAND (arg0, 1))
13972 && integer_zerop (op2)
13973 && integer_pow2p (arg1))
13975 tree tem = TREE_OPERAND (arg0, 0);
13976 STRIP_NOPS (tem);
13977 if (TREE_CODE (tem) == RSHIFT_EXPR
13978 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13979 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13980 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13981 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13982 TREE_OPERAND (tem, 0), arg1);
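/* Illustrative sketch (not part of GCC): when the shift count matches
   the selected power of two,

     ((a >> 3) & 1) ? (1 << 3) : 0    => a & (1 << 3)

   i.e. "a & 8", removing both the shift and the branch.  */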
13985 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13986 is probably obsolete because the first operand should be a
13987 truth value (that's why we have the two cases above), but let's
13988 leave it in until we can confirm this for all front-ends. */
13989 if (integer_zerop (op2)
13990 && TREE_CODE (arg0) == NE_EXPR
13991 && integer_zerop (TREE_OPERAND (arg0, 1))
13992 && integer_pow2p (arg1)
13993 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13994 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13995 arg1, OEP_ONLY_CONST))
13996 return pedantic_non_lvalue_loc (loc,
13997 fold_convert_loc (loc, type,
13998 TREE_OPERAND (arg0, 0)));
14000 /* Disable the transformations below for vectors, since
14001 fold_binary_op_with_conditional_arg may undo them immediately,
14002 yielding an infinite loop. */
14003 if (code == VEC_COND_EXPR)
14004 return NULL_TREE;
14006 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14007 if (integer_zerop (op2)
14008 && truth_value_p (TREE_CODE (arg0))
14009 && truth_value_p (TREE_CODE (arg1))
14010 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14011 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14012 : TRUTH_ANDIF_EXPR,
14013 type, fold_convert_loc (loc, type, arg0), arg1);
14015 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14016 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14017 && truth_value_p (TREE_CODE (arg0))
14018 && truth_value_p (TREE_CODE (arg1))
14019 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14021 location_t loc0 = expr_location_or (arg0, loc);
14022 /* Only perform transformation if ARG0 is easily inverted. */
14023 tem = fold_invert_truthvalue (loc0, arg0);
14024 if (tem)
14025 return fold_build2_loc (loc, code == VEC_COND_EXPR
14026 ? BIT_IOR_EXPR
14027 : TRUTH_ORIF_EXPR,
14028 type, fold_convert_loc (loc, type, tem),
14029 arg1);
14032 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14033 if (integer_zerop (arg1)
14034 && truth_value_p (TREE_CODE (arg0))
14035 && truth_value_p (TREE_CODE (op2))
14036 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14038 location_t loc0 = expr_location_or (arg0, loc);
14039 /* Only perform transformation if ARG0 is easily inverted. */
14040 tem = fold_invert_truthvalue (loc0, arg0);
14041 if (tem)
14042 return fold_build2_loc (loc, code == VEC_COND_EXPR
14043 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14044 type, fold_convert_loc (loc, type, tem),
14045 op2);
14048 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14049 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14050 && truth_value_p (TREE_CODE (arg0))
14051 && truth_value_p (TREE_CODE (op2))
14052 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14053 return fold_build2_loc (loc, code == VEC_COND_EXPR
14054 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14055 type, fold_convert_loc (loc, type, arg0), op2);
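/* Illustrative sketch (not part of GCC): the four conversions above
   turn conditional selections of truth values into logical operators,
   e.g.

     (a != 0) ? (b != 0) : 0    => a && b
     (a != 0) ? 1 : (b != 0)    => a || b

   with BIT_AND_EXPR/BIT_IOR_EXPR used instead for VEC_COND_EXPR,
   since vector truth values are element-wise bit masks rather than
   0/1 scalars.  */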
14057 return NULL_TREE;
14059 case CALL_EXPR:
14060 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14061 of fold_ternary on them. */
14062 gcc_unreachable ();
14064 case BIT_FIELD_REF:
14065 if ((TREE_CODE (arg0) == VECTOR_CST
14066 || (TREE_CODE (arg0) == CONSTRUCTOR
14067 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14068 && (type == TREE_TYPE (TREE_TYPE (arg0))
14069 || (TREE_CODE (type) == VECTOR_TYPE
14070 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14072 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14073 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14074 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14075 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14077 if (n != 0
14078 && (idx % width) == 0
14079 && (n % width) == 0
14080 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14082 idx = idx / width;
14083 n = n / width;
14085 if (TREE_CODE (arg0) == VECTOR_CST)
14087 if (n == 1)
14088 return VECTOR_CST_ELT (arg0, idx);
14090 tree *vals = XALLOCAVEC (tree, n);
14091 for (unsigned i = 0; i < n; ++i)
14092 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14093 return build_vector (type, vals);
14096 /* Constructor elements can be subvectors. */
14097 unsigned HOST_WIDE_INT k = 1;
14098 if (CONSTRUCTOR_NELTS (arg0) != 0)
14100 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14101 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14102 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14105 /* We keep an exact subset of the constructor elements. */
14106 if ((idx % k) == 0 && (n % k) == 0)
14108 if (CONSTRUCTOR_NELTS (arg0) == 0)
14109 return build_constructor (type, NULL);
14110 idx /= k;
14111 n /= k;
14112 if (n == 1)
14114 if (idx < CONSTRUCTOR_NELTS (arg0))
14115 return CONSTRUCTOR_ELT (arg0, idx)->value;
14116 return build_zero_cst (type);
14119 vec<constructor_elt, va_gc> *vals;
14120 vec_alloc (vals, n);
14121 for (unsigned i = 0;
14122 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14123 ++i)
14124 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14125 CONSTRUCTOR_ELT
14126 (arg0, idx + i)->value);
14127 return build_constructor (type, vals);
14129 /* The bitfield references a single constructor element. */
14130 else if (idx + n <= (idx / k + 1) * k)
14132 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14133 return build_zero_cst (type);
14134 else if (n == k)
14135 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14136 else
14137 return fold_build3_loc (loc, code, type,
14138 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14139 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14144 /* A bit-field-ref that references the full argument can be stripped. */
14145 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14146 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14147 && integer_zerop (op2))
14148 return fold_convert_loc (loc, type, arg0);
14150 /* On constants we can use native encode/interpret to constant
14151 fold (nearly) all BIT_FIELD_REFs. */
14152 if (CONSTANT_CLASS_P (arg0)
14153 && can_native_interpret_type_p (type)
14154 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14155 /* This limitation should not be necessary; we just need to
14156 round this up to the mode size. */
14157 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14158 /* Need bit-shifting of the buffer to relax the following. */
14159 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14161 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14162 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14163 unsigned HOST_WIDE_INT clen;
14164 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14165 /* ??? We cannot tell native_encode_expr to start at
14166 an arbitrary byte offset. So limit ourselves to a reasonable
14167 amount of work. */
14168 if (clen <= 4096)
14170 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14171 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14172 if (len > 0
14173 && len * BITS_PER_UNIT >= bitpos + bitsize)
14175 tree v = native_interpret_expr (type,
14176 b + bitpos / BITS_PER_UNIT,
14177 bitsize / BITS_PER_UNIT);
14178 if (v)
14179 return v;
14184 return NULL_TREE;
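/* Note on the BIT_FIELD_REF case above (illustrative, not part of
   GCC): a byte-aligned extract from a constant, conceptually

     BIT_FIELD_REF <0x11223344, 16, 16>    16 bits at bit offset 16

   is folded by serializing the constant into a byte buffer with
   native_encode_expr and re-reading the addressed bytes with
   native_interpret_expr, so target byte order is handled
   automatically.  */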
14186 case FMA_EXPR:
14187 /* For integers we can decompose the FMA if possible. */
14188 if (TREE_CODE (arg0) == INTEGER_CST
14189 && TREE_CODE (arg1) == INTEGER_CST)
14190 return fold_build2_loc (loc, PLUS_EXPR, type,
14191 const_binop (MULT_EXPR, arg0, arg1), arg2);
14192 if (integer_zerop (arg2))
14193 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14195 return fold_fma (loc, type, arg0, arg1, arg2);
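/* Illustrative sketch (not part of GCC): for integral operands the
   FMA_EXPR case above decomposes, e.g.

     FMA_EXPR <4, 5, z>    => 20 + z    (both multiplicands constant)
     FMA_EXPR <x, y, 0>    => x * y     (zero addend)

   and defers anything else to fold_fma.  */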
14197 case VEC_PERM_EXPR:
14198 if (TREE_CODE (arg2) == VECTOR_CST)
14200 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
14201 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
14202 unsigned char *sel2 = sel + nelts;
14203 bool need_mask_canon = false;
14204 bool need_mask_canon2 = false;
14205 bool all_in_vec0 = true;
14206 bool all_in_vec1 = true;
14207 bool maybe_identity = true;
14208 bool single_arg = (op0 == op1);
14209 bool changed = false;
14211 mask2 = 2 * nelts - 1;
14212 mask = single_arg ? (nelts - 1) : mask2;
14213 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14214 for (i = 0; i < nelts; i++)
14216 tree val = VECTOR_CST_ELT (arg2, i);
14217 if (TREE_CODE (val) != INTEGER_CST)
14218 return NULL_TREE;
14220 /* Make sure that the perm value is in an acceptable
14221 range. */
14222 wide_int t = val;
14223 need_mask_canon |= wi::gtu_p (t, mask);
14224 need_mask_canon2 |= wi::gtu_p (t, mask2);
14225 sel[i] = t.to_uhwi () & mask;
14226 sel2[i] = t.to_uhwi () & mask2;
14228 if (sel[i] < nelts)
14229 all_in_vec1 = false;
14230 else
14231 all_in_vec0 = false;
14233 if ((sel[i] & (nelts-1)) != i)
14234 maybe_identity = false;
14237 if (maybe_identity)
14239 if (all_in_vec0)
14240 return op0;
14241 if (all_in_vec1)
14242 return op1;
14245 if (all_in_vec0)
14246 op1 = op0;
14247 else if (all_in_vec1)
14249 op0 = op1;
14250 for (i = 0; i < nelts; i++)
14251 sel[i] -= nelts;
14252 need_mask_canon = true;
14255 if ((TREE_CODE (op0) == VECTOR_CST
14256 || TREE_CODE (op0) == CONSTRUCTOR)
14257 && (TREE_CODE (op1) == VECTOR_CST
14258 || TREE_CODE (op1) == CONSTRUCTOR))
14260 tree t = fold_vec_perm (type, op0, op1, sel);
14261 if (t != NULL_TREE)
14262 return t;
14265 if (op0 == op1 && !single_arg)
14266 changed = true;
14268 /* Some targets are deficient and fail to expand a single
14269 argument permutation while still allowing an equivalent
14270 2-argument version. */
14271 if (need_mask_canon && arg2 == op2
14272 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
14273 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
14275 need_mask_canon = need_mask_canon2;
14276 sel = sel2;
14279 if (need_mask_canon && arg2 == op2)
14281 tree *tsel = XALLOCAVEC (tree, nelts);
14282 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14283 for (i = 0; i < nelts; i++)
14284 tsel[i] = build_int_cst (eltype, sel[i]);
14285 op2 = build_vector (TREE_TYPE (arg2), tsel);
14286 changed = true;
14289 if (changed)
14290 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14292 return NULL_TREE;
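/* Note on the VEC_PERM_EXPR case above (illustrative, not part of
   GCC): with four elements per vector, selector element 6 means
   "element 2 of the second operand". Out-of-range selector values are
   canonicalized by masking; e.g. sel = {0, 9, 2, 11} with op0 == op1
   masks down to {0, 1, 2, 3}, an identity within the first operand,
   so the permutation folds away to op0.  */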
14294 default:
14295 return NULL_TREE;
14296 } /* switch (code) */
14299 /* Perform constant folding and related simplification of EXPR.
14300 The related simplifications include x*1 => x, x*0 => 0, etc.,
14301 and application of the associative law.
14302 NOP_EXPR conversions may be removed freely (as long as we
14303 are careful not to change the type of the overall expression).
14304 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14305 but we can constant-fold them if they have constant operands. */
14307 #ifdef ENABLE_FOLD_CHECKING
14308 # define fold(x) fold_1 (x)
14309 static tree fold_1 (tree);
14310 static
14311 #endif
14312 tree
14313 fold (tree expr)
14315 const tree t = expr;
14316 enum tree_code code = TREE_CODE (t);
14317 enum tree_code_class kind = TREE_CODE_CLASS (code);
14318 tree tem;
14319 location_t loc = EXPR_LOCATION (expr);
14321 /* Return right away if a constant. */
14322 if (kind == tcc_constant)
14323 return t;
14325 /* CALL_EXPR-like objects with variable numbers of operands are
14326 treated specially. */
14327 if (kind == tcc_vl_exp)
14329 if (code == CALL_EXPR)
14331 tem = fold_call_expr (loc, expr, false);
14332 return tem ? tem : expr;
14334 return expr;
14337 if (IS_EXPR_CODE_CLASS (kind))
14339 tree type = TREE_TYPE (t);
14340 tree op0, op1, op2;
14342 switch (TREE_CODE_LENGTH (code))
14344 case 1:
14345 op0 = TREE_OPERAND (t, 0);
14346 tem = fold_unary_loc (loc, code, type, op0);
14347 return tem ? tem : expr;
14348 case 2:
14349 op0 = TREE_OPERAND (t, 0);
14350 op1 = TREE_OPERAND (t, 1);
14351 tem = fold_binary_loc (loc, code, type, op0, op1);
14352 return tem ? tem : expr;
14353 case 3:
14354 op0 = TREE_OPERAND (t, 0);
14355 op1 = TREE_OPERAND (t, 1);
14356 op2 = TREE_OPERAND (t, 2);
14357 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14358 return tem ? tem : expr;
14359 default:
14360 break;
14364 switch (code)
14366 case ARRAY_REF:
14368 tree op0 = TREE_OPERAND (t, 0);
14369 tree op1 = TREE_OPERAND (t, 1);
14371 if (TREE_CODE (op1) == INTEGER_CST
14372 && TREE_CODE (op0) == CONSTRUCTOR
14373 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14375 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14376 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14377 unsigned HOST_WIDE_INT begin = 0;
14379 /* Find a matching index by means of a binary search. */
14380 while (begin != end)
14382 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14383 tree index = (*elts)[middle].index;
14385 if (TREE_CODE (index) == INTEGER_CST
14386 && tree_int_cst_lt (index, op1))
14387 begin = middle + 1;
14388 else if (TREE_CODE (index) == INTEGER_CST
14389 && tree_int_cst_lt (op1, index))
14390 end = middle;
14391 else if (TREE_CODE (index) == RANGE_EXPR
14392 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14393 begin = middle + 1;
14394 else if (TREE_CODE (index) == RANGE_EXPR
14395 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14396 end = middle;
14397 else
14398 return (*elts)[middle].value;
14402 return t;
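/* Illustrative sketch (not part of GCC): an ARRAY_REF of a constant
   CONSTRUCTOR with a constant index is resolved by the binary search
   above, e.g.

     static const int tbl[4] = { 10, 20, 30, 40 };
     ... tbl[2] ...    => 30, once the initializer is visible here

   RANGE_EXPR indices describe runs of identical elements, which is
   why the search compares against both range bounds.  */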
14405 /* Return a VECTOR_CST if possible. */
14406 case CONSTRUCTOR:
14408 tree type = TREE_TYPE (t);
14409 if (TREE_CODE (type) != VECTOR_TYPE)
14410 return t;
14412 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14413 unsigned HOST_WIDE_INT idx, pos = 0;
14414 tree value;
14416 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14418 if (!CONSTANT_CLASS_P (value))
14419 return t;
14420 if (TREE_CODE (value) == VECTOR_CST)
14422 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14423 vec[pos++] = VECTOR_CST_ELT (value, i);
14425 else
14426 vec[pos++] = value;
14428 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14429 vec[pos] = build_zero_cst (TREE_TYPE (type));
14431 return build_vector (type, vec);
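/* Illustrative sketch (not part of GCC): a vector CONSTRUCTOR whose
   elements are all constants becomes a VECTOR_CST, e.g. with GNU C
   vector extensions

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { 1, 2 };    => VECTOR_CST {1, 2, 0, 0}

   trailing elements are implicitly zero, and element values that are
   themselves VECTOR_CSTs are spliced in element-wise.  */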
14434 case CONST_DECL:
14435 return fold (DECL_INITIAL (t));
14437 default:
14438 return t;
14439 } /* switch (code) */
14442 #ifdef ENABLE_FOLD_CHECKING
14443 #undef fold
14445 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14446 hash_table<pointer_hash<const tree_node> > *);
14447 static void fold_check_failed (const_tree, const_tree);
14448 void print_fold_checksum (const_tree);
14450 /* With --enable-checking=fold, compute a digest of EXPR before
14451 and after the actual fold call, to verify that fold did not
14452 accidentally change the original expr. */
14454 tree
14455 fold (tree expr)
14457 tree ret;
14458 struct md5_ctx ctx;
14459 unsigned char checksum_before[16], checksum_after[16];
14460 hash_table<pointer_hash<const tree_node> > ht (32);
14462 md5_init_ctx (&ctx);
14463 fold_checksum_tree (expr, &ctx, &ht);
14464 md5_finish_ctx (&ctx, checksum_before);
14465 ht.empty ();
14467 ret = fold_1 (expr);
14469 md5_init_ctx (&ctx);
14470 fold_checksum_tree (expr, &ctx, &ht);
14471 md5_finish_ctx (&ctx, checksum_after);
14473 if (memcmp (checksum_before, checksum_after, 16))
14474 fold_check_failed (expr, ret);
14476 return ret;
14479 void
14480 print_fold_checksum (const_tree expr)
14482 struct md5_ctx ctx;
14483 unsigned char checksum[16], cnt;
14484 hash_table<pointer_hash<const tree_node> > ht (32);
14486 md5_init_ctx (&ctx);
14487 fold_checksum_tree (expr, &ctx, &ht);
14488 md5_finish_ctx (&ctx, checksum);
14489 for (cnt = 0; cnt < 16; ++cnt)
14490 fprintf (stderr, "%02x", checksum[cnt]);
14491 putc ('\n', stderr);
14494 static void
14495 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14497 internal_error ("fold check: original tree changed by fold");
14500 static void
14501 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14502 hash_table<pointer_hash <const tree_node> > *ht)
14504 const tree_node **slot;
14505 enum tree_code code;
14506 union tree_node buf;
14507 int i, len;
14509 recursive_label:
14510 if (expr == NULL)
14511 return;
14512 slot = ht->find_slot (expr, INSERT);
14513 if (*slot != NULL)
14514 return;
14515 *slot = expr;
14516 code = TREE_CODE (expr);
14517 if (TREE_CODE_CLASS (code) == tcc_declaration
14518 && DECL_ASSEMBLER_NAME_SET_P (expr))
14520 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14521 memcpy ((char *) &buf, expr, tree_size (expr));
14522 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14523 expr = (tree) &buf;
14525 else if (TREE_CODE_CLASS (code) == tcc_type
14526 && (TYPE_POINTER_TO (expr)
14527 || TYPE_REFERENCE_TO (expr)
14528 || TYPE_CACHED_VALUES_P (expr)
14529 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14530 || TYPE_NEXT_VARIANT (expr)))
14532 /* Allow these fields to be modified. */
14533 tree tmp;
14534 memcpy ((char *) &buf, expr, tree_size (expr));
14535 expr = tmp = (tree) &buf;
14536 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14537 TYPE_POINTER_TO (tmp) = NULL;
14538 TYPE_REFERENCE_TO (tmp) = NULL;
14539 TYPE_NEXT_VARIANT (tmp) = NULL;
14540 if (TYPE_CACHED_VALUES_P (tmp))
14542 TYPE_CACHED_VALUES_P (tmp) = 0;
14543 TYPE_CACHED_VALUES (tmp) = NULL;
14546 md5_process_bytes (expr, tree_size (expr), ctx);
14547 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14548 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14549 if (TREE_CODE_CLASS (code) != tcc_type
14550 && TREE_CODE_CLASS (code) != tcc_declaration
14551 && code != TREE_LIST
14552 && code != SSA_NAME
14553 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14554 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14555 switch (TREE_CODE_CLASS (code))
14557 case tcc_constant:
14558 switch (code)
14560 case STRING_CST:
14561 md5_process_bytes (TREE_STRING_POINTER (expr),
14562 TREE_STRING_LENGTH (expr), ctx);
14563 break;
14564 case COMPLEX_CST:
14565 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14566 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14567 break;
14568 case VECTOR_CST:
14569 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14570 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14571 break;
14572 default:
14573 break;
14575 break;
14576 case tcc_exceptional:
14577 switch (code)
14579 case TREE_LIST:
14580 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14581 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14582 expr = TREE_CHAIN (expr);
14583 goto recursive_label;
14584 break;
14585 case TREE_VEC:
14586 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14587 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14588 break;
14589 default:
14590 break;
14592 break;
14593 case tcc_expression:
14594 case tcc_reference:
14595 case tcc_comparison:
14596 case tcc_unary:
14597 case tcc_binary:
14598 case tcc_statement:
14599 case tcc_vl_exp:
14600 len = TREE_OPERAND_LENGTH (expr);
14601 for (i = 0; i < len; ++i)
14602 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14603 break;
14604 case tcc_declaration:
14605 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14606 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14607 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14609 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14610 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14611 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14612 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14613 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14616 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14618 if (TREE_CODE (expr) == FUNCTION_DECL)
14620 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14621 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14623 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14625 break;
14626 case tcc_type:
14627 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14628 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14629 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14630 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14631 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14632 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14633 if (INTEGRAL_TYPE_P (expr)
14634 || SCALAR_FLOAT_TYPE_P (expr))
14636 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14637 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14639 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14640 if (TREE_CODE (expr) == RECORD_TYPE
14641 || TREE_CODE (expr) == UNION_TYPE
14642 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14643 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14644 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14645 break;
14646 default:
14647 break;
14651 /* Helper function for outputting the checksum of a tree T. When
14652 debugging with gdb, you can "define mynext" to be "next" followed
14653 by "call debug_fold_checksum (op0)", then just trace down till the
14654 outputs differ. */
14656 DEBUG_FUNCTION void
14657 debug_fold_checksum (const_tree t)
14659 int i;
14660 unsigned char checksum[16];
14661 struct md5_ctx ctx;
14662 hash_table<pointer_hash<const tree_node> > ht (32);
14664 md5_init_ctx (&ctx);
14665 fold_checksum_tree (t, &ctx, &ht);
14666 md5_finish_ctx (&ctx, checksum);
14667 ht.empty ();
14669 for (i = 0; i < 16; i++)
14670 fprintf (stderr, "%d ", checksum[i]);
14672 fprintf (stderr, "\n");
14675 #endif
14677 /* Fold a unary tree expression with code CODE of type TYPE with an
14678 operand OP0. LOC is the location of the resulting expression.
14679 Return a folded expression if successful. Otherwise, return a tree
14680 expression with code CODE of type TYPE with an operand OP0. */
14682 tree
14683 fold_build1_stat_loc (location_t loc,
14684 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14686 tree tem;
14687 #ifdef ENABLE_FOLD_CHECKING
14688 unsigned char checksum_before[16], checksum_after[16];
14689 struct md5_ctx ctx;
14690 hash_table<pointer_hash<const tree_node> > ht (32);
14692 md5_init_ctx (&ctx);
14693 fold_checksum_tree (op0, &ctx, &ht);
14694 md5_finish_ctx (&ctx, checksum_before);
14695 ht.empty ();
14696 #endif
14698 tem = fold_unary_loc (loc, code, type, op0);
14699 if (!tem)
14700 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14702 #ifdef ENABLE_FOLD_CHECKING
14703 md5_init_ctx (&ctx);
14704 fold_checksum_tree (op0, &ctx, &ht);
14705 md5_finish_ctx (&ctx, checksum_after);
14707 if (memcmp (checksum_before, checksum_after, 16))
14708 fold_check_failed (op0, tem);
14709 #endif
14710 return tem;
14713 /* Fold a binary tree expression with code CODE of type TYPE with
14714 operands OP0 and OP1. LOC is the location of the resulting
14715 expression. Return a folded expression if successful. Otherwise,
14716 return a tree expression with code CODE of type TYPE with operands
14717 OP0 and OP1. */
14719 tree
14720 fold_build2_stat_loc (location_t loc,
14721 enum tree_code code, tree type, tree op0, tree op1
14722 MEM_STAT_DECL)
14724 tree tem;
14725 #ifdef ENABLE_FOLD_CHECKING
14726 unsigned char checksum_before_op0[16],
14727 checksum_before_op1[16],
14728 checksum_after_op0[16],
14729 checksum_after_op1[16];
14730 struct md5_ctx ctx;
14731 hash_table<pointer_hash<const tree_node> > ht (32);
14733 md5_init_ctx (&ctx);
14734 fold_checksum_tree (op0, &ctx, &ht);
14735 md5_finish_ctx (&ctx, checksum_before_op0);
14736 ht.empty ();
14738 md5_init_ctx (&ctx);
14739 fold_checksum_tree (op1, &ctx, &ht);
14740 md5_finish_ctx (&ctx, checksum_before_op1);
14741 ht.empty ();
14742 #endif
14744 tem = fold_binary_loc (loc, code, type, op0, op1);
14745 if (!tem)
14746 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14748 #ifdef ENABLE_FOLD_CHECKING
14749 md5_init_ctx (&ctx);
14750 fold_checksum_tree (op0, &ctx, &ht);
14751 md5_finish_ctx (&ctx, checksum_after_op0);
14752 ht.empty ();
14754 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14755 fold_check_failed (op0, tem);
14757 md5_init_ctx (&ctx);
14758 fold_checksum_tree (op1, &ctx, &ht);
14759 md5_finish_ctx (&ctx, checksum_after_op1);
14761 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14762 fold_check_failed (op1, tem);
14763 #endif
14764 return tem;
14767 /* Fold a ternary tree expression with code CODE of type TYPE with
14768 operands OP0, OP1, and OP2. Return a folded expression if
14769 successful. Otherwise, return a tree expression with code CODE of
14770 type TYPE with operands OP0, OP1, and OP2. */
14772 tree
14773 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14774 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14776 tree tem;
14777 #ifdef ENABLE_FOLD_CHECKING
14778 unsigned char checksum_before_op0[16],
14779 checksum_before_op1[16],
14780 checksum_before_op2[16],
14781 checksum_after_op0[16],
14782 checksum_after_op1[16],
14783 checksum_after_op2[16];
14784 struct md5_ctx ctx;
14785 hash_table<pointer_hash<const tree_node> > ht (32);
14787 md5_init_ctx (&ctx);
14788 fold_checksum_tree (op0, &ctx, &ht);
14789 md5_finish_ctx (&ctx, checksum_before_op0);
14790 ht.empty ();
14792 md5_init_ctx (&ctx);
14793 fold_checksum_tree (op1, &ctx, &ht);
14794 md5_finish_ctx (&ctx, checksum_before_op1);
14795 ht.empty ();
14797 md5_init_ctx (&ctx);
14798 fold_checksum_tree (op2, &ctx, &ht);
14799 md5_finish_ctx (&ctx, checksum_before_op2);
14800 ht.empty ();
14801 #endif
14803 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14804 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14805 if (!tem)
14806 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14808 #ifdef ENABLE_FOLD_CHECKING
14809 md5_init_ctx (&ctx);
14810 fold_checksum_tree (op0, &ctx, &ht);
14811 md5_finish_ctx (&ctx, checksum_after_op0);
14812 ht.empty ();
14814 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14815 fold_check_failed (op0, tem);
14817 md5_init_ctx (&ctx);
14818 fold_checksum_tree (op1, &ctx, &ht);
14819 md5_finish_ctx (&ctx, checksum_after_op1);
14820 ht.empty ();
14822 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14823 fold_check_failed (op1, tem);
14825 md5_init_ctx (&ctx);
14826 fold_checksum_tree (op2, &ctx, &ht);
14827 md5_finish_ctx (&ctx, checksum_after_op2);
14829 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14830 fold_check_failed (op2, tem);
14831 #endif
14832 return tem;
14835 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14836 arguments in ARGARRAY, and a null static chain.
14837 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14838 of type TYPE from the given operands as constructed by build_call_array. */
14840 tree
14841 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14842 int nargs, tree *argarray)
14844 tree tem;
14845 #ifdef ENABLE_FOLD_CHECKING
14846 unsigned char checksum_before_fn[16],
14847 checksum_before_arglist[16],
14848 checksum_after_fn[16],
14849 checksum_after_arglist[16];
14850 struct md5_ctx ctx;
14851 hash_table<pointer_hash<const tree_node> > ht (32);
14852 int i;
14854 md5_init_ctx (&ctx);
14855 fold_checksum_tree (fn, &ctx, &ht);
14856 md5_finish_ctx (&ctx, checksum_before_fn);
14857 ht.empty ();
14859 md5_init_ctx (&ctx);
14860 for (i = 0; i < nargs; i++)
14861 fold_checksum_tree (argarray[i], &ctx, &ht);
14862 md5_finish_ctx (&ctx, checksum_before_arglist);
14863 ht.empty ();
14864 #endif
14866 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14868 #ifdef ENABLE_FOLD_CHECKING
14869 md5_init_ctx (&ctx);
14870 fold_checksum_tree (fn, &ctx, &ht);
14871 md5_finish_ctx (&ctx, checksum_after_fn);
14872 ht.empty ();
14874 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14875 fold_check_failed (fn, tem);
14877 md5_init_ctx (&ctx);
14878 for (i = 0; i < nargs; i++)
14879 fold_checksum_tree (argarray[i], &ctx, &ht);
14880 md5_finish_ctx (&ctx, checksum_after_arglist);
14882 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14883 fold_check_failed (NULL_TREE, tem);
14884 #endif
14885 return tem;
14888 /* Perform constant folding and related simplification of initializer
14889 expression EXPR. These behave identically to "fold_buildN" but ignore
14890 potential run-time traps and exceptions that fold must preserve. */
14892 #define START_FOLD_INIT \
14893 int saved_signaling_nans = flag_signaling_nans;\
14894 int saved_trapping_math = flag_trapping_math;\
14895 int saved_rounding_math = flag_rounding_math;\
14896 int saved_trapv = flag_trapv;\
14897 int saved_folding_initializer = folding_initializer;\
14898 flag_signaling_nans = 0;\
14899 flag_trapping_math = 0;\
14900 flag_rounding_math = 0;\
14901 flag_trapv = 0;\
14902 folding_initializer = 1;
14904 #define END_FOLD_INIT \
14905 flag_signaling_nans = saved_signaling_nans;\
14906 flag_trapping_math = saved_trapping_math;\
14907 flag_rounding_math = saved_rounding_math;\
14908 flag_trapv = saved_trapv;\
14909 folding_initializer = saved_folding_initializer;
14911 tree
14912 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14913 tree type, tree op)
14915 tree result;
14916 START_FOLD_INIT;
14918 result = fold_build1_loc (loc, code, type, op);
14920 END_FOLD_INIT;
14921 return result;
14924 tree
14925 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14926 tree type, tree op0, tree op1)
14928 tree result;
14929 START_FOLD_INIT;
14931 result = fold_build2_loc (loc, code, type, op0, op1);
14933 END_FOLD_INIT;
14934 return result;
14937 tree
14938 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14939 int nargs, tree *argarray)
14941 tree result;
14942 START_FOLD_INIT;
14944 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14946 END_FOLD_INIT;
14947 return result;
14950 #undef START_FOLD_INIT
14951 #undef END_FOLD_INIT
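/* Illustrative sketch (not part of GCC): the *_initializer_loc
   wrappers exist because a static initializer such as

     static double d = 1.0 / 3.0;

   must fold to a constant even under -frounding-math, where fold
   would normally keep the inexact division for run time. The
   wrappers clear flag_rounding_math and the trapping flags around the
   fold and then restore them, leaving run-time semantics elsewhere
   untouched.  */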
14953 /* Determine whether the first argument is a multiple of the second argument.
14954 Return 0 if it is not, or if we cannot easily determine that it is.
14956 An example of the sort of thing we care about (at this point; this routine
14957 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14958 fold cases do now) is discovering that
14960 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14962 is a multiple of
14964 SAVE_EXPR (J * 8)
14966 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14968 This code also handles discovering that
14970 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14972 is a multiple of 8 so we don't have to worry about dealing with a
14973 possible remainder.
14975 Note that we *look* inside a SAVE_EXPR only to determine how it was
14976 calculated; it is not safe for fold to do much of anything else with the
14977 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14978 at run time. For example, the latter example above *cannot* be implemented
14979 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14980 evaluation time of the original SAVE_EXPR is not necessarily the same at
14981 the time the new expression is evaluated. The only optimization of this
14982 sort that would be valid is changing
14984 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14986 divided by 8 to
14988 SAVE_EXPR (I) * SAVE_EXPR (J)
14990 (where the same SAVE_EXPR (J) is used in the original and the
14991 transformed version). */
14993 int
14994 multiple_of_p (tree type, const_tree top, const_tree bottom)
14996 if (operand_equal_p (top, bottom, 0))
14997 return 1;
14999 if (TREE_CODE (type) != INTEGER_TYPE)
15000 return 0;
15002 switch (TREE_CODE (top))
15004 case BIT_AND_EXPR:
15005 /* Bitwise and provides a power of two multiple. If the mask is
15006 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15007 if (!integer_pow2p (bottom))
15008 return 0;
15009 /* FALLTHRU */
15011 case MULT_EXPR:
15012 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15013 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15015 case PLUS_EXPR:
15016 case MINUS_EXPR:
15017 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15018 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15020 case LSHIFT_EXPR:
15021 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15023 tree op1, t1;
15025 op1 = TREE_OPERAND (top, 1);
15026 /* const_binop may not detect overflow correctly,
15027 so check for it explicitly here. */
15028 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15029 && 0 != (t1 = fold_convert (type,
15030 const_binop (LSHIFT_EXPR,
15031 size_one_node,
15032 op1)))
15033 && !TREE_OVERFLOW (t1))
15034 return multiple_of_p (type, t1, bottom);
15036 return 0;
15038 case NOP_EXPR:
15039 /* Can't handle conversions from non-integral or wider integral type. */
15040 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15041 || (TYPE_PRECISION (type)
15042 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15043 return 0;
15045 /* .. fall through ... */
15047 case SAVE_EXPR:
15048 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15050 case COND_EXPR:
15051 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15052 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15054 case INTEGER_CST:
15055 if (TREE_CODE (bottom) != INTEGER_CST
15056 || integer_zerop (bottom)
15057 || (TYPE_UNSIGNED (type)
15058 && (tree_int_cst_sgn (top) < 0
15059 || tree_int_cst_sgn (bottom) < 0)))
15060 return 0;
15061 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15062 SIGNED);
15064 default:
15065 return 0;
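/* A worked example (not from the GCC sources) of what multiple_of_p
   can conclude:

     multiple_of_p (sizetype, J * 8, 8)              => 1
     multiple_of_p (sizetype, (I + 1) * (J * 8), 8)  => 1
     multiple_of_p (sizetype, I * J, 8)              => 0  (unknown)

   It conservatively returns 0 whenever the answer is not evident from
   the structure of the expression.  */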
15069 /* Return true if CODE or TYPE is known to be non-negative. */
15071 static bool
15072 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15074 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15075 && truth_value_p (code))
15076 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15077 have a signed:1 type (where the values are -1 and 0). */
15078 return true;
15079 return false;
15082 /* Return true if (CODE OP0) is known to be non-negative. If the return
15083 value is based on the assumption that signed overflow is undefined,
15084 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15085 *STRICT_OVERFLOW_P. */
15087 bool
15088 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15089 bool *strict_overflow_p)
15091 if (TYPE_UNSIGNED (type))
15092 return true;
15094 switch (code)
15096 case ABS_EXPR:
15097 /* We can't return 1 if flag_wrapv is set because
15098 ABS_EXPR<INT_MIN> = INT_MIN. */
15099 if (!INTEGRAL_TYPE_P (type))
15100 return true;
15101 if (TYPE_OVERFLOW_UNDEFINED (type))
15103 *strict_overflow_p = true;
15104 return true;
15106 break;
15108 case NON_LVALUE_EXPR:
15109 case FLOAT_EXPR:
15110 case FIX_TRUNC_EXPR:
15111 return tree_expr_nonnegative_warnv_p (op0,
15112 strict_overflow_p);
15114 case NOP_EXPR:
15116 tree inner_type = TREE_TYPE (op0);
15117 tree outer_type = type;
15119 if (TREE_CODE (outer_type) == REAL_TYPE)
15121 if (TREE_CODE (inner_type) == REAL_TYPE)
15122 return tree_expr_nonnegative_warnv_p (op0,
15123 strict_overflow_p);
15124 if (INTEGRAL_TYPE_P (inner_type))
15126 if (TYPE_UNSIGNED (inner_type))
15127 return true;
15128 return tree_expr_nonnegative_warnv_p (op0,
15129 strict_overflow_p);
15132 else if (INTEGRAL_TYPE_P (outer_type))
15134 if (TREE_CODE (inner_type) == REAL_TYPE)
15135 return tree_expr_nonnegative_warnv_p (op0,
15136 strict_overflow_p);
15137 if (INTEGRAL_TYPE_P (inner_type))
15138 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15139 && TYPE_UNSIGNED (inner_type);
15142 break;
15144 default:
15145 return tree_simple_nonnegative_warnv_p (code, type);
15148 /* We don't know the sign of `t', so be conservative and return false. */
15149 return false;
15152 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15153 value is based on the assumption that signed overflow is undefined,
15154 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15155 *STRICT_OVERFLOW_P. */
15157 bool
15158 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15159 tree op1, bool *strict_overflow_p)
15161 if (TYPE_UNSIGNED (type))
15162 return true;
15164 switch (code)
15166 case POINTER_PLUS_EXPR:
15167 case PLUS_EXPR:
15168 if (FLOAT_TYPE_P (type))
15169 return (tree_expr_nonnegative_warnv_p (op0,
15170 strict_overflow_p)
15171 && tree_expr_nonnegative_warnv_p (op1,
15172 strict_overflow_p));
15174 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15175 both unsigned and at least 2 bits shorter than the result. */
15176 if (TREE_CODE (type) == INTEGER_TYPE
15177 && TREE_CODE (op0) == NOP_EXPR
15178 && TREE_CODE (op1) == NOP_EXPR)
15180 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15181 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15182 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15183 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15185 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15186 TYPE_PRECISION (inner2)) + 1;
15187 return prec < TYPE_PRECISION (type);
15190 break;
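/* Illustrative sketch (not part of GCC) for the PLUS_EXPR reasoning
   above: with 32-bit int,

     (int) (unsigned short) x + (int) (unsigned short) y

   is at most 2 * 65535 < 2^17, and 17 < 32, so the sum is provably
   non-negative even though the result type is signed.  */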
15192 case MULT_EXPR:
15193 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15195 /* x * x is always non-negative for floating point x
15196 or without overflow. */
15197 if (operand_equal_p (op0, op1, 0)
15198 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15199 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15201 if (TYPE_OVERFLOW_UNDEFINED (type))
15202 *strict_overflow_p = true;
15203 return true;
15207 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15208 both unsigned and their combined precision is less than that of the result. */
15209 if (TREE_CODE (type) == INTEGER_TYPE
15210 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15211 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15213 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15214 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15215 : TREE_TYPE (op0);
15216 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15217 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15218 : TREE_TYPE (op1);
15220 bool unsigned0 = TYPE_UNSIGNED (inner0);
15221 bool unsigned1 = TYPE_UNSIGNED (inner1);
15223 if (TREE_CODE (op0) == INTEGER_CST)
15224 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15226 if (TREE_CODE (op1) == INTEGER_CST)
15227 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15229 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15230 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15232 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15233 ? tree_int_cst_min_precision (op0, UNSIGNED)
15234 : TYPE_PRECISION (inner0);
15236 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15237 ? tree_int_cst_min_precision (op1, UNSIGNED)
15238 : TYPE_PRECISION (inner1);
15240 return precision0 + precision1 < TYPE_PRECISION (type);
15243 return false;
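/* Illustrative sketch (not part of GCC) for the MULT_EXPR reasoning
   above: with 32-bit int,

     (int) (unsigned char) x * (int) (unsigned char) y

   is at most 255 * 255, and 8 + 8 < 32, so the product is provably
   non-negative. Two zero-extended unsigned shorts would not qualify:
   16 + 16 is not less than 32, and 65535 * 65535 indeed wraps to a
   negative int.  */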
15245 case BIT_AND_EXPR:
15246 case MAX_EXPR:
15247 return (tree_expr_nonnegative_warnv_p (op0,
15248 strict_overflow_p)
15249 || tree_expr_nonnegative_warnv_p (op1,
15250 strict_overflow_p));
15252 case BIT_IOR_EXPR:
15253 case BIT_XOR_EXPR:
15254 case MIN_EXPR:
15255 case RDIV_EXPR:
15256 case TRUNC_DIV_EXPR:
15257 case CEIL_DIV_EXPR:
15258 case FLOOR_DIV_EXPR:
15259 case ROUND_DIV_EXPR:
15260 return (tree_expr_nonnegative_warnv_p (op0,
15261 strict_overflow_p)
15262 && tree_expr_nonnegative_warnv_p (op1,
15263 strict_overflow_p));
15265 case TRUNC_MOD_EXPR:
15266 case CEIL_MOD_EXPR:
15267 case FLOOR_MOD_EXPR:
15268 case ROUND_MOD_EXPR:
15269 return tree_expr_nonnegative_warnv_p (op0,
15270 strict_overflow_p);
15271 default:
15272 return tree_simple_nonnegative_warnv_p (code, type);
15275 /* We don't know the sign of `t', so be conservative and return false. */
15276 return false;
15279 /* Return true if T is known to be non-negative. If the return
15280 value is based on the assumption that signed overflow is undefined,
15281 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15282 *STRICT_OVERFLOW_P. */
15284 bool
15285 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15287 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15288 return true;
15290 switch (TREE_CODE (t))
15292 case INTEGER_CST:
15293 return tree_int_cst_sgn (t) >= 0;
15295 case REAL_CST:
15296 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15298 case FIXED_CST:
15299 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15301 case COND_EXPR:
15302 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15303 strict_overflow_p)
15304 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15305 strict_overflow_p));
15306 default:
15307 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15308 TREE_TYPE (t));
15310 /* We don't know the sign of `t', so be conservative and return false. */
15311 return false;
15314 /* Return true if T is known to be non-negative. If the return
15315 value is based on the assumption that signed overflow is undefined,
15316 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15317 *STRICT_OVERFLOW_P. */
15319 bool
15320 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15321 tree arg0, tree arg1, bool *strict_overflow_p)
15323 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15324 switch (DECL_FUNCTION_CODE (fndecl))
15326 CASE_FLT_FN (BUILT_IN_ACOS):
15327 CASE_FLT_FN (BUILT_IN_ACOSH):
15328 CASE_FLT_FN (BUILT_IN_CABS):
15329 CASE_FLT_FN (BUILT_IN_COSH):
15330 CASE_FLT_FN (BUILT_IN_ERFC):
15331 CASE_FLT_FN (BUILT_IN_EXP):
15332 CASE_FLT_FN (BUILT_IN_EXP10):
15333 CASE_FLT_FN (BUILT_IN_EXP2):
15334 CASE_FLT_FN (BUILT_IN_FABS):
15335 CASE_FLT_FN (BUILT_IN_FDIM):
15336 CASE_FLT_FN (BUILT_IN_HYPOT):
15337 CASE_FLT_FN (BUILT_IN_POW10):
15338 CASE_INT_FN (BUILT_IN_FFS):
15339 CASE_INT_FN (BUILT_IN_PARITY):
15340 CASE_INT_FN (BUILT_IN_POPCOUNT):
15341 CASE_INT_FN (BUILT_IN_CLZ):
15342 CASE_INT_FN (BUILT_IN_CLRSB):
15343 case BUILT_IN_BSWAP32:
15344 case BUILT_IN_BSWAP64:
15345 /* Always true. */
15346 return true;
15348 CASE_FLT_FN (BUILT_IN_SQRT):
15349 /* sqrt(-0.0) is -0.0. */
15350 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15351 return true;
15352 return tree_expr_nonnegative_warnv_p (arg0,
15353 strict_overflow_p);
15355 CASE_FLT_FN (BUILT_IN_ASINH):
15356 CASE_FLT_FN (BUILT_IN_ATAN):
15357 CASE_FLT_FN (BUILT_IN_ATANH):
15358 CASE_FLT_FN (BUILT_IN_CBRT):
15359 CASE_FLT_FN (BUILT_IN_CEIL):
15360 CASE_FLT_FN (BUILT_IN_ERF):
15361 CASE_FLT_FN (BUILT_IN_EXPM1):
15362 CASE_FLT_FN (BUILT_IN_FLOOR):
15363 CASE_FLT_FN (BUILT_IN_FMOD):
15364 CASE_FLT_FN (BUILT_IN_FREXP):
15365 CASE_FLT_FN (BUILT_IN_ICEIL):
15366 CASE_FLT_FN (BUILT_IN_IFLOOR):
15367 CASE_FLT_FN (BUILT_IN_IRINT):
15368 CASE_FLT_FN (BUILT_IN_IROUND):
15369 CASE_FLT_FN (BUILT_IN_LCEIL):
15370 CASE_FLT_FN (BUILT_IN_LDEXP):
15371 CASE_FLT_FN (BUILT_IN_LFLOOR):
15372 CASE_FLT_FN (BUILT_IN_LLCEIL):
15373 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15374 CASE_FLT_FN (BUILT_IN_LLRINT):
15375 CASE_FLT_FN (BUILT_IN_LLROUND):
15376 CASE_FLT_FN (BUILT_IN_LRINT):
15377 CASE_FLT_FN (BUILT_IN_LROUND):
15378 CASE_FLT_FN (BUILT_IN_MODF):
15379 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15380 CASE_FLT_FN (BUILT_IN_RINT):
15381 CASE_FLT_FN (BUILT_IN_ROUND):
15382 CASE_FLT_FN (BUILT_IN_SCALB):
15383 CASE_FLT_FN (BUILT_IN_SCALBLN):
15384 CASE_FLT_FN (BUILT_IN_SCALBN):
15385 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15386 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15387 CASE_FLT_FN (BUILT_IN_SINH):
15388 CASE_FLT_FN (BUILT_IN_TANH):
15389 CASE_FLT_FN (BUILT_IN_TRUNC):
15390 /* True if the 1st argument is nonnegative. */
15391 return tree_expr_nonnegative_warnv_p (arg0,
15392 strict_overflow_p);
15394 CASE_FLT_FN (BUILT_IN_FMAX):
15395 /* True if the 1st OR 2nd arguments are nonnegative. */
15396 return (tree_expr_nonnegative_warnv_p (arg0,
15397 strict_overflow_p)
15398 || (tree_expr_nonnegative_warnv_p (arg1,
15399 strict_overflow_p)));
15401 CASE_FLT_FN (BUILT_IN_FMIN):
15402 /* True if the 1st AND 2nd arguments are nonnegative. */
15403 return (tree_expr_nonnegative_warnv_p (arg0,
15404 strict_overflow_p)
15405 && (tree_expr_nonnegative_warnv_p (arg1,
15406 strict_overflow_p)));
15408 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15409 /* True if the 2nd argument is nonnegative. */
15410 return tree_expr_nonnegative_warnv_p (arg1,
15411 strict_overflow_p);
15413 CASE_FLT_FN (BUILT_IN_POWI):
15414 /* True if the 1st argument is nonnegative or the second
15415 argument is an even integer. */
15416 if (TREE_CODE (arg1) == INTEGER_CST
15417 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15418 return true;
15419 return tree_expr_nonnegative_warnv_p (arg0,
15420 strict_overflow_p);
15422 CASE_FLT_FN (BUILT_IN_POW):
15423 /* True if the 1st argument is nonnegative or the second
15424 argument is an even integer valued real. */
15425 if (TREE_CODE (arg1) == REAL_CST)
15427 REAL_VALUE_TYPE c;
15428 HOST_WIDE_INT n;
15430 c = TREE_REAL_CST (arg1);
15431 n = real_to_integer (&c);
15432 if ((n & 1) == 0)
15434 REAL_VALUE_TYPE cint;
15435 real_from_integer (&cint, VOIDmode, n, SIGNED);
15436 if (real_identical (&c, &cint))
15437 return true;
15440 return tree_expr_nonnegative_warnv_p (arg0,
15441 strict_overflow_p);
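/* Illustrative note (not part of GCC) on the pow handling above:
   pow (x, 2.0) is known non-negative for every x because the exponent
   is an even integer-valued real, whereas pow (x, 2.5) is known
   non-negative only when x itself is.  */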
15443 default:
15444 break;
15446 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15447 type);
15450 /* Return true if T is known to be non-negative. If the return
15451 value is based on the assumption that signed overflow is undefined,
15452 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15453 *STRICT_OVERFLOW_P. */
15455 static bool
15456 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15458 enum tree_code code = TREE_CODE (t);
15459 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15460 return true;
15462 switch (code)
15464 case TARGET_EXPR:
15466 tree temp = TARGET_EXPR_SLOT (t);
15467 t = TARGET_EXPR_INITIAL (t);
15469 /* If the initializer is non-void, then it's a normal expression
15470 that will be assigned to the slot. */
15471 if (!VOID_TYPE_P (t))
15472 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15474 /* Otherwise, the initializer sets the slot in some way. One common
15475 way is an assignment statement at the end of the initializer. */
15476 while (1)
15478 if (TREE_CODE (t) == BIND_EXPR)
15479 t = expr_last (BIND_EXPR_BODY (t));
15480 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15481 || TREE_CODE (t) == TRY_CATCH_EXPR)
15482 t = expr_last (TREE_OPERAND (t, 0));
15483 else if (TREE_CODE (t) == STATEMENT_LIST)
15484 t = expr_last (t);
15485 else
15486 break;
15488 if (TREE_CODE (t) == MODIFY_EXPR
15489 && TREE_OPERAND (t, 0) == temp)
15490 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15491 strict_overflow_p);
15493 return false;
15496 case CALL_EXPR:
15498 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15499 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15501 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15502 get_callee_fndecl (t),
15503 arg0,
15504 arg1,
15505 strict_overflow_p);
15507 case COMPOUND_EXPR:
15508 case MODIFY_EXPR:
15509 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15510 strict_overflow_p);
15511 case BIND_EXPR:
15512 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15513 strict_overflow_p);
15514 case SAVE_EXPR:
15515 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15516 strict_overflow_p);
15518 default:
15519 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15520 TREE_TYPE (t));
15523 /* We don't know the sign of `t', so be conservative and return false. */
15524 return false;
15527 /* Return true if T is known to be non-negative. If the return
15528 value is based on the assumption that signed overflow is undefined,
15529 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15530 *STRICT_OVERFLOW_P. */
15532 bool
15533 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15535 enum tree_code code;
15536 if (t == error_mark_node)
15537 return false;
15539 code = TREE_CODE (t);
15540 switch (TREE_CODE_CLASS (code))
15542 case tcc_binary:
15543 case tcc_comparison:
15544 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15545 TREE_TYPE (t),
15546 TREE_OPERAND (t, 0),
15547 TREE_OPERAND (t, 1),
15548 strict_overflow_p);
15550 case tcc_unary:
15551 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15552 TREE_TYPE (t),
15553 TREE_OPERAND (t, 0),
15554 strict_overflow_p);
15556 case tcc_constant:
15557 case tcc_declaration:
15558 case tcc_reference:
15559 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15561 default:
15562 break;
15565 switch (code)
15567 case TRUTH_AND_EXPR:
15568 case TRUTH_OR_EXPR:
15569 case TRUTH_XOR_EXPR:
15570 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15571 TREE_TYPE (t),
15572 TREE_OPERAND (t, 0),
15573 TREE_OPERAND (t, 1),
15574 strict_overflow_p);
15575 case TRUTH_NOT_EXPR:
15576 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15577 TREE_TYPE (t),
15578 TREE_OPERAND (t, 0),
15579 strict_overflow_p);
15581 case COND_EXPR:
15582 case CONSTRUCTOR:
15583 case OBJ_TYPE_REF:
15584 case ASSERT_EXPR:
15585 case ADDR_EXPR:
15586 case WITH_SIZE_EXPR:
15587 case SSA_NAME:
15588 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15590 default:
15591 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15595 /* Return true if `t' is known to be non-negative. Handle warnings
15596 about undefined signed overflow. */
15598 bool
15599 tree_expr_nonnegative_p (tree t)
15601 bool ret, strict_overflow_p;
15603 strict_overflow_p = false;
15604 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15605 if (strict_overflow_p)
15606 fold_overflow_warning (("assuming signed overflow does not occur when "
15607 "determining that expression is always "
15608 "non-negative"),
15609 WARN_STRICT_OVERFLOW_MISC);
15610 return ret;
15614 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15615 For floating point we further ensure that T is not denormal.
15616 Similar logic is present in nonzero_address in rtlanal.h.
15618 If the return value is based on the assumption that signed overflow
15619 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15620 change *STRICT_OVERFLOW_P. */
15622 bool
15623 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15624 bool *strict_overflow_p)
15626 switch (code)
15628 case ABS_EXPR:
15629 return tree_expr_nonzero_warnv_p (op0,
15630 strict_overflow_p);
15632 case NOP_EXPR:
15634 tree inner_type = TREE_TYPE (op0);
15635 tree outer_type = type;
15637 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15638 && tree_expr_nonzero_warnv_p (op0,
15639 strict_overflow_p));
15641 break;
15643 case NON_LVALUE_EXPR:
15644 return tree_expr_nonzero_warnv_p (op0,
15645 strict_overflow_p);
15647 default:
15648 break;
15651 return false;
15654 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15655 For floating point we further ensure that T is not denormal.
15656 Similar logic is present in nonzero_address in rtlanal.h.
15658 If the return value is based on the assumption that signed overflow
15659 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15660 change *STRICT_OVERFLOW_P. */
15662 bool
15663 tree_binary_nonzero_warnv_p (enum tree_code code,
15664 tree type,
15665 tree op0,
15666 tree op1, bool *strict_overflow_p)
15668 bool sub_strict_overflow_p;
15669 switch (code)
15671 case POINTER_PLUS_EXPR:
15672 case PLUS_EXPR:
15673 if (TYPE_OVERFLOW_UNDEFINED (type))
15675 /* In the presence of negative values it is hard
15676 to say anything. */
15677 sub_strict_overflow_p = false;
15678 if (!tree_expr_nonnegative_warnv_p (op0,
15679 &sub_strict_overflow_p)
15680 || !tree_expr_nonnegative_warnv_p (op1,
15681 &sub_strict_overflow_p))
15682 return false;
15683 /* One of the operands must be positive and the other non-negative. */
15684 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15685 overflows, on a twos-complement machine the sum of two
15686 nonnegative numbers can never be zero. */
15687 return (tree_expr_nonzero_warnv_p (op0,
15688 strict_overflow_p)
15689 || tree_expr_nonzero_warnv_p (op1,
15690 strict_overflow_p));
15692 break;
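/* Illustrative note on the PLUS case above (not part of GCC): if
   x >= 0 and y >= 0 with at least one known nonzero, then in n-bit
   two's complement x + y is at most 2^n - 2 and hence can never wrap
   all the way around to zero, which is why *STRICT_OVERFLOW_P need
   not be set there.  */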
15694 case MULT_EXPR:
15695 if (TYPE_OVERFLOW_UNDEFINED (type))
15697 if (tree_expr_nonzero_warnv_p (op0,
15698 strict_overflow_p)
15699 && tree_expr_nonzero_warnv_p (op1,
15700 strict_overflow_p))
15702 *strict_overflow_p = true;
15703 return true;
15706 break;
15708 case MIN_EXPR:
15709 sub_strict_overflow_p = false;
15710 if (tree_expr_nonzero_warnv_p (op0,
15711 &sub_strict_overflow_p)
15712 && tree_expr_nonzero_warnv_p (op1,
15713 &sub_strict_overflow_p))
15715 if (sub_strict_overflow_p)
15716 *strict_overflow_p = true;
15718 break;
15720 case MAX_EXPR:
15721 sub_strict_overflow_p = false;
15722 if (tree_expr_nonzero_warnv_p (op0,
15723 &sub_strict_overflow_p))
15725 if (sub_strict_overflow_p)
15726 *strict_overflow_p = true;
15728 /* When both operands are nonzero, then MAX must be too. */
15729 if (tree_expr_nonzero_warnv_p (op1,
15730 strict_overflow_p))
15731 return true;
15733 /* MAX where operand 0 is positive is positive. */
15734 return tree_expr_nonnegative_warnv_p (op0,
15735 strict_overflow_p);
15737 /* MAX where operand 1 is positive is positive. */
15738 else if (tree_expr_nonzero_warnv_p (op1,
15739 &sub_strict_overflow_p)
15740 && tree_expr_nonnegative_warnv_p (op1,
15741 &sub_strict_overflow_p))
15743 if (sub_strict_overflow_p)
15744 *strict_overflow_p = true;
15745 return true;
15747 break;
15749 case BIT_IOR_EXPR:
15750 return (tree_expr_nonzero_warnv_p (op1,
15751 strict_overflow_p)
15752 || tree_expr_nonzero_warnv_p (op0,
15753 strict_overflow_p));
15755 default:
15756 break;
15759 return false;
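/* A sketch of the MULT_EXPR case above (hypothetical operands,
   assuming signed overflow is treated as undefined):

     bool sov = false;
     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     bool nz = tree_binary_nonzero_warnv_p (MULT_EXPR, integer_type_node,
                                            a, b, &sov);

   NZ is true and SOV is set, since the conclusion relies on a wrapped
   product being undefined behaviour.  For an unsigned 32-bit type no
   such claim is made: 0x10000 * 0x10000 wraps to zero, and
   TYPE_OVERFLOW_UNDEFINED is false there.  */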
15762 /* Return true when T is known to be nonzero. For addresses, similar
15763 logic is present in nonzero_address_p in rtlanal.c.
15766 If the return value is based on the assumption that signed overflow
15767 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15768 change *STRICT_OVERFLOW_P. */
15770 bool
15771 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15773 bool sub_strict_overflow_p;
15774 switch (TREE_CODE (t))
15776 case INTEGER_CST:
15777 return !integer_zerop (t);
15779 case ADDR_EXPR:
15781 tree base = TREE_OPERAND (t, 0);
15783 if (!DECL_P (base))
15784 base = get_base_address (base);
15786 if (!base)
15787 return false;
15789 /* For objects in the symbol table, check whether we know they are nonzero.
15790 Don't do anything for variables and functions before the symtab is built;
15791 it is quite possible that they will be declared weak later. */
15792 if (DECL_P (base) && decl_in_symtab_p (base))
15794 struct symtab_node *symbol;
15796 symbol = symtab_node::get_create (base);
15797 if (symbol)
15798 return symbol->nonzero_address ();
15799 else
15800 return false;
15803 /* Function local objects are never NULL. */
15804 if (DECL_P (base)
15805 && (DECL_CONTEXT (base)
15806 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15807 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15808 return true;
15810 /* Constants are never weak. */
15811 if (CONSTANT_CLASS_P (base))
15812 return true;
15814 return false;
15817 case COND_EXPR:
15818 sub_strict_overflow_p = false;
15819 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15820 &sub_strict_overflow_p)
15821 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15822 &sub_strict_overflow_p))
15824 if (sub_strict_overflow_p)
15825 *strict_overflow_p = true;
15826 return true;
15828 break;
15830 default:
15831 break;
15833 return false;
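/* Illustrative behaviour of the ADDR_EXPR case (DECL is hypothetical):
   the address of a function-local variable (auto_var_in_fn_p) or of a
   constant is always nonzero, so

     tree_single_nonzero_warnv_p (build_fold_addr_expr (decl), &sov)

   returns true without consulting the symbol table, whereas the
   address of a global goes through symtab_node::nonzero_address so
   that a later weak declaration is not mis-folded.  */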
15836 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15837 attempt to fold the expression to a constant without modifying TYPE,
15838 OP0 or OP1.
15840 If the expression can be simplified to a constant, return the
15841 constant; otherwise return NULL_TREE. */
15844 tree
15845 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15847 tree tem = fold_binary (code, type, op0, op1);
15848 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15851 /* Given the components of a unary expression CODE, TYPE and OP0,
15852 attempt to fold the expression to a constant without modifying
15853 TYPE or OP0.
15855 If the expression can be simplified to a constant, return the
15856 constant; otherwise return NULL_TREE. */
15859 tree
15860 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15862 tree tem = fold_unary (code, type, op0);
15863 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
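/* A minimal sketch of using these helpers (the operands are
   hypothetical):

     tree lhs = build_int_cst (integer_type_node, 3);
     tree rhs = build_int_cst (integer_type_node, 4);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         lhs, rhs);

   SUM is the INTEGER_CST 7; had folding produced anything
   non-constant, the result would have been NULL_TREE rather than a
   PLUS_EXPR tree.  */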
15866 /* If EXP represents a reference to an element of a constant string
15867 (either via pointer arithmetic or array indexing), return the
15868 tree representing the value accessed, otherwise return NULL. */
15870 tree
15871 fold_read_from_constant_string (tree exp)
15873 if ((TREE_CODE (exp) == INDIRECT_REF
15874 || TREE_CODE (exp) == ARRAY_REF)
15875 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15877 tree exp1 = TREE_OPERAND (exp, 0);
15878 tree index;
15879 tree string;
15880 location_t loc = EXPR_LOCATION (exp);
15882 if (TREE_CODE (exp) == INDIRECT_REF)
15883 string = string_constant (exp1, &index);
15884 else
15886 tree low_bound = array_ref_low_bound (exp);
15887 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15889 /* Optimize the special case of a zero lower bound.
15891 We convert the low_bound to sizetype to avoid some problems
15892 with constant folding. (E.g. suppose the lower bound is 1,
15893 and its mode is QI. Without the conversion, (ARRAY
15894 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15895 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15896 if (! integer_zerop (low_bound))
15897 index = size_diffop_loc (loc, index,
15898 fold_convert_loc (loc, sizetype, low_bound));
15900 string = exp1;
15903 if (string
15904 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15905 && TREE_CODE (string) == STRING_CST
15906 && TREE_CODE (index) == INTEGER_CST
15907 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15908 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15909 == MODE_INT)
15910 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15911 return build_int_cst_type (TREE_TYPE (exp),
15912 (TREE_STRING_POINTER (string)
15913 [TREE_INT_CST_LOW (index)]));
15915 return NULL;
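/* Worked example (hypothetical trees): for the C expression "abc"[1],
   EXP is an ARRAY_REF of the STRING_CST "abc" with index 1, and the
   routine above returns build_int_cst_type (char_type_node, 'b').
   A non-constant index, or one at or beyond TREE_STRING_LENGTH,
   makes it return NULL instead.  */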
15918 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15919 an integer constant, real, or fixed-point constant.
15921 TYPE is the type of the result. */
15923 static tree
15924 fold_negate_const (tree arg0, tree type)
15926 tree t = NULL_TREE;
15928 switch (TREE_CODE (arg0))
15930 case INTEGER_CST:
15932 bool overflow;
15933 wide_int val = wi::neg (arg0, &overflow);
15934 t = force_fit_type (type, val, 1,
15935 (overflow | TREE_OVERFLOW (arg0))
15936 && !TYPE_UNSIGNED (type));
15937 break;
15940 case REAL_CST:
15941 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15942 break;
15944 case FIXED_CST:
15946 FIXED_VALUE_TYPE f;
15947 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15948 &(TREE_FIXED_CST (arg0)), NULL,
15949 TYPE_SATURATING (type));
15950 t = build_fixed (type, f);
15951 /* Propagate overflow flags. */
15952 if (overflow_p | TREE_OVERFLOW (arg0))
15953 TREE_OVERFLOW (t) = 1;
15954 break;
15957 default:
15958 gcc_unreachable ();
15961 return t;
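/* Example of the INTEGER_CST arm (the operand is hypothetical):
   negating the most negative int,

     fold_negate_const (build_int_cst (integer_type_node, INT_MIN),
                        integer_type_node)

   wraps back to INT_MIN, and force_fit_type sets TREE_OVERFLOW on the
   result because the negation overflowed a signed type.  */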
15964 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15965 an integer constant or real constant.
15967 TYPE is the type of the result. */
15969 tree
15970 fold_abs_const (tree arg0, tree type)
15972 tree t = NULL_TREE;
15974 switch (TREE_CODE (arg0))
15976 case INTEGER_CST:
15978 /* If the value is unsigned or non-negative, then the absolute value
15979 is the same as the ordinary value. */
15980 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15981 t = arg0;
15983 /* If the value is negative, then the absolute value is
15984 its negation. */
15985 else
15987 bool overflow;
15988 wide_int val = wi::neg (arg0, &overflow);
15989 t = force_fit_type (type, val, -1,
15990 overflow | TREE_OVERFLOW (arg0));
15993 break;
15995 case REAL_CST:
15996 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15997 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15998 else
15999 t = arg0;
16000 break;
16002 default:
16003 gcc_unreachable ();
16006 return t;
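/* Example: fold_abs_const on build_int_cst (integer_type_node, -5)
   takes the negation branch and yields 5; on INT_MIN it yields INT_MIN
   again with TREE_OVERFLOW set, mirroring fold_negate_const above.
   For REAL_CST only a negative value is rewritten, via
   real_value_negate.  */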
16009 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16010 constant. TYPE is the type of the result. */
16012 static tree
16013 fold_not_const (const_tree arg0, tree type)
16015 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16017 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
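/* Example: with a 32-bit unsigned int,

     fold_not_const (build_int_cst (unsigned_type_node, 0),
                     unsigned_type_node)

   yields 0xffffffff, since wi::bit_not complements every bit of the
   type's precision.  */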
16020 /* Given CODE, a relational operator, the target type TYPE, and two
16021 constant operands OP0 and OP1, return the result of the
16022 relational operation. If the result is not a compile-time
16023 constant, return NULL_TREE. */
16025 static tree
16026 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16028 int result, invert;
16030 /* From here on, the only cases we handle are when the result is
16031 known to be a constant. */
16033 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16035 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16036 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16038 /* Handle the cases where either operand is a NaN. */
16039 if (real_isnan (c0) || real_isnan (c1))
16041 switch (code)
16043 case EQ_EXPR:
16044 case ORDERED_EXPR:
16045 result = 0;
16046 break;
16048 case NE_EXPR:
16049 case UNORDERED_EXPR:
16050 case UNLT_EXPR:
16051 case UNLE_EXPR:
16052 case UNGT_EXPR:
16053 case UNGE_EXPR:
16054 case UNEQ_EXPR:
16055 result = 1;
16056 break;
16058 case LT_EXPR:
16059 case LE_EXPR:
16060 case GT_EXPR:
16061 case GE_EXPR:
16062 case LTGT_EXPR:
16063 if (flag_trapping_math)
16064 return NULL_TREE;
16065 result = 0;
16066 break;
16068 default:
16069 gcc_unreachable ();
16072 return constant_boolean_node (result, type);
16075 return constant_boolean_node (real_compare (code, c0, c1), type);
16078 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16080 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16081 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16082 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16085 /* Handle equality/inequality of complex constants. */
16086 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16088 tree rcond = fold_relational_const (code, type,
16089 TREE_REALPART (op0),
16090 TREE_REALPART (op1));
16091 tree icond = fold_relational_const (code, type,
16092 TREE_IMAGPART (op0),
16093 TREE_IMAGPART (op1));
16094 if (code == EQ_EXPR)
16095 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16096 else if (code == NE_EXPR)
16097 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16098 else
16099 return NULL_TREE;
16102 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16104 unsigned count = VECTOR_CST_NELTS (op0);
16105 tree *elts = XALLOCAVEC (tree, count);
16106 gcc_assert (VECTOR_CST_NELTS (op1) == count
16107 && TYPE_VECTOR_SUBPARTS (type) == count);
16109 for (unsigned i = 0; i < count; i++)
16111 tree elem_type = TREE_TYPE (type);
16112 tree elem0 = VECTOR_CST_ELT (op0, i);
16113 tree elem1 = VECTOR_CST_ELT (op1, i);
16115 tree tem = fold_relational_const (code, elem_type,
16116 elem0, elem1);
16118 if (tem == NULL_TREE)
16119 return NULL_TREE;
16121 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16124 return build_vector (type, elts);
16127 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16129 To compute GT, swap the arguments and do LT.
16130 To compute GE, do LT and invert the result.
16131 To compute LE, swap the arguments, do LT and invert the result.
16132 To compute NE, do EQ and invert the result.
16134 Therefore, the code below must handle only EQ and LT. */
16136 if (code == LE_EXPR || code == GT_EXPR)
16138 tree tem = op0;
16139 op0 = op1;
16140 op1 = tem;
16141 code = swap_tree_comparison (code);
16144 /* Note that it is safe to invert for real values here because we
16145 have already handled the one case (NaN operands) where it matters. */
16147 invert = 0;
16148 if (code == NE_EXPR || code == GE_EXPR)
16150 invert = 1;
16151 code = invert_tree_comparison (code, false);
16154 /* Compute a result for LT or EQ if args permit;
16155 otherwise return NULL_TREE. */
16156 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16158 if (code == EQ_EXPR)
16159 result = tree_int_cst_equal (op0, op1);
16160 else
16161 result = tree_int_cst_lt (op0, op1);
16163 else
16164 return NULL_TREE;
16166 if (invert)
16167 result ^= 1;
16168 return constant_boolean_node (result, type);
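/* A worked instance of the swap/invert scheme above: folding 5 >= 3.
   GE_EXPR sets INVERT and becomes LT_EXPR; tree_int_cst_lt (5, 3) is 0;
   XOR-ing with INVERT gives 1, so constant_boolean_node returns true.
   NaN operands never reach this point: they are handled (or, under
   flag_trapping_math, deliberately left unfolded) earlier.  */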
16171 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16172 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16173 itself. */
16175 tree
16176 fold_build_cleanup_point_expr (tree type, tree expr)
16178 /* If the expression does not have side effects then we don't have to wrap
16179 it with a cleanup point expression. */
16180 if (!TREE_SIDE_EFFECTS (expr))
16181 return expr;
16183 /* If the expression is a RETURN_EXPR, check whether the expression inside
16184 the return, or the right-hand side of the MODIFY_EXPR inside the return,
16185 is free of side effects. If so, we do not need to wrap the expression
16186 in a cleanup point expression. Note we don't check the left-hand side
16187 of the modify because it should always be the return decl. */
16188 if (TREE_CODE (expr) == RETURN_EXPR)
16190 tree op = TREE_OPERAND (expr, 0);
16191 if (!op || !TREE_SIDE_EFFECTS (op))
16192 return expr;
16193 op = TREE_OPERAND (op, 1);
16194 if (!TREE_SIDE_EFFECTS (op))
16195 return expr;
16198 return build1 (CLEANUP_POINT_EXPR, type, expr);
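/* Examples of the RETURN_EXPR special case (the statements are
   hypothetical): for "return x;" the right-hand side of the
   MODIFY_EXPR is a plain decl with no side effects, so EXPR comes back
   unwrapped; for "return f ();" the call has side effects, so a
   CLEANUP_POINT_EXPR is built around it and temporaries die at the
   full-expression boundary.  */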
16201 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16202 of an indirection through OP0, or NULL_TREE if no simplification is
16203 possible. */
16205 tree
16206 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16208 tree sub = op0;
16209 tree subtype;
16211 STRIP_NOPS (sub);
16212 subtype = TREE_TYPE (sub);
16213 if (!POINTER_TYPE_P (subtype))
16214 return NULL_TREE;
16216 if (TREE_CODE (sub) == ADDR_EXPR)
16218 tree op = TREE_OPERAND (sub, 0);
16219 tree optype = TREE_TYPE (op);
16220 /* *&CONST_DECL -> to the value of the const decl. */
16221 if (TREE_CODE (op) == CONST_DECL)
16222 return DECL_INITIAL (op);
16223 /* *&p => p; make sure to handle *&"str"[cst] here. */
16224 if (type == optype)
16226 tree fop = fold_read_from_constant_string (op);
16227 if (fop)
16228 return fop;
16229 else
16230 return op;
16232 /* *(foo *)&fooarray => fooarray[0] */
16233 else if (TREE_CODE (optype) == ARRAY_TYPE
16234 && type == TREE_TYPE (optype)
16235 && (!in_gimple_form
16236 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16238 tree type_domain = TYPE_DOMAIN (optype);
16239 tree min_val = size_zero_node;
16240 if (type_domain && TYPE_MIN_VALUE (type_domain))
16241 min_val = TYPE_MIN_VALUE (type_domain);
16242 if (in_gimple_form
16243 && TREE_CODE (min_val) != INTEGER_CST)
16244 return NULL_TREE;
16245 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16246 NULL_TREE, NULL_TREE);
16248 /* *(foo *)&complexfoo => __real__ complexfoo */
16249 else if (TREE_CODE (optype) == COMPLEX_TYPE
16250 && type == TREE_TYPE (optype))
16251 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16252 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16253 else if (TREE_CODE (optype) == VECTOR_TYPE
16254 && type == TREE_TYPE (optype))
16256 tree part_width = TYPE_SIZE (type);
16257 tree index = bitsize_int (0);
16258 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16262 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16263 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16265 tree op00 = TREE_OPERAND (sub, 0);
16266 tree op01 = TREE_OPERAND (sub, 1);
16268 STRIP_NOPS (op00);
16269 if (TREE_CODE (op00) == ADDR_EXPR)
16271 tree op00type;
16272 op00 = TREE_OPERAND (op00, 0);
16273 op00type = TREE_TYPE (op00);
16275 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16276 if (TREE_CODE (op00type) == VECTOR_TYPE
16277 && type == TREE_TYPE (op00type))
16279 HOST_WIDE_INT offset = tree_to_shwi (op01);
16280 tree part_width = TYPE_SIZE (type);
16281 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16282 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16283 tree index = bitsize_int (indexi);
16285 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16286 return fold_build3_loc (loc,
16287 BIT_FIELD_REF, type, op00,
16288 part_width, index);
16291 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16292 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16293 && type == TREE_TYPE (op00type))
16295 tree size = TYPE_SIZE_UNIT (type);
16296 if (tree_int_cst_equal (size, op01))
16297 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16299 /* ((foo *)&fooarray)[1] => fooarray[1] */
16300 else if (TREE_CODE (op00type) == ARRAY_TYPE
16301 && type == TREE_TYPE (op00type))
16303 tree type_domain = TYPE_DOMAIN (op00type);
16304 tree min_val = size_zero_node;
16305 if (type_domain && TYPE_MIN_VALUE (type_domain))
16306 min_val = TYPE_MIN_VALUE (type_domain);
16307 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16308 TYPE_SIZE_UNIT (type));
16309 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16310 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16311 NULL_TREE, NULL_TREE);
16316 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16317 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16318 && type == TREE_TYPE (TREE_TYPE (subtype))
16319 && (!in_gimple_form
16320 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16322 tree type_domain;
16323 tree min_val = size_zero_node;
16324 sub = build_fold_indirect_ref_loc (loc, sub);
16325 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16326 if (type_domain && TYPE_MIN_VALUE (type_domain))
16327 min_val = TYPE_MIN_VALUE (type_domain);
16328 if (in_gimple_form
16329 && TREE_CODE (min_val) != INTEGER_CST)
16330 return NULL_TREE;
16331 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16332 NULL_TREE);
16335 return NULL_TREE;
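/* Summary of the folds above (the declarations are hypothetical):

     v4si v;             *(int *) &v        => BIT_FIELD_REF <v, 32, 0>
     _Complex double c;  ((double *) &c)[1] => __imag__ c
     int a[4];           *(int *) &a        => a[0]

   Each fold requires TYPE to match the element type exactly; otherwise
   NULL_TREE is returned and the caller keeps the plain INDIRECT_REF.  */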
16338 /* Builds an expression for an indirection through T, simplifying some
16339 cases. */
16341 tree
16342 build_fold_indirect_ref_loc (location_t loc, tree t)
16344 tree type = TREE_TYPE (TREE_TYPE (t));
16345 tree sub = fold_indirect_ref_1 (loc, type, t);
16347 if (sub)
16348 return sub;
16350 return build1_loc (loc, INDIRECT_REF, type, t);
16353 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16355 tree
16356 fold_indirect_ref_loc (location_t loc, tree t)
16358 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16360 if (sub)
16361 return sub;
16362 else
16363 return t;
16366 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16367 whose result is ignored. The type of the returned tree need not be
16368 the same as the original expression. */
16370 tree
16371 fold_ignored_result (tree t)
16373 if (!TREE_SIDE_EFFECTS (t))
16374 return integer_zero_node;
16376 for (;;)
16377 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16379 case tcc_unary:
16380 t = TREE_OPERAND (t, 0);
16381 break;
16383 case tcc_binary:
16384 case tcc_comparison:
16385 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16386 t = TREE_OPERAND (t, 0);
16387 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16388 t = TREE_OPERAND (t, 1);
16389 else
16390 return t;
16391 break;
16393 case tcc_expression:
16394 switch (TREE_CODE (t))
16396 case COMPOUND_EXPR:
16397 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16398 return t;
16399 t = TREE_OPERAND (t, 0);
16400 break;
16402 case COND_EXPR:
16403 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16404 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16405 return t;
16406 t = TREE_OPERAND (t, 0);
16407 break;
16409 default:
16410 return t;
16412 break;
16414 default:
16415 return t;
16419 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16421 tree
16422 round_up_loc (location_t loc, tree value, unsigned int divisor)
16424 tree div = NULL_TREE;
16426 if (divisor == 1)
16427 return value;
16429 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16430 have to do anything. Only do this when VALUE is not a constant,
16431 because for a constant this check is more expensive than simply
16432 performing the rounding below. */
16433 if (TREE_CODE (value) != INTEGER_CST)
16435 div = build_int_cst (TREE_TYPE (value), divisor);
16437 if (multiple_of_p (TREE_TYPE (value), value, div))
16438 return value;
16441 /* If divisor is a power of two, simplify this to bit manipulation. */
16442 if (divisor == (divisor & -divisor))
16444 if (TREE_CODE (value) == INTEGER_CST)
16446 wide_int val = value;
16447 bool overflow_p;
16449 if ((val & (divisor - 1)) == 0)
16450 return value;
16452 overflow_p = TREE_OVERFLOW (value);
16453 val &= ~(divisor - 1);
16454 val += divisor;
16455 if (val == 0)
16456 overflow_p = true;
16458 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16460 else
16462 tree t;
16464 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16465 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16466 t = build_int_cst (TREE_TYPE (value), -divisor);
16467 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16470 else
16472 if (!div)
16473 div = build_int_cst (TREE_TYPE (value), divisor);
16474 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16475 value = size_binop_loc (loc, MULT_EXPR, value, div);
16478 return value;
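/* Worked example of the power-of-two branch: rounding up to a multiple
   of 8 is (VALUE + 7) & -8 for a non-constant VALUE.  For a constant,

     round_up_loc (loc, build_int_cst (sizetype, 13), 8)

   masks 13 down to 8, adds the divisor to reach 16, and lets
   force_fit_type re-canonicalize the result.  A non-power-of-two
   divisor instead uses CEIL_DIV_EXPR followed by MULT_EXPR.  */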
16481 /* Likewise, but round down. */
16483 tree
16484 round_down_loc (location_t loc, tree value, int divisor)
16486 tree div = NULL_TREE;
16488 gcc_assert (divisor > 0);
16489 if (divisor == 1)
16490 return value;
16492 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16493 have to do anything. Only do this when VALUE is not a constant,
16494 because for a constant this check is more expensive than simply
16495 performing the rounding below. */
16496 if (TREE_CODE (value) != INTEGER_CST)
16498 div = build_int_cst (TREE_TYPE (value), divisor);
16500 if (multiple_of_p (TREE_TYPE (value), value, div))
16501 return value;
16504 /* If divisor is a power of two, simplify this to bit manipulation. */
16505 if (divisor == (divisor & -divisor))
16507 tree t;
16509 t = build_int_cst (TREE_TYPE (value), -divisor);
16510 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16512 else
16514 if (!div)
16515 div = build_int_cst (TREE_TYPE (value), divisor);
16516 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16517 value = size_binop_loc (loc, MULT_EXPR, value, div);
16520 return value;
16523 /* Returns a pointer to the base of the object addressed by EXP and
16524 extracts information about the offset of the access, storing it
16525 in *PBITPOS and *POFFSET. */
16527 static tree
16528 split_address_to_core_and_offset (tree exp,
16529 HOST_WIDE_INT *pbitpos, tree *poffset)
16531 tree core;
16532 machine_mode mode;
16533 int unsignedp, volatilep;
16534 HOST_WIDE_INT bitsize;
16535 location_t loc = EXPR_LOCATION (exp);
16537 if (TREE_CODE (exp) == ADDR_EXPR)
16539 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16540 poffset, &mode, &unsignedp, &volatilep,
16541 false);
16542 core = build_fold_addr_expr_loc (loc, core);
16544 else
16546 core = exp;
16547 *pbitpos = 0;
16548 *poffset = NULL_TREE;
16551 return core;
16554 /* Returns true if the addresses E1 and E2 differ by a constant,
16555 false otherwise. If they do, E1 - E2 is stored in *DIFF. */
16557 bool
16558 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16560 tree core1, core2;
16561 HOST_WIDE_INT bitpos1, bitpos2;
16562 tree toffset1, toffset2, tdiff, type;
16564 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16565 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16567 if (bitpos1 % BITS_PER_UNIT != 0
16568 || bitpos2 % BITS_PER_UNIT != 0
16569 || !operand_equal_p (core1, core2, 0))
16570 return false;
16572 if (toffset1 && toffset2)
16574 type = TREE_TYPE (toffset1);
16575 if (type != TREE_TYPE (toffset2))
16576 toffset2 = fold_convert (type, toffset2);
16578 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16579 if (!cst_and_fits_in_hwi (tdiff))
16580 return false;
16582 *diff = int_cst_value (tdiff);
16584 else if (toffset1 || toffset2)
16586 /* If only one of the offsets is non-constant, the difference cannot
16587 be a constant. */
16588 return false;
16590 else
16591 *diff = 0;
16593 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16594 return true;
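/* Example (hypothetical): with "char buf[16];", E1 = &buf[10] and
   E2 = &buf[2] share the core &buf and have constant bit positions
   80 and 16, so *DIFF is set to (80 - 16) / BITS_PER_UNIT = 8 and
   true is returned.  A variable array index in either address makes
   the function return false.  */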
16597 /* Simplify the floating point expression EXP when the sign of the
16598 result is not significant. Return NULL_TREE if no simplification
16599 is possible. */
16601 tree
16602 fold_strip_sign_ops (tree exp)
16604 tree arg0, arg1;
16605 location_t loc = EXPR_LOCATION (exp);
16607 switch (TREE_CODE (exp))
16609 case ABS_EXPR:
16610 case NEGATE_EXPR:
16611 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16612 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16614 case MULT_EXPR:
16615 case RDIV_EXPR:
16616 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16617 return NULL_TREE;
16618 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16619 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16620 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16621 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16622 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16623 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16624 break;
16626 case COMPOUND_EXPR:
16627 arg0 = TREE_OPERAND (exp, 0);
16628 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16629 if (arg1)
16630 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16631 break;
16633 case COND_EXPR:
16634 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16635 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16636 if (arg0 || arg1)
16637 return fold_build3_loc (loc,
16638 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16639 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16640 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16641 break;
16643 case CALL_EXPR:
16645 const enum built_in_function fcode = builtin_mathfn_code (exp);
16646 switch (fcode)
16648 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16649 /* Strip copysign function call, return the 1st argument. */
16650 arg0 = CALL_EXPR_ARG (exp, 0);
16651 arg1 = CALL_EXPR_ARG (exp, 1);
16652 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16654 default:
16655 /* Strip sign ops from the argument of "odd" math functions. */
16656 if (negate_mathfn_p (fcode))
16658 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16659 if (arg0)
16660 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16662 break;
16665 break;
16667 default:
16668 break;
16670 return NULL_TREE;
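/* Hedged examples of sign stripping, for contexts such as the argument
   of fabs () where only the magnitude of the result matters:

     -x * y           =>  x * y
     sin (-x)         =>  sin (x)   (sin is odd: negate_mathfn_p)
     copysign (x, s)  =>  x         (s dropped; its side effects are
                                     kept by omit_one_operand_loc)

   NULL_TREE is returned when no sign operation could be removed.  */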