/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
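
/* Editor's note: a minimal usage sketch, not part of the original file.
   It assumes only the usual GCC tree helpers (build_int_cst,
   integer_type_node) that this file already relies on:

     tree eight = build_int_cst (integer_type_node, 8);
     tree four = build_int_cst (integer_type_node, 4);
     tree three = build_int_cst (integer_type_node, 3);
     tree q1 = div_if_zero_remainder (eight, four);
     tree q2 = div_if_zero_remainder (eight, three);

   Here Q1 is the INTEGER_CST 2, because 4 divides 8 exactly, while Q2
   is NULL_TREE, because 8 mod 3 is nonzero.  */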

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
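
/* Editor's note: a minimal sketch of the deferral protocol above, not
   part of the original file.  A caller brackets folding with a
   defer/undefer pair; the deferred warning is only emitted if the
   caller decides the folded result was actually used:

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     bool used = ...;   // caller-specific test of the result
     fold_undefer_overflow_warnings (used, NULL, 0);

   Passing ISSUE == false, or calling
   fold_undefer_and_ignore_overflow_warnings, drops any pending
   warning.  */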

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
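
/* Editor's note: a small sketch of how the two entry points above
   interact; not part of the original file.  A and B here stand for
   arbitrary operand trees of a signed integral type:

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree neg = negate_expr (sum);

   If B is safely negatable, fold_negate_expr applies the
   "-(A + B) -> (-B) - A" rule and NEG is a MINUS_EXPR; if no
   simplification applies, fold_negate_expr returns NULL_TREE and
   negate_expr falls back to wrapping SUM in a plain NEGATE_EXPR.  */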

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
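
/* Editor's note: an illustrative sketch, not from the original file.
   Splitting the expression X + 5 (a PLUS_EXPR of a variable and an
   INTEGER_CST) under CODE == PLUS_EXPR:

     tree con, lit, minus_lit;
     tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);

   leaves VAR == X, LIT == 5, and CON == MINUS_LIT == NULL, so the
   caller can recombine the pieces with associate_trees below.  */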

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
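
/* Editor's note: a sketch, not from the original file, recombining
   the hypothetical VAR and LIT produced by split_tree above:

     tree sum = associate_trees (loc, var, lit, PLUS_EXPR, type);

   Had VAR been a NEGATE_EXPR, the PLUS_EXPR branch above would have
   produced LIT - <VAR's operand> as a MINUS_EXPR instead of folding
   recursively.  */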

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
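
/* Editor's note: a minimal sketch, not from the original file.
   Combining two INTEGER_CSTs at compile time:

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree p = int_const_binop (MULT_EXPR, a, b);   // INTEGER_CST 42
     tree z = build_int_cst (integer_type_node, 0);
     tree q = int_const_binop (TRUNC_DIV_EXPR, a, z);   // NULL_TREE

   Division by zero, or a CODE the switch above does not handle,
   yields NULL_TREE rather than a constant.  */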

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_RSHIFT_EXPR is endian dependent.
	     For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
	     vector element, but last element if BYTES_BIG_ENDIAN.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
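
/* Editor's note: a minimal sketch, not from the original file.
   const_binop dispatches on the kind of constant; for REAL_CSTs it
   goes through real_arithmetic above:

     tree one = build_real (double_type_node, dconst1);
     tree two = build_real (double_type_node, dconst2);
     tree sum = const_binop (PLUS_EXPR, one, two);   // REAL_CST 3.0

   The result is NULL_TREE whenever folding is unsafe in the current
   mode, e.g. an RDIV_EXPR by 0.0 with flag_trapping_math set.  */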

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
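
/* Editor's note: a minimal sketch, not from the original file.  Both
   operands must be equivalent integer types per
   int_binop_types_match_p; the usual idiom is sizetype arithmetic:

     tree a = size_int (16);
     tree b = size_int (8);
     tree total = size_binop_loc (loc, PLUS_EXPR, a, b);

   TOTAL is the sizetype INTEGER_CST 24, computed through the
   constant fast path above with overflow tracking forced on.  */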

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
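
/* Editor's note: a minimal sketch, not from the original file.
   Because sizetype is unsigned, a plain MINUS_EXPR of 8 - 16 would
   wrap; size_diffop instead produces a signed result:

     tree d = size_diffop_loc (loc, size_int (8), size_int (16));

   Here D is the ssizetype constant -8, computed by the final branch
   above as 0 minus the (safe) difference 16 - 8.  */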

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
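
/* Editor's note: a minimal sketch, not from the original file.
   Converting a REAL_CST to an integer type goes through
   fold_convert_const_int_from_real above, with the saturating
   semantics described there:

     tree r = build_real (double_type_node, dconst2);
     tree i = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, r);

   Here I is the INTEGER_CST 2; a NaN input would instead yield 0
   with TREE_OVERFLOW set on the result.  */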

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
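
/* Editor's note: a minimal sketch, not from the original file.
   fold_convertible_p answers whether fold_convert_loc below may be
   used at all; INT_EXPR here stands for any expression of type int:

     bool ok = fold_convertible_p (long_integer_type_node, int_expr);

   OK is true, because both types satisfy INTEGRAL_TYPE_P; asking
   about a conversion to a RECORD_TYPE would fall into the default
   case above and return false.  */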

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
2078 tem = fold_ignored_result (arg);
2079 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2081 default:
2082 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2083 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2084 gcc_unreachable ();
2086 fold_convert_exit:
2087 protected_set_expr_location_unshare (tem, loc);
2088 return tem;
2091 /* Return false if expr can be assumed not to be an lvalue, true
2092 otherwise. */
2094 static bool
2095 maybe_lvalue_p (const_tree x)
2097 /* We only need to wrap lvalue tree codes. */
2098 switch (TREE_CODE (x))
2100 case VAR_DECL:
2101 case PARM_DECL:
2102 case RESULT_DECL:
2103 case LABEL_DECL:
2104 case FUNCTION_DECL:
2105 case SSA_NAME:
2107 case COMPONENT_REF:
2108 case MEM_REF:
2109 case INDIRECT_REF:
2110 case ARRAY_REF:
2111 case ARRAY_RANGE_REF:
2112 case BIT_FIELD_REF:
2113 case OBJ_TYPE_REF:
2115 case REALPART_EXPR:
2116 case IMAGPART_EXPR:
2117 case PREINCREMENT_EXPR:
2118 case PREDECREMENT_EXPR:
2119 case SAVE_EXPR:
2120 case TRY_CATCH_EXPR:
2121 case WITH_CLEANUP_EXPR:
2122 case COMPOUND_EXPR:
2123 case MODIFY_EXPR:
2124 case TARGET_EXPR:
2125 case COND_EXPR:
2126 case BIND_EXPR:
2127 break;
2129 default:
2130 /* Assume the worst for front-end tree codes. */
2131 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2132 break;
2133 return false;
2136 return true;
2139 /* Return an expr equal to X but certainly not valid as an lvalue. */
2141 tree
2142 non_lvalue_loc (location_t loc, tree x)
2144 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2145 us. */
2146 if (in_gimple_form)
2147 return x;
2149 if (! maybe_lvalue_p (x))
2150 return x;
2151 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2154 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2155 Zero means allow extended lvalues. */
2157 int pedantic_lvalues;
2159 /* When pedantic, return an expr equal to X but certainly not valid as a
2160 pedantic lvalue. Otherwise, return X. */
2162 static tree
2163 pedantic_non_lvalue_loc (location_t loc, tree x)
2165 if (pedantic_lvalues)
2166 return non_lvalue_loc (loc, x);
2168 return protected_set_expr_location_unshare (x, loc);
2171 /* Given a tree comparison code, return the code that is the logical inverse.
2172 It is generally not safe to do this for floating-point comparisons, except
2173 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2174 ERROR_MARK in this case. */
2176 enum tree_code
2177 invert_tree_comparison (enum tree_code code, bool honor_nans)
2179 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2180 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2181 return ERROR_MARK;
2183 switch (code)
2185 case EQ_EXPR:
2186 return NE_EXPR;
2187 case NE_EXPR:
2188 return EQ_EXPR;
2189 case GT_EXPR:
2190 return honor_nans ? UNLE_EXPR : LE_EXPR;
2191 case GE_EXPR:
2192 return honor_nans ? UNLT_EXPR : LT_EXPR;
2193 case LT_EXPR:
2194 return honor_nans ? UNGE_EXPR : GE_EXPR;
2195 case LE_EXPR:
2196 return honor_nans ? UNGT_EXPR : GT_EXPR;
2197 case LTGT_EXPR:
2198 return UNEQ_EXPR;
2199 case UNEQ_EXPR:
2200 return LTGT_EXPR;
2201 case UNGT_EXPR:
2202 return LE_EXPR;
2203 case UNGE_EXPR:
2204 return LT_EXPR;
2205 case UNLT_EXPR:
2206 return GE_EXPR;
2207 case UNLE_EXPR:
2208 return GT_EXPR;
2209 case ORDERED_EXPR:
2210 return UNORDERED_EXPR;
2211 case UNORDERED_EXPR:
2212 return ORDERED_EXPR;
2213 default:
2214 gcc_unreachable ();
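/* Editor's illustrative sketch (an addition, not part of GCC): with NaNs
   honored, the logical inverse of "a < b" is UNGE ("unordered or a >= b"),
   not GE, because both "a < b" and "a >= b" are false on unordered
   operands.  A standalone check, assuming IEEE doubles:

     #include <assert.h>
     #include <math.h>

     int main (void)
     {
       double a = NAN, b = 1.0;
       assert (!(a < b));       // LT is false on unordered operands...
       assert (!(a >= b));      // ...but so is GE, so GE is not the inverse
       assert (!(a < b) == (isnan (a) || isnan (b) || a >= b));  // UNGE is
       return 0;
     }
*/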
2218 /* Similar, but return the comparison that results if the operands are
2219 swapped. This is safe for floating-point. */
2221 enum tree_code
2222 swap_tree_comparison (enum tree_code code)
2224 switch (code)
2226 case EQ_EXPR:
2227 case NE_EXPR:
2228 case ORDERED_EXPR:
2229 case UNORDERED_EXPR:
2230 case LTGT_EXPR:
2231 case UNEQ_EXPR:
2232 return code;
2233 case GT_EXPR:
2234 return LT_EXPR;
2235 case GE_EXPR:
2236 return LE_EXPR;
2237 case LT_EXPR:
2238 return GT_EXPR;
2239 case LE_EXPR:
2240 return GE_EXPR;
2241 case UNGT_EXPR:
2242 return UNLT_EXPR;
2243 case UNGE_EXPR:
2244 return UNLE_EXPR;
2245 case UNLT_EXPR:
2246 return UNGT_EXPR;
2247 case UNLE_EXPR:
2248 return UNGE_EXPR;
2249 default:
2250 gcc_unreachable ();
2255 /* Convert a comparison tree code from an enum tree_code representation
2256 into a compcode bit-based encoding. This function is the inverse of
2257 compcode_to_comparison. */
2259 static enum comparison_code
2260 comparison_to_compcode (enum tree_code code)
2262 switch (code)
2264 case LT_EXPR:
2265 return COMPCODE_LT;
2266 case EQ_EXPR:
2267 return COMPCODE_EQ;
2268 case LE_EXPR:
2269 return COMPCODE_LE;
2270 case GT_EXPR:
2271 return COMPCODE_GT;
2272 case NE_EXPR:
2273 return COMPCODE_NE;
2274 case GE_EXPR:
2275 return COMPCODE_GE;
2276 case ORDERED_EXPR:
2277 return COMPCODE_ORD;
2278 case UNORDERED_EXPR:
2279 return COMPCODE_UNORD;
2280 case UNLT_EXPR:
2281 return COMPCODE_UNLT;
2282 case UNEQ_EXPR:
2283 return COMPCODE_UNEQ;
2284 case UNLE_EXPR:
2285 return COMPCODE_UNLE;
2286 case UNGT_EXPR:
2287 return COMPCODE_UNGT;
2288 case LTGT_EXPR:
2289 return COMPCODE_LTGT;
2290 case UNGE_EXPR:
2291 return COMPCODE_UNGE;
2292 default:
2293 gcc_unreachable ();
2297 /* Convert a compcode bit-based encoding of a comparison operator back
2298 to GCC's enum tree_code representation. This function is the
2299 inverse of comparison_to_compcode. */
2301 static enum tree_code
2302 compcode_to_comparison (enum comparison_code code)
2304 switch (code)
2306 case COMPCODE_LT:
2307 return LT_EXPR;
2308 case COMPCODE_EQ:
2309 return EQ_EXPR;
2310 case COMPCODE_LE:
2311 return LE_EXPR;
2312 case COMPCODE_GT:
2313 return GT_EXPR;
2314 case COMPCODE_NE:
2315 return NE_EXPR;
2316 case COMPCODE_GE:
2317 return GE_EXPR;
2318 case COMPCODE_ORD:
2319 return ORDERED_EXPR;
2320 case COMPCODE_UNORD:
2321 return UNORDERED_EXPR;
2322 case COMPCODE_UNLT:
2323 return UNLT_EXPR;
2324 case COMPCODE_UNEQ:
2325 return UNEQ_EXPR;
2326 case COMPCODE_UNLE:
2327 return UNLE_EXPR;
2328 case COMPCODE_UNGT:
2329 return UNGT_EXPR;
2330 case COMPCODE_LTGT:
2331 return LTGT_EXPR;
2332 case COMPCODE_UNGE:
2333 return UNGE_EXPR;
2334 default:
2335 gcc_unreachable ();
2339 /* Return a tree for the comparison which is the combination of
2340 doing the AND or OR (depending on CODE) of the two operations LCODE
2341 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2342 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2343 if this makes the transformation invalid. */
2345 tree
2346 combine_comparisons (location_t loc,
2347 enum tree_code code, enum tree_code lcode,
2348 enum tree_code rcode, tree truth_type,
2349 tree ll_arg, tree lr_arg)
2351 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2352 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2353 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2354 int compcode;
2356 switch (code)
2358 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2359 compcode = lcompcode & rcompcode;
2360 break;
2362 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2363 compcode = lcompcode | rcompcode;
2364 break;
2366 default:
2367 return NULL_TREE;
2370 if (!honor_nans)
2372 /* Eliminate unordered comparisons, as well as LTGT and ORD
2373 which are not used unless the mode has NaNs. */
2374 compcode &= ~COMPCODE_UNORD;
2375 if (compcode == COMPCODE_LTGT)
2376 compcode = COMPCODE_NE;
2377 else if (compcode == COMPCODE_ORD)
2378 compcode = COMPCODE_TRUE;
2380 else if (flag_trapping_math)
2382 /* Check that the original operation and the optimized ones will trap
2383 under the same condition. */
2384 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2385 && (lcompcode != COMPCODE_EQ)
2386 && (lcompcode != COMPCODE_ORD);
2387 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2388 && (rcompcode != COMPCODE_EQ)
2389 && (rcompcode != COMPCODE_ORD);
2390 bool trap = (compcode & COMPCODE_UNORD) == 0
2391 && (compcode != COMPCODE_EQ)
2392 && (compcode != COMPCODE_ORD);
2394 /* In a short-circuited boolean expression the LHS might be
2395 such that the RHS, if evaluated, will never trap. For
2396 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2397 if neither x nor y is NaN. (This is a mixed blessing: for
2398 example, the expression above will never trap, hence
2399 optimizing it to x < y would be invalid). */
2400 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2401 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2402 rtrap = false;
2404 /* If the comparison was short-circuited, and only the RHS
2405 trapped, we may now generate a spurious trap. */
2406 if (rtrap && !ltrap
2407 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2408 return NULL_TREE;
2410 /* If we changed the conditions that cause a trap, we lose. */
2411 if ((ltrap || rtrap) != trap)
2412 return NULL_TREE;
2415 if (compcode == COMPCODE_TRUE)
2416 return constant_boolean_node (true, truth_type);
2417 else if (compcode == COMPCODE_FALSE)
2418 return constant_boolean_node (false, truth_type);
2419 else
2421 enum tree_code tcode;
2423 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2424 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
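/* Editor's worked example (an addition, not part of GCC): with one bit each
   for LT, EQ, GT and UNORDERED in the compcode encoding, ANDing or ORing
   two compcodes is exactly ANDing or ORing the comparisons themselves:

     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
       mirroring  (a < b || a == b)  <==>  (a <= b)
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ
       mirroring  (a <= b && a >= b)  <==>  (a == b)

   which is why the TRUTH_AND/TRUTH_OR cases above reduce to "&" and "|".  */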
2428 /* Return nonzero if two operands (typically of the same tree node)
2429 are necessarily equal. If either argument has side-effects this
2430 function returns zero. FLAGS modifies behavior as follows:
2432 If OEP_ONLY_CONST is set, only return nonzero for constants.
2433 This function tests whether the operands are indistinguishable;
2434 it does not test whether they are equal using C's == operation.
2435 The distinction is important for IEEE floating point, because
2436 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2437 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2439 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2440 even though it may hold multiple values during a function.
2441 This is because a GCC tree node guarantees that nothing else is
2442 executed between the evaluation of its "operands" (which may often
2443 be evaluated in arbitrary order). Hence if the operands themselves
2444 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2445 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2446 unset means assuming isochronic (or instantaneous) tree equivalence.
2447 Unless comparing arbitrary expression trees, such as from different
2448 statements, this flag can usually be left unset.
2450 If OEP_PURE_SAME is set, then pure functions with identical arguments
2451 are considered the same. It is used when the caller has other ways
2452 to ensure that global memory is unchanged in between. */
2454 int
2455 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2457 /* If either is ERROR_MARK, they aren't equal. */
2458 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2459 || TREE_TYPE (arg0) == error_mark_node
2460 || TREE_TYPE (arg1) == error_mark_node)
2461 return 0;
2463 /* Similar, if either does not have a type (like a released SSA name),
2464 they aren't equal. */
2465 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2466 return 0;
2468 /* Check equality of integer constants before bailing out due to
2469 precision differences. */
2470 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2471 return tree_int_cst_equal (arg0, arg1);
2473 /* If both types don't have the same signedness, then we can't consider
2474 them equal. We must check this before the STRIP_NOPS calls
2475 because they may change the signedness of the arguments. As pointers
2476 strictly don't have a signedness, require either two pointers or
2477 two non-pointers as well. */
2478 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2479 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2480 return 0;
2482 /* We cannot consider pointers to different address space equal. */
2483 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2484 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2485 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2486 return 0;
2488 /* If both types don't have the same precision, then it is not safe
2489 to strip NOPs. */
2490 if (element_precision (TREE_TYPE (arg0))
2491 != element_precision (TREE_TYPE (arg1)))
2492 return 0;
2494 STRIP_NOPS (arg0);
2495 STRIP_NOPS (arg1);
2497 /* In case both args are comparisons but with different comparison
2498 code, try to swap the comparison operands of one arg to produce
2499 a match and compare that variant. */
2500 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2501 && COMPARISON_CLASS_P (arg0)
2502 && COMPARISON_CLASS_P (arg1))
2504 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2506 if (TREE_CODE (arg0) == swap_code)
2507 return operand_equal_p (TREE_OPERAND (arg0, 0),
2508 TREE_OPERAND (arg1, 1), flags)
2509 && operand_equal_p (TREE_OPERAND (arg0, 1),
2510 TREE_OPERAND (arg1, 0), flags);
2513 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2514 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2515 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2516 return 0;
2518 /* This is needed for conversions and for COMPONENT_REF.
2519 Might as well play it safe and always test this. */
2520 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2521 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2522 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2523 return 0;
2525 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2526 We don't care about side effects in that case because the SAVE_EXPR
2527 takes care of that for us. In all other cases, two expressions are
2528 equal if they have no side effects. If we have two identical
2529 expressions with side effects that should be treated the same due
2530 to the only side effects being identical SAVE_EXPR's, that will
2531 be detected in the recursive calls below.
2532 If we are taking an invariant address of two identical objects
2533 they are necessarily equal as well. */
2534 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2535 && (TREE_CODE (arg0) == SAVE_EXPR
2536 || (flags & OEP_CONSTANT_ADDRESS_OF)
2537 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2538 return 1;
2540 /* Next handle constant cases, those for which we can return 1 even
2541 if ONLY_CONST is set. */
2542 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2543 switch (TREE_CODE (arg0))
2545 case INTEGER_CST:
2546 return tree_int_cst_equal (arg0, arg1);
2548 case FIXED_CST:
2549 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2550 TREE_FIXED_CST (arg1));
2552 case REAL_CST:
2553 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2554 TREE_REAL_CST (arg1)))
2555 return 1;
2558 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2560 /* If we do not distinguish between signed and unsigned zero,
2561 consider them equal. */
2562 if (real_zerop (arg0) && real_zerop (arg1))
2563 return 1;
2565 return 0;
2567 case VECTOR_CST:
2569 unsigned i;
2571 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2572 return 0;
2574 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2576 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2577 VECTOR_CST_ELT (arg1, i), flags))
2578 return 0;
2580 return 1;
2583 case COMPLEX_CST:
2584 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2585 flags)
2586 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2587 flags));
2589 case STRING_CST:
2590 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2591 && ! memcmp (TREE_STRING_POINTER (arg0),
2592 TREE_STRING_POINTER (arg1),
2593 TREE_STRING_LENGTH (arg0)));
2595 case ADDR_EXPR:
2596 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2597 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2598 ? OEP_CONSTANT_ADDRESS_OF : 0);
2599 default:
2600 break;
2603 if (flags & OEP_ONLY_CONST)
2604 return 0;
2606 /* Define macros to test an operand from arg0 and arg1 for equality and a
2607 variant that allows null and views null as being different from any
2608 non-null value. In the latter case, if either is null, then both
2609 must be; otherwise, do the normal comparison. */
2610 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2611 TREE_OPERAND (arg1, N), flags)
2613 #define OP_SAME_WITH_NULL(N) \
2614 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2615 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2617 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2619 case tcc_unary:
2620 /* Two conversions are equal only if signedness and modes match. */
2621 switch (TREE_CODE (arg0))
2623 CASE_CONVERT:
2624 case FIX_TRUNC_EXPR:
2625 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2626 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2627 return 0;
2628 break;
2629 default:
2630 break;
2633 return OP_SAME (0);
2636 case tcc_comparison:
2637 case tcc_binary:
2638 if (OP_SAME (0) && OP_SAME (1))
2639 return 1;
2641 /* For commutative ops, allow the other order. */
2642 return (commutative_tree_code (TREE_CODE (arg0))
2643 && operand_equal_p (TREE_OPERAND (arg0, 0),
2644 TREE_OPERAND (arg1, 1), flags)
2645 && operand_equal_p (TREE_OPERAND (arg0, 1),
2646 TREE_OPERAND (arg1, 0), flags));
2648 case tcc_reference:
2649 /* If either of the pointer (or reference) expressions we are
2650 dereferencing contain a side effect, these cannot be equal,
2651 but their addresses can be. */
2652 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2653 && (TREE_SIDE_EFFECTS (arg0)
2654 || TREE_SIDE_EFFECTS (arg1)))
2655 return 0;
2657 switch (TREE_CODE (arg0))
2659 case INDIRECT_REF:
2660 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2661 return OP_SAME (0);
2663 case REALPART_EXPR:
2664 case IMAGPART_EXPR:
2665 return OP_SAME (0);
2667 case TARGET_MEM_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 /* Require equal extra operands and then fall through to MEM_REF
2670 handling of the two common operands. */
2671 if (!OP_SAME_WITH_NULL (2)
2672 || !OP_SAME_WITH_NULL (3)
2673 || !OP_SAME_WITH_NULL (4))
2674 return 0;
2675 /* Fallthru. */
2676 case MEM_REF:
2677 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678 /* Require equal access sizes, and similar pointer types.
2679 We can have incomplete types for array references of
2680 variable-sized arrays from the Fortran frontend
2681 though. Also verify the types are compatible. */
2682 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2683 || (TYPE_SIZE (TREE_TYPE (arg0))
2684 && TYPE_SIZE (TREE_TYPE (arg1))
2685 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2686 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2687 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2688 && alias_ptr_types_compatible_p
2689 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2690 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2691 && OP_SAME (0) && OP_SAME (1));
2693 case ARRAY_REF:
2694 case ARRAY_RANGE_REF:
2695 /* Operands 2 and 3 may be null.
2696 Compare the array index by value first if it is constant, as we
2697 may have different types but the same value here. */
2698 if (!OP_SAME (0))
2699 return 0;
2700 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2701 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2702 TREE_OPERAND (arg1, 1))
2703 || OP_SAME (1))
2704 && OP_SAME_WITH_NULL (2)
2705 && OP_SAME_WITH_NULL (3));
2707 case COMPONENT_REF:
2708 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2709 may be NULL when we're called to compare MEM_EXPRs. */
2710 if (!OP_SAME_WITH_NULL (0)
2711 || !OP_SAME (1))
2712 return 0;
2713 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2714 return OP_SAME_WITH_NULL (2);
2716 case BIT_FIELD_REF:
2717 if (!OP_SAME (0))
2718 return 0;
2719 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2720 return OP_SAME (1) && OP_SAME (2);
2722 default:
2723 return 0;
2726 case tcc_expression:
2727 switch (TREE_CODE (arg0))
2729 case ADDR_EXPR:
2730 case TRUTH_NOT_EXPR:
2731 return OP_SAME (0);
2733 case TRUTH_ANDIF_EXPR:
2734 case TRUTH_ORIF_EXPR:
2735 return OP_SAME (0) && OP_SAME (1);
2737 case FMA_EXPR:
2738 case WIDEN_MULT_PLUS_EXPR:
2739 case WIDEN_MULT_MINUS_EXPR:
2740 if (!OP_SAME (2))
2741 return 0;
2742 /* The multiplication operands are commutative. */
2743 /* FALLTHRU */
2745 case TRUTH_AND_EXPR:
2746 case TRUTH_OR_EXPR:
2747 case TRUTH_XOR_EXPR:
2748 if (OP_SAME (0) && OP_SAME (1))
2749 return 1;
2751 /* Otherwise take into account this is a commutative operation. */
2752 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2753 TREE_OPERAND (arg1, 1), flags)
2754 && operand_equal_p (TREE_OPERAND (arg0, 1),
2755 TREE_OPERAND (arg1, 0), flags));
2757 case COND_EXPR:
2758 case VEC_COND_EXPR:
2759 case DOT_PROD_EXPR:
2760 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2762 default:
2763 return 0;
2766 case tcc_vl_exp:
2767 switch (TREE_CODE (arg0))
2769 case CALL_EXPR:
2770 /* If the CALL_EXPRs call different functions, then they
2771 clearly cannot be equal. */
2772 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2773 flags))
2774 return 0;
2777 unsigned int cef = call_expr_flags (arg0);
2778 if (flags & OEP_PURE_SAME)
2779 cef &= ECF_CONST | ECF_PURE;
2780 else
2781 cef &= ECF_CONST;
2782 if (!cef)
2783 return 0;
2786 /* Now see if all the arguments are the same. */
2788 const_call_expr_arg_iterator iter0, iter1;
2789 const_tree a0, a1;
2790 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2791 a1 = first_const_call_expr_arg (arg1, &iter1);
2792 a0 && a1;
2793 a0 = next_const_call_expr_arg (&iter0),
2794 a1 = next_const_call_expr_arg (&iter1))
2795 if (! operand_equal_p (a0, a1, flags))
2796 return 0;
2798 /* If we get here and both argument lists are exhausted
2799 then the CALL_EXPRs are equal. */
2800 return ! (a0 || a1);
2802 default:
2803 return 0;
2806 case tcc_declaration:
2807 /* Consider __builtin_sqrt equal to sqrt. */
2808 return (TREE_CODE (arg0) == FUNCTION_DECL
2809 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2810 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2811 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2813 default:
2814 return 0;
2817 #undef OP_SAME
2818 #undef OP_SAME_WITH_NULL
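/* Editor's illustrative sketch (an addition, not part of GCC):
   operand_equal_p tests indistinguishability, not C's "==".  The difference
   is visible with IEEE signed zeros:

     #include <assert.h>

     int main (void)
     {
       double pz = 0.0, nz = -0.0;
       assert (pz == nz);              // equal under C's ==
       assert (1.0 / pz != 1.0 / nz);  // yet distinguishable: +inf vs -inf
       return 0;
     }

   (assuming IEEE arithmetic), hence the REAL_CST case uses
   REAL_VALUES_IDENTICAL and only equates the two zeros when
   !HONOR_SIGNED_ZEROS.  */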
2821 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2822 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2824 When in doubt, return 0. */
2826 static int
2827 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2829 int unsignedp1, unsignedpo;
2830 tree primarg0, primarg1, primother;
2831 unsigned int correct_width;
2833 if (operand_equal_p (arg0, arg1, 0))
2834 return 1;
2836 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2837 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2838 return 0;
2840 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2841 and see if the inner values are the same. This removes any
2842 signedness comparison, which doesn't matter here. */
2843 primarg0 = arg0, primarg1 = arg1;
2844 STRIP_NOPS (primarg0);
2845 STRIP_NOPS (primarg1);
2846 if (operand_equal_p (primarg0, primarg1, 0))
2847 return 1;
2849 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2850 actual comparison operand, ARG0.
2852 First throw away any conversions to wider types
2853 already present in the operands. */
2855 primarg1 = get_narrower (arg1, &unsignedp1);
2856 primother = get_narrower (other, &unsignedpo);
2858 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2859 if (unsignedp1 == unsignedpo
2860 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2861 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2863 tree type = TREE_TYPE (arg0);
2865 /* Make sure shorter operand is extended the right way
2866 to match the longer operand. */
2867 primarg1 = fold_convert (signed_or_unsigned_type_for
2868 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2870 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2871 return 1;
2874 return 0;
2877 /* See if ARG is an expression that is either a comparison or is performing
2878 arithmetic on comparisons. The comparisons must only be comparing
2879 two different values, which will be stored in *CVAL1 and *CVAL2; if
2880 they are nonzero it means that some operands have already been found.
2881 No variables may be used anywhere else in the expression except in the
2882 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2883 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2885 If this is true, return 1. Otherwise, return zero. */
2887 static int
2888 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2890 enum tree_code code = TREE_CODE (arg);
2891 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2893 /* We can handle some of the tcc_expression cases here. */
2894 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2895 tclass = tcc_unary;
2896 else if (tclass == tcc_expression
2897 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2898 || code == COMPOUND_EXPR))
2899 tclass = tcc_binary;
2901 else if (tclass == tcc_expression && code == SAVE_EXPR
2902 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2904 /* If we've already found a CVAL1 or CVAL2, this expression is
2905 too complex to handle. */
2906 if (*cval1 || *cval2)
2907 return 0;
2909 tclass = tcc_unary;
2910 *save_p = 1;
2913 switch (tclass)
2915 case tcc_unary:
2916 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2918 case tcc_binary:
2919 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2920 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2921 cval1, cval2, save_p));
2923 case tcc_constant:
2924 return 1;
2926 case tcc_expression:
2927 if (code == COND_EXPR)
2928 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2929 cval1, cval2, save_p)
2930 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2931 cval1, cval2, save_p)
2932 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2933 cval1, cval2, save_p));
2934 return 0;
2936 case tcc_comparison:
2937 /* First see if we can handle the first operand, then the second. For
2938 the second operand, we know *CVAL1 can't be zero. It must be that
2939 one side of the comparison is each of the values; test for the
2940 case where this isn't true by failing if the two operands
2941 are the same. */
2943 if (operand_equal_p (TREE_OPERAND (arg, 0),
2944 TREE_OPERAND (arg, 1), 0))
2945 return 0;
2947 if (*cval1 == 0)
2948 *cval1 = TREE_OPERAND (arg, 0);
2949 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2950 ;
2951 else if (*cval2 == 0)
2952 *cval2 = TREE_OPERAND (arg, 0);
2953 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2954 ;
2955 else
2956 return 0;
2958 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2959 ;
2960 else if (*cval2 == 0)
2961 *cval2 = TREE_OPERAND (arg, 1);
2962 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2963 ;
2964 else
2965 return 0;
2967 return 1;
2969 default:
2970 return 0;
2974 /* ARG is a tree that is known to contain just arithmetic operations and
2975 comparisons. Evaluate the operations in the tree substituting NEW0 for
2976 any occurrence of OLD0 as an operand of a comparison and likewise for
2977 NEW1 and OLD1. */
2979 static tree
2980 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2981 tree old1, tree new1)
2983 tree type = TREE_TYPE (arg);
2984 enum tree_code code = TREE_CODE (arg);
2985 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2987 /* We can handle some of the tcc_expression cases here. */
2988 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2989 tclass = tcc_unary;
2990 else if (tclass == tcc_expression
2991 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2992 tclass = tcc_binary;
2994 switch (tclass)
2996 case tcc_unary:
2997 return fold_build1_loc (loc, code, type,
2998 eval_subst (loc, TREE_OPERAND (arg, 0),
2999 old0, new0, old1, new1));
3001 case tcc_binary:
3002 return fold_build2_loc (loc, code, type,
3003 eval_subst (loc, TREE_OPERAND (arg, 0),
3004 old0, new0, old1, new1),
3005 eval_subst (loc, TREE_OPERAND (arg, 1),
3006 old0, new0, old1, new1));
3008 case tcc_expression:
3009 switch (code)
3011 case SAVE_EXPR:
3012 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3013 old1, new1);
3015 case COMPOUND_EXPR:
3016 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3017 old1, new1);
3019 case COND_EXPR:
3020 return fold_build3_loc (loc, code, type,
3021 eval_subst (loc, TREE_OPERAND (arg, 0),
3022 old0, new0, old1, new1),
3023 eval_subst (loc, TREE_OPERAND (arg, 1),
3024 old0, new0, old1, new1),
3025 eval_subst (loc, TREE_OPERAND (arg, 2),
3026 old0, new0, old1, new1));
3027 default:
3028 break;
3030 /* Fall through - ??? */
3032 case tcc_comparison:
3034 tree arg0 = TREE_OPERAND (arg, 0);
3035 tree arg1 = TREE_OPERAND (arg, 1);
3037 /* We need to check both for exact equality and tree equality. The
3038 former will be true if the operand has a side-effect. In that
3039 case, we know the operand occurred exactly once. */
3041 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3042 arg0 = new0;
3043 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3044 arg0 = new1;
3046 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3047 arg1 = new0;
3048 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3049 arg1 = new1;
3051 return fold_build2_loc (loc, code, type, arg0, arg1);
3054 default:
3055 return arg;
3059 /* Return a tree for the case when the result of an expression is RESULT
3060 converted to TYPE and OMITTED was previously an operand of the expression
3061 but is now not needed (e.g., we folded OMITTED * 0).
3063 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3064 the conversion of RESULT to TYPE. */
3066 tree
3067 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3069 tree t = fold_convert_loc (loc, type, result);
3071 /* If the resulting operand is an empty statement, just return the omitted
3072 statement cast to void. */
3073 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3074 return build1_loc (loc, NOP_EXPR, void_type_node,
3075 fold_ignored_result (omitted));
3077 if (TREE_SIDE_EFFECTS (omitted))
3078 return build2_loc (loc, COMPOUND_EXPR, type,
3079 fold_ignored_result (omitted), t);
3081 return non_lvalue_loc (loc, t);
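/* Editor's sketch (an addition, not part of GCC): the COMPOUND_EXPR built
   above corresponds to C's comma operator, so folding e.g. f () * 0 still
   evaluates f () exactly once:

     #include <assert.h>

     static int calls;
     static int f (void) { calls++; return 42; }

     int main (void)
     {
       int r = (f (), 0);   // shape of the tree omit_one_operand_loc builds
       assert (r == 0 && calls == 1);
       return 0;
     }
*/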
3084 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3086 static tree
3087 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3088 tree omitted)
3090 tree t = fold_convert_loc (loc, type, result);
3092 /* If the resulting operand is an empty statement, just return the omitted
3093 statement cast to void. */
3094 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3095 return build1_loc (loc, NOP_EXPR, void_type_node,
3096 fold_ignored_result (omitted));
3098 if (TREE_SIDE_EFFECTS (omitted))
3099 return build2_loc (loc, COMPOUND_EXPR, type,
3100 fold_ignored_result (omitted), t);
3102 return pedantic_non_lvalue_loc (loc, t);
3105 /* Return a tree for the case when the result of an expression is RESULT
3106 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3107 of the expression but are now not needed.
3109 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3110 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3111 evaluated before OMITTED2. Otherwise, if neither has side effects,
3112 just do the conversion of RESULT to TYPE. */
3114 tree
3115 omit_two_operands_loc (location_t loc, tree type, tree result,
3116 tree omitted1, tree omitted2)
3118 tree t = fold_convert_loc (loc, type, result);
3120 if (TREE_SIDE_EFFECTS (omitted2))
3121 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3122 if (TREE_SIDE_EFFECTS (omitted1))
3123 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3125 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3129 /* Return a simplified tree node for the truth-negation of ARG. This
3130 never alters ARG itself. We assume that ARG is an operation that
3131 returns a truth value (0 or 1).
3133 FIXME: one would think we would fold the result, but it causes
3134 problems with the dominator optimizer. */
3136 static tree
3137 fold_truth_not_expr (location_t loc, tree arg)
3139 tree type = TREE_TYPE (arg);
3140 enum tree_code code = TREE_CODE (arg);
3141 location_t loc1, loc2;
3143 /* If this is a comparison, we can simply invert it, except for
3144 floating-point non-equality comparisons, in which case we just
3145 enclose a TRUTH_NOT_EXPR around what we have. */
3147 if (TREE_CODE_CLASS (code) == tcc_comparison)
3149 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3150 if (FLOAT_TYPE_P (op_type)
3151 && flag_trapping_math
3152 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3153 && code != NE_EXPR && code != EQ_EXPR)
3154 return NULL_TREE;
3156 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3157 if (code == ERROR_MARK)
3158 return NULL_TREE;
3160 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3161 TREE_OPERAND (arg, 1));
3164 switch (code)
3166 case INTEGER_CST:
3167 return constant_boolean_node (integer_zerop (arg), type);
3169 case TRUTH_AND_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 return build2_loc (loc, TRUTH_OR_EXPR, type,
3173 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3174 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3176 case TRUTH_OR_EXPR:
3177 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3178 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3179 return build2_loc (loc, TRUTH_AND_EXPR, type,
3180 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3181 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3183 case TRUTH_XOR_EXPR:
3184 /* Here we can invert either operand. We invert the first operand
3185 unless the second operand is a TRUTH_NOT_EXPR in which case our
3186 result is the XOR of the first operand with the inside of the
3187 negation of the second operand. */
3189 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3190 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3191 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3192 else
3193 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3194 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3195 TREE_OPERAND (arg, 1));
3197 case TRUTH_ANDIF_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3204 case TRUTH_ORIF_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3207 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3208 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3209 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3211 case TRUTH_NOT_EXPR:
3212 return TREE_OPERAND (arg, 0);
3214 case COND_EXPR:
3216 tree arg1 = TREE_OPERAND (arg, 1);
3217 tree arg2 = TREE_OPERAND (arg, 2);
3219 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3220 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3222 /* A COND_EXPR may have a throw as one operand, which
3223 then has void type. Just leave void operands
3224 as they are. */
3225 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3226 VOID_TYPE_P (TREE_TYPE (arg1))
3227 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3228 VOID_TYPE_P (TREE_TYPE (arg2))
3229 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3232 case COMPOUND_EXPR:
3233 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3234 return build2_loc (loc, COMPOUND_EXPR, type,
3235 TREE_OPERAND (arg, 0),
3236 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3238 case NON_LVALUE_EXPR:
3239 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3240 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3242 CASE_CONVERT:
3243 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3244 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3246 /* ... fall through ... */
3248 case FLOAT_EXPR:
3249 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3250 return build1_loc (loc, TREE_CODE (arg), type,
3251 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3253 case BIT_AND_EXPR:
3254 if (!integer_onep (TREE_OPERAND (arg, 1)))
3255 return NULL_TREE;
3256 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3258 case SAVE_EXPR:
3259 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3261 case CLEANUP_POINT_EXPR:
3262 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3263 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3264 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3266 default:
3267 return NULL_TREE;
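/* Editor's worked check (an addition, not part of GCC): the TRUTH_AND/OR
   (and ANDIF/ORIF) cases above are just De Morgan's laws:

     #include <assert.h>

     int main (void)
     {
       for (int a = 0; a <= 1; a++)
         for (int b = 0; b <= 1; b++)
           {
             assert (!(a && b) == (!a || !b));
             assert (!(a || b) == (!a && !b));
           }
       return 0;
     }
*/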
3271 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3272 assume that ARG is an operation that returns a truth value (0 or 1
3273 for scalars, 0 or -1 for vectors). Return the folded expression if
3274 folding is successful. Otherwise, return NULL_TREE. */
3276 static tree
3277 fold_invert_truthvalue (location_t loc, tree arg)
3279 tree type = TREE_TYPE (arg);
3280 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3281 ? BIT_NOT_EXPR
3282 : TRUTH_NOT_EXPR,
3283 type, arg);
3286 /* Return a simplified tree node for the truth-negation of ARG. This
3287 never alters ARG itself. We assume that ARG is an operation that
3288 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3290 tree
3291 invert_truthvalue_loc (location_t loc, tree arg)
3293 if (TREE_CODE (arg) == ERROR_MARK)
3294 return arg;
3296 tree type = TREE_TYPE (arg);
3297 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3298 ? BIT_NOT_EXPR
3299 : TRUTH_NOT_EXPR,
3300 type, arg);
3303 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3304 operands are another bit-wise operation with a common input. If so,
3305 distribute the bit operations to save an operation and possibly two if
3306 constants are involved. For example, convert
3307 (A | B) & (A | C) into A | (B & C)
3308 Further simplification will occur if B and C are constants.
3310 If this optimization cannot be done, 0 will be returned. */
3312 static tree
3313 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3314 tree arg0, tree arg1)
3316 tree common;
3317 tree left, right;
3319 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3320 || TREE_CODE (arg0) == code
3321 || (TREE_CODE (arg0) != BIT_AND_EXPR
3322 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3323 return 0;
3325 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3327 common = TREE_OPERAND (arg0, 0);
3328 left = TREE_OPERAND (arg0, 1);
3329 right = TREE_OPERAND (arg1, 1);
3331 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3333 common = TREE_OPERAND (arg0, 0);
3334 left = TREE_OPERAND (arg0, 1);
3335 right = TREE_OPERAND (arg1, 0);
3337 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3339 common = TREE_OPERAND (arg0, 1);
3340 left = TREE_OPERAND (arg0, 0);
3341 right = TREE_OPERAND (arg1, 1);
3343 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3345 common = TREE_OPERAND (arg0, 1);
3346 left = TREE_OPERAND (arg0, 0);
3347 right = TREE_OPERAND (arg1, 0);
3349 else
3350 return 0;
3352 common = fold_convert_loc (loc, type, common);
3353 left = fold_convert_loc (loc, type, left);
3354 right = fold_convert_loc (loc, type, right);
3355 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3356 fold_build2_loc (loc, code, type, left, right));
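/* Editor's sketch (an addition, not part of GCC): the rewrite above relies
   on the distributive identity (A | B) & (A | C) == A | (B & C) and its
   dual.  An exhaustive check over bytes:

     #include <assert.h>

     int main (void)
     {
       for (unsigned a = 0; a < 256; a++)
         for (unsigned b = 0; b < 256; b++)
           for (unsigned c = 0; c < 256; c++)
             {
               assert (((a | b) & (a | c)) == (a | (b & c)));
               assert (((a & b) | (a & c)) == (a & (b | c)));
             }
       return 0;
     }
*/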
3359 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3360 with code CODE. This optimization is unsafe. */
3361 static tree
3362 distribute_real_division (location_t loc, enum tree_code code, tree type,
3363 tree arg0, tree arg1)
3365 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3366 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3368 /* (A / C) +- (B / C) -> (A +- B) / C. */
3369 if (mul0 == mul1
3370 && operand_equal_p (TREE_OPERAND (arg0, 1),
3371 TREE_OPERAND (arg1, 1), 0))
3372 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3373 fold_build2_loc (loc, code, type,
3374 TREE_OPERAND (arg0, 0),
3375 TREE_OPERAND (arg1, 0)),
3376 TREE_OPERAND (arg0, 1));
3378 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3379 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3380 TREE_OPERAND (arg1, 0), 0)
3381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3382 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3384 REAL_VALUE_TYPE r0, r1;
3385 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3386 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3387 if (!mul0)
3388 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3389 if (!mul1)
3390 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3391 real_arithmetic (&r0, code, &r0, &r1);
3392 return fold_build2_loc (loc, MULT_EXPR, type,
3393 TREE_OPERAND (arg0, 0),
3394 build_real (type, r0));
3397 return NULL_TREE;
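/* Editor's note (an addition, not part of GCC): the transformation is
   unsafe because the two forms can round differently, so presumably it is
   only applied under unsafe-math-style flags.  For example, with doubles:

     #include <assert.h>

     int main (void)
     {
       double a = 1.0, b = 2.0, c = 10.0;
       assert (a / c + b / c != (a + b) / c);  // 0.30000000000000004 vs 0.3
       return 0;
     }
*/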
3400 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3401 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3403 static tree
3404 make_bit_field_ref (location_t loc, tree inner, tree type,
3405 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3407 tree result, bftype;
3409 if (bitpos == 0)
3411 tree size = TYPE_SIZE (TREE_TYPE (inner));
3412 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3413 || POINTER_TYPE_P (TREE_TYPE (inner)))
3414 && tree_fits_shwi_p (size)
3415 && tree_to_shwi (size) == bitsize)
3416 return fold_convert_loc (loc, type, inner);
3419 bftype = type;
3420 if (TYPE_PRECISION (bftype) != bitsize
3421 || TYPE_UNSIGNED (bftype) == !unsignedp)
3422 bftype = build_nonstandard_integer_type (bitsize, 0);
3424 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3425 size_int (bitsize), bitsize_int (bitpos));
3427 if (bftype != type)
3428 result = fold_convert_loc (loc, type, result);
3430 return result;
3433 /* Optimize a bit-field compare.
3435 There are two cases: First is a compare against a constant and the
3436 second is a comparison of two items where the fields are at the same
3437 bit position relative to the start of a chunk (byte, halfword, word)
3438 large enough to contain it. In these cases we can avoid the shift
3439 implicit in bitfield extractions.
3441 For constants, we emit a compare of the shifted constant with the
3442 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3443 compared. For two fields at the same position, we do the ANDs with the
3444 similar mask and compare the result of the ANDs.
3446 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3447 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3448 are the left and right operands of the comparison, respectively.
3450 If the optimization described above can be done, we return the resulting
3451 tree. Otherwise we return zero. */
3453 static tree
3454 optimize_bit_field_compare (location_t loc, enum tree_code code,
3455 tree compare_type, tree lhs, tree rhs)
3457 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3458 tree type = TREE_TYPE (lhs);
3459 tree unsigned_type;
3460 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3461 machine_mode lmode, rmode, nmode;
3462 int lunsignedp, runsignedp;
3463 int lvolatilep = 0, rvolatilep = 0;
3464 tree linner, rinner = NULL_TREE;
3465 tree mask;
3466 tree offset;
3468 /* Get all the information about the extractions being done. If the bit size
3469 is the same as the size of the underlying object, we aren't doing an
3470 extraction at all and so can do nothing. We also don't want to
3471 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3472 then will no longer be able to replace it. */
3473 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3474 &lunsignedp, &lvolatilep, false);
3475 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3476 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3477 return 0;
3479 if (!const_p)
3481 /* If this is not a constant, we can only do something if bit positions,
3482 sizes, and signedness are the same. */
3483 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3484 &runsignedp, &rvolatilep, false);
3486 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3487 || lunsignedp != runsignedp || offset != 0
3488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3489 return 0;
3492 /* See if we can find a mode to refer to this field. We should be able to,
3493 but fail if we can't. */
3494 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3495 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3496 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3497 TYPE_ALIGN (TREE_TYPE (rinner))),
3498 word_mode, false);
3499 if (nmode == VOIDmode)
3500 return 0;
3502 /* Set signed and unsigned types of the precision of this mode for the
3503 shifts below. */
3504 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3506 /* Compute the bit position and size for the new reference and our offset
3507 within it. If the new reference is the same size as the original, we
3508 won't optimize anything, so return zero. */
3509 nbitsize = GET_MODE_BITSIZE (nmode);
3510 nbitpos = lbitpos & ~ (nbitsize - 1);
3511 lbitpos -= nbitpos;
3512 if (nbitsize == lbitsize)
3513 return 0;
3515 if (BYTES_BIG_ENDIAN)
3516 lbitpos = nbitsize - lbitsize - lbitpos;
3518 /* Make the mask to be used against the extracted field. */
3519 mask = build_int_cst_type (unsigned_type, -1);
3520 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3521 mask = const_binop (RSHIFT_EXPR, mask,
3522 size_int (nbitsize - lbitsize - lbitpos));
3524 if (! const_p)
3525 /* If not comparing with constant, just rework the comparison
3526 and return. */
3527 return fold_build2_loc (loc, code, compare_type,
3528 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3529 make_bit_field_ref (loc, linner,
3530 unsigned_type,
3531 nbitsize, nbitpos,
3532 1),
3533 mask),
3534 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3535 make_bit_field_ref (loc, rinner,
3536 unsigned_type,
3537 nbitsize, nbitpos,
3538 1),
3539 mask));
3541 /* Otherwise, we are handling the constant case. See if the constant is too
3542 big for the field. Warn and return a tree for 0 (false) if so. We do
3543 this not only for its own sake, but to avoid having to test for this
3544 error case below. If we didn't, we might generate wrong code.
3546 For unsigned fields, the constant shifted right by the field length should
3547 be all zero. For signed fields, the high-order bits should agree with
3548 the sign bit. */
3550 if (lunsignedp)
3552 if (wi::lrshift (rhs, lbitsize) != 0)
3554 warning (0, "comparison is always %d due to width of bit-field",
3555 code == NE_EXPR);
3556 return constant_boolean_node (code == NE_EXPR, compare_type);
3559 else
3561 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3562 if (tem != 0 && tem != -1)
3564 warning (0, "comparison is always %d due to width of bit-field",
3565 code == NE_EXPR);
3566 return constant_boolean_node (code == NE_EXPR, compare_type);
3570 /* Single-bit compares should always be against zero. */
3571 if (lbitsize == 1 && ! integer_zerop (rhs))
3573 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3574 rhs = build_int_cst (type, 0);
3577 /* Make a new bitfield reference, shift the constant over the
3578 appropriate number of bits and mask it with the computed mask
3579 (in case this was a signed field). If we changed it, make a new one. */
3580 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3582 rhs = const_binop (BIT_AND_EXPR,
3583 const_binop (LSHIFT_EXPR,
3584 fold_convert_loc (loc, unsigned_type, rhs),
3585 size_int (lbitpos)),
3586 mask);
3588 lhs = build2_loc (loc, code, compare_type,
3589 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3590 return lhs;
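/* Editor's sketch of the idea (an addition; the layout shown is
   hypothetical, not GCC output): for

     struct s { unsigned f : 3; unsigned g : 5; };

   a test like "x.f == 5" normally needs a shift to extract the field; the
   code above instead compares a masked chunk against the shifted constant,
   roughly (on a little-endian target where f occupies the low bits)

     (*(unsigned char *) &x & 0x07) == 5

   and for two fields at the same bit position it simply compares the two
   masked chunks, with no shift at all.  */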
3593 /* Subroutine for fold_truth_andor_1: decode a field reference.
3595 If EXP is a component reference, we return the innermost reference.
3597 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3598 set to the starting bit number.
3600 If the innermost field can be completely contained in a mode-sized
3601 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3603 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3604 otherwise it is not changed.
3606 *PUNSIGNEDP is set to the signedness of the field.
3608 *PMASK is set to the mask used. This is either contained in a
3609 BIT_AND_EXPR or derived from the width of the field.
3611 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3613 Return 0 if this is not a component reference or is one that we can't
3614 do anything with. */
3616 static tree
3617 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3618 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3619 int *punsignedp, int *pvolatilep,
3620 tree *pmask, tree *pand_mask)
3622 tree outer_type = 0;
3623 tree and_mask = 0;
3624 tree mask, inner, offset;
3625 tree unsigned_type;
3626 unsigned int precision;
3628 /* All the optimizations using this function assume integer fields.
3629 There are problems with FP fields since the type_for_size call
3630 below can fail for, e.g., XFmode. */
3631 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3632 return 0;
3634 /* We are interested in the bare arrangement of bits, so strip everything
3635 that doesn't affect the machine mode. However, record the type of the
3636 outermost expression if it may matter below. */
3637 if (CONVERT_EXPR_P (exp)
3638 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3639 outer_type = TREE_TYPE (exp);
3640 STRIP_NOPS (exp);
3642 if (TREE_CODE (exp) == BIT_AND_EXPR)
3644 and_mask = TREE_OPERAND (exp, 1);
3645 exp = TREE_OPERAND (exp, 0);
3646 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3647 if (TREE_CODE (and_mask) != INTEGER_CST)
3648 return 0;
3651 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3652 punsignedp, pvolatilep, false);
3653 if ((inner == exp && and_mask == 0)
3654 || *pbitsize < 0 || offset != 0
3655 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3656 return 0;
3658 /* If the number of bits in the reference is the same as the bitsize of
3659 the outer type, then the outer type gives the signedness. Otherwise
3660 (in case of a small bitfield) the signedness is unchanged. */
3661 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3662 *punsignedp = TYPE_UNSIGNED (outer_type);
3664 /* Compute the mask to access the bitfield. */
3665 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3666 precision = TYPE_PRECISION (unsigned_type);
3668 mask = build_int_cst_type (unsigned_type, -1);
3670 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3671 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3673 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3674 if (and_mask != 0)
3675 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3676 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3678 *pmask = mask;
3679 *pand_mask = and_mask;
3680 return inner;
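/* Editor's worked example (an addition, not part of GCC): for a 3-bit
   field accessed through an 8-bit unsigned type, the shift pair above
   computes the field mask:

     mask = 0xFF;             // build_int_cst_type (unsigned_type, -1)
     mask = mask << (8 - 3);  // 0xE0
     mask = mask >> (8 - 3);  // 0x07, i.e. three low-order ones
*/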
3683 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3684 bit positions and MASK is SIGNED. */
3686 static int
3687 all_ones_mask_p (const_tree mask, unsigned int size)
3689 tree type = TREE_TYPE (mask);
3690 unsigned int precision = TYPE_PRECISION (type);
3692 /* If this function returns true when the type of the mask is
3693 UNSIGNED, then there will be errors. In particular see
3694 gcc.c-torture/execute/990326-1.c. There does not appear to be
3695 any documentation paper trail as to why this is so. But the pre
3696 wide-int worked with that restriction and it has been preserved
3697 here. */
3698 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3699 return false;
3701 return wi::mask (size, false, precision) == mask;
3704 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3705 represents the sign bit of EXP's type. If EXP represents a sign
3706 or zero extension, also test VAL against the unextended type.
3707 The return value is the (sub)expression whose sign bit is VAL,
3708 or NULL_TREE otherwise. */
3710 static tree
3711 sign_bit_p (tree exp, const_tree val)
3713 int width;
3714 tree t;
3716 /* Tree EXP must have an integral type. */
3717 t = TREE_TYPE (exp);
3718 if (! INTEGRAL_TYPE_P (t))
3719 return NULL_TREE;
3721 /* Tree VAL must be an integer constant. */
3722 if (TREE_CODE (val) != INTEGER_CST
3723 || TREE_OVERFLOW (val))
3724 return NULL_TREE;
3726 width = TYPE_PRECISION (t);
3727 if (wi::only_sign_bit_p (val, width))
3728 return exp;
3730 /* Handle extension from a narrower type. */
3731 if (TREE_CODE (exp) == NOP_EXPR
3732 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3733 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3735 return NULL_TREE;
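/* Editor's illustrative sketch (an addition, not part of GCC): assuming a
   32-bit two's-complement int, the sign-bit constant is 0x80000000, so a
   caller can rewrite a sign-bit test as a comparison against zero:

     #include <assert.h>

     int main (void)
     {
       int x = -7;
       assert (((x & 0x80000000u) != 0) == (x < 0));
       x = 7;
       assert (((x & 0x80000000u) != 0) == (x < 0));
       return 0;
     }
*/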
3738 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3739 to be evaluated unconditionally. */
3741 static int
3742 simple_operand_p (const_tree exp)
3744 /* Strip any conversions that don't change the machine mode. */
3745 STRIP_NOPS (exp);
3747 return (CONSTANT_CLASS_P (exp)
3748 || TREE_CODE (exp) == SSA_NAME
3749 || (DECL_P (exp)
3750 && ! TREE_ADDRESSABLE (exp)
3751 && ! TREE_THIS_VOLATILE (exp)
3752 && ! DECL_NONLOCAL (exp)
3753 /* Don't regard global variables as simple. They may be
3754 allocated in ways unknown to the compiler (shared memory,
3755 #pragma weak, etc). */
3756 && ! TREE_PUBLIC (exp)
3757 && ! DECL_EXTERNAL (exp)
3758 /* Weakrefs are not safe to be read, since they can be NULL.
3759 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3760 have DECL_WEAK flag set. */
3761 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3762 /* Loading a static variable is unduly expensive, but global
3763 registers aren't expensive. */
3764 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3767 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3768 to be evaluated unconditionally.
3769 In addition to simple_operand_p, we assume that comparisons, conversions,
3770 and logic-not operations are simple, if their operands are simple, too. */
3772 static bool
3773 simple_operand_p_2 (tree exp)
3775 enum tree_code code;
3777 if (TREE_SIDE_EFFECTS (exp)
3778 || tree_could_trap_p (exp))
3779 return false;
3781 while (CONVERT_EXPR_P (exp))
3782 exp = TREE_OPERAND (exp, 0);
3784 code = TREE_CODE (exp);
3786 if (TREE_CODE_CLASS (code) == tcc_comparison)
3787 return (simple_operand_p (TREE_OPERAND (exp, 0))
3788 && simple_operand_p (TREE_OPERAND (exp, 1)));
3790 if (code == TRUTH_NOT_EXPR)
3791 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3793 return simple_operand_p (exp);
3797 /* The following functions are subroutines to fold_range_test and allow it to
3798 try to change a logical combination of comparisons into a range test.
3800 For example, both
3801 X == 2 || X == 3 || X == 4 || X == 5
3802 and
3803 X >= 2 && X <= 5
3804 are converted to
3805 (unsigned) (X - 2) <= 3
3807 We describe each set of comparisons as being either inside or outside
3808 a range, using a variable named like IN_P, and then describe the
3809 range with a lower and upper bound. If one of the bounds is omitted,
3810 it represents either the highest or lowest value of the type.
3812 In the comments below, we represent a range by two numbers in brackets
3813 preceded by a "+" to designate being inside that range, or a "-" to
3814 designate being outside that range, so the condition can be inverted by
3815 flipping the prefix. An omitted bound is represented by a "-". For
3816 example, "- [-, 10]" means being outside the range starting at the lowest
3817 possible value and ending at 10, in other words, being greater than 10.
3818 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3819 always false.
3821 We set up things so that the missing bounds are handled in a consistent
3822 manner so neither a missing bound nor "true" and "false" need to be
3823 handled using a special case. */
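/* A minimal sketch (hypothetical, assuming a 32-bit int) of the example
   in the comment above: both forms collapse to a single unsigned
   comparison, because subtracting the low bound maps [2, 5] onto [0, 3]
   and any value outside the range wraps to something larger than 3.  */
#if 0
static int
in_2_to_5 (int x)
{
  return (unsigned) (x - 2) <= 3u;   /* X == 2 || X == 3 || X == 4 || X == 5 */
}
#endif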
3825 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3826 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3827 and UPPER1_P are nonzero if the respective argument is an upper bound
3828 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3829 must be specified for a comparison. ARG1 will be converted to ARG0's
3830 type if both are specified. */
3832 static tree
3833 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3834 tree arg1, int upper1_p)
3836 tree tem;
3837 int result;
3838 int sgn0, sgn1;
3840 /* If neither arg represents infinity, do the normal operation.
3841 Else, if not a comparison, return infinity. Else handle the special
3842 comparison rules. Note that most of the cases below won't occur, but
3843 are handled for consistency. */
3845 if (arg0 != 0 && arg1 != 0)
3847 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3848 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3849 STRIP_NOPS (tem);
3850 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3853 if (TREE_CODE_CLASS (code) != tcc_comparison)
3854 return 0;
3856 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3857 for neither. In real maths we could not assume open-ended ranges are
3858 the same. But this is computer arithmetic, where numbers are finite:
3859 we can therefore treat any missing bound as a value Z lying beyond
3860 every representable number, which permits us to treat unbounded
3861 ranges as equal. */
3862 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3863 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3864 switch (code)
3866 case EQ_EXPR:
3867 result = sgn0 == sgn1;
3868 break;
3869 case NE_EXPR:
3870 result = sgn0 != sgn1;
3871 break;
3872 case LT_EXPR:
3873 result = sgn0 < sgn1;
3874 break;
3875 case LE_EXPR:
3876 result = sgn0 <= sgn1;
3877 break;
3878 case GT_EXPR:
3879 result = sgn0 > sgn1;
3880 break;
3881 case GE_EXPR:
3882 result = sgn0 >= sgn1;
3883 break;
3884 default:
3885 gcc_unreachable ();
3888 return constant_boolean_node (result, type);
3891 /* Helper routine for make_range. Perform one step for it, return
3892 new expression if the loop should continue or NULL_TREE if it should
3893 stop. */
3895 tree
3896 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3897 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3898 bool *strict_overflow_p)
3900 tree arg0_type = TREE_TYPE (arg0);
3901 tree n_low, n_high, low = *p_low, high = *p_high;
3902 int in_p = *p_in_p, n_in_p;
3904 switch (code)
3906 case TRUTH_NOT_EXPR:
3907 /* We can only do something if the range is testing for zero. */
3908 if (low == NULL_TREE || high == NULL_TREE
3909 || ! integer_zerop (low) || ! integer_zerop (high))
3910 return NULL_TREE;
3911 *p_in_p = ! in_p;
3912 return arg0;
3914 case EQ_EXPR: case NE_EXPR:
3915 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3916 /* We can only do something if the range is testing for zero
3917 and if the second operand is an integer constant. Note that
3918 saying something is "in" the range we make is done by
3919 complementing IN_P since it will set in the initial case of
3920 being not equal to zero; "out" is leaving it alone. */
3921 if (low == NULL_TREE || high == NULL_TREE
3922 || ! integer_zerop (low) || ! integer_zerop (high)
3923 || TREE_CODE (arg1) != INTEGER_CST)
3924 return NULL_TREE;
3926 switch (code)
3928 case NE_EXPR: /* - [c, c] */
3929 low = high = arg1;
3930 break;
3931 case EQ_EXPR: /* + [c, c] */
3932 in_p = ! in_p, low = high = arg1;
3933 break;
3934 case GT_EXPR: /* - [-, c] */
3935 low = 0, high = arg1;
3936 break;
3937 case GE_EXPR: /* + [c, -] */
3938 in_p = ! in_p, low = arg1, high = 0;
3939 break;
3940 case LT_EXPR: /* - [c, -] */
3941 low = arg1, high = 0;
3942 break;
3943 case LE_EXPR: /* + [-, c] */
3944 in_p = ! in_p, low = 0, high = arg1;
3945 break;
3946 default:
3947 gcc_unreachable ();
3950 /* If this is an unsigned comparison, we also know that EXP is
3951 greater than or equal to zero. We base the range tests we make
3952 on that fact, so we record it here so we can parse existing
3953 range tests. We test arg0_type since often the return type
3954 of, e.g. EQ_EXPR, is boolean. */
3955 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3957 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3958 in_p, low, high, 1,
3959 build_int_cst (arg0_type, 0),
3960 NULL_TREE))
3961 return NULL_TREE;
3963 in_p = n_in_p, low = n_low, high = n_high;
3965 /* If the high bound is missing, but we have a nonzero low
3966 bound, reverse the range so it goes from zero to the low bound
3967 minus 1. */
3968 if (high == 0 && low && ! integer_zerop (low))
3970 in_p = ! in_p;
3971 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3972 build_int_cst (TREE_TYPE (low), 1), 0);
3973 low = build_int_cst (arg0_type, 0);
3977 *p_low = low;
3978 *p_high = high;
3979 *p_in_p = in_p;
3980 return arg0;
3982 case NEGATE_EXPR:
3983 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3984 low and high are non-NULL, then normalize will DTRT (do the right thing). */
3985 if (!TYPE_UNSIGNED (arg0_type)
3986 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3988 if (low == NULL_TREE)
3989 low = TYPE_MIN_VALUE (arg0_type);
3990 if (high == NULL_TREE)
3991 high = TYPE_MAX_VALUE (arg0_type);
3994 /* (-x) IN [a,b] -> x in [-b, -a] */
3995 n_low = range_binop (MINUS_EXPR, exp_type,
3996 build_int_cst (exp_type, 0),
3997 0, high, 1);
3998 n_high = range_binop (MINUS_EXPR, exp_type,
3999 build_int_cst (exp_type, 0),
4000 0, low, 0);
4001 if (n_high != 0 && TREE_OVERFLOW (n_high))
4002 return NULL_TREE;
4003 goto normalize;
4005 case BIT_NOT_EXPR:
4006 /* ~ X -> -X - 1 */
4007 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4008 build_int_cst (exp_type, 1));
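/* A quick sanity sketch (hypothetical) of the identity used just above:
   on two's-complement targets, ~X equals -X - 1.  */
#if 0
static int
bit_not_identity (int x)
{
  /* 1 for every x except INT_MIN, where -x itself overflows.  */
  return ~x == -x - 1;
}
#endif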
4010 case PLUS_EXPR:
4011 case MINUS_EXPR:
4012 if (TREE_CODE (arg1) != INTEGER_CST)
4013 return NULL_TREE;
4015 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4016 move a constant to the other side. */
4017 if (!TYPE_UNSIGNED (arg0_type)
4018 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4019 return NULL_TREE;
4021 /* If EXP is signed, any overflow in the computation is undefined,
4022 so we don't worry about it so long as our computations on
4023 the bounds don't overflow. For unsigned, overflow is defined
4024 and this is exactly the right thing. */
4025 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4026 arg0_type, low, 0, arg1, 0);
4027 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4028 arg0_type, high, 1, arg1, 0);
4029 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4030 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4031 return NULL_TREE;
4033 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4034 *strict_overflow_p = true;
4036 normalize:
4037 /* Check for an unsigned range which has wrapped around the maximum
4038 value thus making n_high < n_low, and normalize it. */
4039 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4041 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4042 build_int_cst (TREE_TYPE (n_high), 1), 0);
4043 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4044 build_int_cst (TREE_TYPE (n_low), 1), 0);
4046 /* If the range is of the form +/- [ x+1, x ], we won't
4047 be able to normalize it. But then, it represents the
4048 whole range or the empty set, so make it
4049 +/- [ -, - ]. */
4050 if (tree_int_cst_equal (n_low, low)
4051 && tree_int_cst_equal (n_high, high))
4052 low = high = 0;
4053 else
4054 in_p = ! in_p;
4056 else
4057 low = n_low, high = n_high;
4059 *p_low = low;
4060 *p_high = high;
4061 *p_in_p = in_p;
4062 return arg0;
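/* A sketch (hypothetical, 8-bit unsigned example) of the wrap-around
   normalization above: the wrapped range + [250, 5] cannot be kept as-is,
   so IN_P is flipped and it becomes the complement range - [6, 249].  */
#if 0
static int
wrapped_range_u8 (unsigned char x)
{
  return !(x >= 6 && x <= 249);   /* same as x >= 250 || x <= 5 */
}
#endif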
4064 CASE_CONVERT:
4065 case NON_LVALUE_EXPR:
4066 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4067 return NULL_TREE;
4069 if (! INTEGRAL_TYPE_P (arg0_type)
4070 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4071 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4072 return NULL_TREE;
4074 n_low = low, n_high = high;
4076 if (n_low != 0)
4077 n_low = fold_convert_loc (loc, arg0_type, n_low);
4079 if (n_high != 0)
4080 n_high = fold_convert_loc (loc, arg0_type, n_high);
4082 /* If we're converting arg0 from an unsigned type to exp's
4083 signed type, we will be doing the comparison as unsigned.
4084 The tests above have already verified that LOW and HIGH
4085 are both positive.
4087 So we have to ensure that we will handle large unsigned
4088 values the same way that the current signed bounds treat
4089 negative values. */
4091 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4093 tree high_positive;
4094 tree equiv_type;
4095 /* For fixed-point modes, we need to pass the saturating flag
4096 as the 2nd parameter. */
4097 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4098 equiv_type
4099 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4100 TYPE_SATURATING (arg0_type));
4101 else
4102 equiv_type
4103 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4105 /* A range without an upper bound is, naturally, unbounded.
4106 Since convert would have cropped a very large value, use
4107 the max value for the destination type. */
4108 high_positive
4109 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4110 : TYPE_MAX_VALUE (arg0_type);
4112 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4113 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4114 fold_convert_loc (loc, arg0_type,
4115 high_positive),
4116 build_int_cst (arg0_type, 1));
4118 /* If the low bound is specified, "and" the range with the
4119 range for which the original unsigned value will be
4120 positive. */
4121 if (low != 0)
4123 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4124 1, fold_convert_loc (loc, arg0_type,
4125 integer_zero_node),
4126 high_positive))
4127 return NULL_TREE;
4129 in_p = (n_in_p == in_p);
4131 else
4133 /* Otherwise, "or" the range with the range of the input
4134 that will be interpreted as negative. */
4135 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4136 1, fold_convert_loc (loc, arg0_type,
4137 integer_zero_node),
4138 high_positive))
4139 return NULL_TREE;
4141 in_p = (in_p != n_in_p);
4145 *p_low = n_low;
4146 *p_high = n_high;
4147 *p_in_p = in_p;
4148 return arg0;
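/* A sketch (hypothetical) of why the merge above is needed: once an
   unsigned value is viewed through a narrower signed comparison, the
   upper half of its range reads as negative values.  */
#if 0
static int
lt10_as_signed (unsigned char u)
{
  /* On the usual two's-complement targets this is true both for
     u in [0, 9] and for u in [128, 255], so the unsigned range must be
     intersected or unioned with [0, high_positive] as done above.  */
  return (signed char) u < 10;
}
#endif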
4150 default:
4151 return NULL_TREE;
4155 /* Given EXP, a logical expression, set the range it is testing into
4156 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4157 actually being tested. *PLOW and *PHIGH will be made of the same
4158 type as the returned expression. If EXP is not a comparison, we
4159 will most likely not be returning a useful value and range. Set
4160 *STRICT_OVERFLOW_P to true if the return value is only valid
4161 because signed overflow is undefined; otherwise, do not change
4162 *STRICT_OVERFLOW_P. */
4164 tree
4165 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4166 bool *strict_overflow_p)
4168 enum tree_code code;
4169 tree arg0, arg1 = NULL_TREE;
4170 tree exp_type, nexp;
4171 int in_p;
4172 tree low, high;
4173 location_t loc = EXPR_LOCATION (exp);
4175 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4176 and see if we can refine the range. Some of the cases below may not
4177 happen, but it doesn't seem worth worrying about this. We "continue"
4178 the outer loop when we've changed something; otherwise we "break"
4179 the switch, which will "break" the while. */
4181 in_p = 0;
4182 low = high = build_int_cst (TREE_TYPE (exp), 0);
4184 while (1)
4186 code = TREE_CODE (exp);
4187 exp_type = TREE_TYPE (exp);
4188 arg0 = NULL_TREE;
4190 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4192 if (TREE_OPERAND_LENGTH (exp) > 0)
4193 arg0 = TREE_OPERAND (exp, 0);
4194 if (TREE_CODE_CLASS (code) == tcc_binary
4195 || TREE_CODE_CLASS (code) == tcc_comparison
4196 || (TREE_CODE_CLASS (code) == tcc_expression
4197 && TREE_OPERAND_LENGTH (exp) > 1))
4198 arg1 = TREE_OPERAND (exp, 1);
4200 if (arg0 == NULL_TREE)
4201 break;
4203 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4204 &high, &in_p, strict_overflow_p);
4205 if (nexp == NULL_TREE)
4206 break;
4207 exp = nexp;
4210 /* If EXP is a constant, we can evaluate whether this is true or false. */
4211 if (TREE_CODE (exp) == INTEGER_CST)
4213 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4214 exp, 0, low, 0))
4215 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4216 exp, 1, high, 1)));
4217 low = high = 0;
4218 exp = 0;
4221 *pin_p = in_p, *plow = low, *phigh = high;
4222 return exp;
4225 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4226 type, TYPE, return an expression to test if EXP is in (or out of, depending
4227 on IN_P) the range. Return 0 if the test couldn't be created. */
4229 tree
4230 build_range_check (location_t loc, tree type, tree exp, int in_p,
4231 tree low, tree high)
4233 tree etype = TREE_TYPE (exp), value;
4235 #ifdef HAVE_canonicalize_funcptr_for_compare
4236 /* Disable this optimization for function pointer expressions
4237 on targets that require function pointer canonicalization. */
4238 if (HAVE_canonicalize_funcptr_for_compare
4239 && TREE_CODE (etype) == POINTER_TYPE
4240 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4241 return NULL_TREE;
4242 #endif
4244 if (! in_p)
4246 value = build_range_check (loc, type, exp, 1, low, high);
4247 if (value != 0)
4248 return invert_truthvalue_loc (loc, value);
4250 return 0;
4253 if (low == 0 && high == 0)
4254 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4256 if (low == 0)
4257 return fold_build2_loc (loc, LE_EXPR, type, exp,
4258 fold_convert_loc (loc, etype, high));
4260 if (high == 0)
4261 return fold_build2_loc (loc, GE_EXPR, type, exp,
4262 fold_convert_loc (loc, etype, low));
4264 if (operand_equal_p (low, high, 0))
4265 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4266 fold_convert_loc (loc, etype, low));
4268 if (integer_zerop (low))
4270 if (! TYPE_UNSIGNED (etype))
4272 etype = unsigned_type_for (etype);
4273 high = fold_convert_loc (loc, etype, high);
4274 exp = fold_convert_loc (loc, etype, exp);
4276 return build_range_check (loc, type, exp, 1, 0, high);
4279 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4280 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4282 int prec = TYPE_PRECISION (etype);
4284 if (wi::mask (prec - 1, false, prec) == high)
4286 if (TYPE_UNSIGNED (etype))
4288 tree signed_etype = signed_type_for (etype);
4289 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4290 etype
4291 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4292 else
4293 etype = signed_etype;
4294 exp = fold_convert_loc (loc, etype, exp);
4296 return fold_build2_loc (loc, GT_EXPR, type, exp,
4297 build_int_cst (etype, 0));
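/* A sketch (hypothetical, 8-bit example) of the transformation above:
   for an unsigned char c, c >= 1 && c <= 127 holds exactly when c is
   nonzero with its top bit clear, i.e. when (signed char) c > 0.  */
#if 0
static int
in_1_to_127 (unsigned char c)
{
  return (signed char) c > 0;
}
#endif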
4301 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4302 This requires wrap-around arithmetic for the type of the expression.
4303 First make sure that arithmetic in this type is valid, then make sure
4304 that it wraps around. */
4305 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4306 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4307 TYPE_UNSIGNED (etype));
4309 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4311 tree utype, minv, maxv;
4313 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4314 for the type in question, as we rely on this here. */
4315 utype = unsigned_type_for (etype);
4316 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4317 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4318 build_int_cst (TREE_TYPE (maxv), 1), 1);
4319 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4321 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4322 minv, 1, maxv, 1)))
4323 etype = utype;
4324 else
4325 return 0;
4328 high = fold_convert_loc (loc, etype, high);
4329 low = fold_convert_loc (loc, etype, low);
4330 exp = fold_convert_loc (loc, etype, exp);
4332 value = const_binop (MINUS_EXPR, high, low);
4335 if (POINTER_TYPE_P (etype))
4337 if (value != 0 && !TREE_OVERFLOW (value))
4339 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4340 return build_range_check (loc, type,
4341 fold_build_pointer_plus_loc (loc, exp, low),
4342 1, build_int_cst (etype, 0), value);
4344 return 0;
4347 if (value != 0 && !TREE_OVERFLOW (value))
4348 return build_range_check (loc, type,
4349 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4350 1, build_int_cst (etype, 0), value);
4352 return 0;
4355 /* Return the predecessor of VAL in its type, handling the infinite case. */
4357 static tree
4358 range_predecessor (tree val)
4360 tree type = TREE_TYPE (val);
4362 if (INTEGRAL_TYPE_P (type)
4363 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4364 return 0;
4365 else
4366 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4367 build_int_cst (TREE_TYPE (val), 1), 0);
4370 /* Return the successor of VAL in its type, handling the infinite case. */
4372 static tree
4373 range_successor (tree val)
4375 tree type = TREE_TYPE (val);
4377 if (INTEGRAL_TYPE_P (type)
4378 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4379 return 0;
4380 else
4381 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4382 build_int_cst (TREE_TYPE (val), 1), 0);
4385 /* Given two ranges, see if we can merge them into one. Return 1 if we
4386 can, 0 if we can't. Set the output range into the specified parameters. */
4388 bool
4389 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4390 tree high0, int in1_p, tree low1, tree high1)
4392 int no_overlap;
4393 int subset;
4394 int temp;
4395 tree tem;
4396 int in_p;
4397 tree low, high;
4398 int lowequal = ((low0 == 0 && low1 == 0)
4399 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 low0, 0, low1, 0)));
4401 int highequal = ((high0 == 0 && high1 == 0)
4402 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4403 high0, 1, high1, 1)));
4405 /* Make range 0 be the range that starts first, or ends last if they
4406 start at the same value. Swap them if that is not already the case. */
4407 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 low0, 0, low1, 0))
4409 || (lowequal
4410 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4411 high1, 1, high0, 1))))
4413 temp = in0_p, in0_p = in1_p, in1_p = temp;
4414 tem = low0, low0 = low1, low1 = tem;
4415 tem = high0, high0 = high1, high1 = tem;
4418 /* Now flag two cases, whether the ranges are disjoint or whether the
4419 second range is totally subsumed in the first. Note that the tests
4420 below are simplified by the ones above. */
4421 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4422 high0, 1, low1, 0));
4423 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4424 high1, 1, high0, 1));
4426 /* We now have four cases, depending on whether we are including or
4427 excluding the two ranges. */
4428 if (in0_p && in1_p)
4430 /* If they don't overlap, the result is false. If the second range
4431 is a subset it is the result. Otherwise, the range is from the start
4432 of the second to the end of the first. */
4433 if (no_overlap)
4434 in_p = 0, low = high = 0;
4435 else if (subset)
4436 in_p = 1, low = low1, high = high1;
4437 else
4438 in_p = 1, low = low1, high = high0;
4441 else if (in0_p && ! in1_p)
4443 /* If they don't overlap, the result is the first range. If they are
4444 equal, the result is false. If the second range is a subset of the
4445 first, and the ranges begin at the same place, we go from just after
4446 the end of the second range to the end of the first. If the second
4447 range is not a subset of the first, or if it is a subset and both
4448 ranges end at the same place, the range starts at the start of the
4449 first range and ends just before the second range.
4450 Otherwise, we can't describe this as a single range. */
4451 if (no_overlap)
4452 in_p = 1, low = low0, high = high0;
4453 else if (lowequal && highequal)
4454 in_p = 0, low = high = 0;
4455 else if (subset && lowequal)
4457 low = range_successor (high1);
4458 high = high0;
4459 in_p = 1;
4460 if (low == 0)
4462 /* We are in the weird situation where high0 > high1 but
4463 high1 has no successor. Punt. */
4464 return 0;
4467 else if (! subset || highequal)
4469 low = low0;
4470 high = range_predecessor (low1);
4471 in_p = 1;
4472 if (high == 0)
4474 /* low0 < low1 but low1 has no predecessor. Punt. */
4475 return 0;
4478 else
4479 return 0;
4482 else if (! in0_p && in1_p)
4484 /* If they don't overlap, the result is the second range. If the second
4485 is a subset of the first, the result is false. Otherwise,
4486 the range starts just after the first range and ends at the
4487 end of the second. */
4488 if (no_overlap)
4489 in_p = 1, low = low1, high = high1;
4490 else if (subset || highequal)
4491 in_p = 0, low = high = 0;
4492 else
4494 low = range_successor (high0);
4495 high = high1;
4496 in_p = 1;
4497 if (low == 0)
4499 /* high1 > high0 but high0 has no successor. Punt. */
4500 return 0;
4505 else
4507 /* The case where we are excluding both ranges. Here the complex case
4508 is if they don't overlap. In that case, the only time we have a
4509 range is if they are adjacent. If the second is a subset of the
4510 first, the result is the first. Otherwise, the range to exclude
4511 starts at the beginning of the first range and ends at the end of the
4512 second. */
4513 if (no_overlap)
4515 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4516 range_successor (high0),
4517 1, low1, 0)))
4518 in_p = 0, low = low0, high = high1;
4519 else
4521 /* Canonicalize - [min, x] into - [-, x]. */
4522 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4523 switch (TREE_CODE (TREE_TYPE (low0)))
4525 case ENUMERAL_TYPE:
4526 if (TYPE_PRECISION (TREE_TYPE (low0))
4527 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4528 break;
4529 /* FALLTHROUGH */
4530 case INTEGER_TYPE:
4531 if (tree_int_cst_equal (low0,
4532 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4533 low0 = 0;
4534 break;
4535 case POINTER_TYPE:
4536 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4537 && integer_zerop (low0))
4538 low0 = 0;
4539 break;
4540 default:
4541 break;
4544 /* Canonicalize - [x, max] into - [x, -]. */
4545 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4546 switch (TREE_CODE (TREE_TYPE (high1)))
4548 case ENUMERAL_TYPE:
4549 if (TYPE_PRECISION (TREE_TYPE (high1))
4550 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4551 break;
4552 /* FALLTHROUGH */
4553 case INTEGER_TYPE:
4554 if (tree_int_cst_equal (high1,
4555 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4556 high1 = 0;
4557 break;
4558 case POINTER_TYPE:
4559 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4560 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4561 high1, 1,
4562 build_int_cst (TREE_TYPE (high1), 1),
4563 1)))
4564 high1 = 0;
4565 break;
4566 default:
4567 break;
4570 /* The ranges might also be adjacent between the maximum and
4571 minimum values of the given type. For
4572 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4573 return + [x + 1, y - 1]. */
4574 if (low0 == 0 && high1 == 0)
4576 low = range_successor (high0);
4577 high = range_predecessor (low1);
4578 if (low == 0 || high == 0)
4579 return 0;
4581 in_p = 1;
4583 else
4584 return 0;
4587 else if (subset)
4588 in_p = 0, low = low0, high = high0;
4589 else
4590 in_p = 0, low = low0, high = high1;
4593 *pin_p = in_p, *plow = low, *phigh = high;
4594 return 1;
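/* A worked sketch (hypothetical values) for the in0_p && in1_p case
   above: intersecting two included ranges keeps only the overlap.  */
#if 0
static int
intersect_example (int x)
{
  /* + [2, 5] merged with + [4, 10] yields + [4, 5].  */
  return (x >= 2 && x <= 5) && (x >= 4 && x <= 10);   /* == (x >= 4 && x <= 5) */
}
#endif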
4598 /* Subroutine of fold, looking inside expressions of the form
4599 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4600 of the COND_EXPR. This function is being used also to optimize
4601 A op B ? C : A, by reversing the comparison first.
4603 Return a folded expression whose code is not a COND_EXPR
4604 anymore, or NULL_TREE if no folding opportunity is found. */
4606 static tree
4607 fold_cond_expr_with_comparison (location_t loc, tree type,
4608 tree arg0, tree arg1, tree arg2)
4610 enum tree_code comp_code = TREE_CODE (arg0);
4611 tree arg00 = TREE_OPERAND (arg0, 0);
4612 tree arg01 = TREE_OPERAND (arg0, 1);
4613 tree arg1_type = TREE_TYPE (arg1);
4614 tree tem;
4616 STRIP_NOPS (arg1);
4617 STRIP_NOPS (arg2);
4619 /* If we have A op 0 ? A : -A, consider applying the following
4620 transformations:
4622 A == 0? A : -A same as -A
4623 A != 0? A : -A same as A
4624 A >= 0? A : -A same as abs (A)
4625 A > 0? A : -A same as abs (A)
4626 A <= 0? A : -A same as -abs (A)
4627 A < 0? A : -A same as -abs (A)
4629 None of these transformations work for modes with signed
4630 zeros. If A is +/-0, the first two transformations will
4631 change the sign of the result (from +0 to -0, or vice
4632 versa). The last four will fix the sign of the result,
4633 even though the original expressions could be positive or
4634 negative, depending on the sign of A.
4636 Note that all these transformations are correct if A is
4637 NaN, since the two alternatives (A and -A) are also NaNs. */
4638 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4639 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4640 ? real_zerop (arg01)
4641 : integer_zerop (arg01))
4642 && ((TREE_CODE (arg2) == NEGATE_EXPR
4643 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4644 /* In the case that A is of the form X-Y, '-A' (arg2) may
4645 have already been folded to Y-X, check for that. */
4646 || (TREE_CODE (arg1) == MINUS_EXPR
4647 && TREE_CODE (arg2) == MINUS_EXPR
4648 && operand_equal_p (TREE_OPERAND (arg1, 0),
4649 TREE_OPERAND (arg2, 1), 0)
4650 && operand_equal_p (TREE_OPERAND (arg1, 1),
4651 TREE_OPERAND (arg2, 0), 0))))
4652 switch (comp_code)
4654 case EQ_EXPR:
4655 case UNEQ_EXPR:
4656 tem = fold_convert_loc (loc, arg1_type, arg1);
4657 return pedantic_non_lvalue_loc (loc,
4658 fold_convert_loc (loc, type,
4659 negate_expr (tem)));
4660 case NE_EXPR:
4661 case LTGT_EXPR:
4662 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4663 case UNGE_EXPR:
4664 case UNGT_EXPR:
4665 if (flag_trapping_math)
4666 break;
4667 /* Fall through. */
4668 case GE_EXPR:
4669 case GT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert_loc (loc, signed_type_for
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4675 case UNLE_EXPR:
4676 case UNLT_EXPR:
4677 if (flag_trapping_math)
4678 break;
4679 case LE_EXPR:
4680 case LT_EXPR:
4681 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4682 arg1 = fold_convert_loc (loc, signed_type_for
4683 (TREE_TYPE (arg1)), arg1);
4684 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4685 return negate_expr (fold_convert_loc (loc, type, tem));
4686 default:
4687 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4688 break;
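/* A sketch (hypothetical) of the GE/GT rows in the table above: with
   signed zeros out of the picture, selecting between A and -A on the
   sign of A is just the absolute value.  */
#if 0
static double
abs_via_select (double a)
{
  /* A >= 0 ? A : -A same as abs (A); wrong for IEEE -0.0, which is why
     HONOR_SIGNED_ZEROS is checked before this transformation.  */
  return a >= 0 ? a : -a;
}
#endif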
4691 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4692 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4693 both transformations are correct when A is NaN: A != 0
4694 is then true, and A == 0 is false. */
4696 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4697 && integer_zerop (arg01) && integer_zerop (arg2))
4699 if (comp_code == NE_EXPR)
4700 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4701 else if (comp_code == EQ_EXPR)
4702 return build_zero_cst (type);
4705 /* Try some transformations of A op B ? A : B.
4707 A == B? A : B same as B
4708 A != B? A : B same as A
4709 A >= B? A : B same as max (A, B)
4710 A > B? A : B same as max (B, A)
4711 A <= B? A : B same as min (A, B)
4712 A < B? A : B same as min (B, A)
4714 As above, these transformations don't work in the presence
4715 of signed zeros. For example, if A and B are zeros of
4716 opposite sign, the first two transformations will change
4717 the sign of the result. In the last four, the original
4718 expressions give different results for (A=+0, B=-0) and
4719 (A=-0, B=+0), but the transformed expressions do not.
4721 The first two transformations are correct if either A or B
4722 is a NaN. In the first transformation, the condition will
4723 be false, and B will indeed be chosen. In the case of the
4724 second transformation, the condition A != B will be true,
4725 and A will be chosen.
4727 The conversions to max() and min() are not correct if B is
4728 a number and A is not. The conditions in the original
4729 expressions will be false, so all four give B. The min()
4730 and max() versions would give a NaN instead. */
4731 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4732 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4733 /* Avoid these transformations if the COND_EXPR may be used
4734 as an lvalue in the C++ front-end. PR c++/19199. */
4735 && (in_gimple_form
4736 || VECTOR_TYPE_P (type)
4737 || (strcmp (lang_hooks.name, "GNU C++") != 0
4738 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4739 || ! maybe_lvalue_p (arg1)
4740 || ! maybe_lvalue_p (arg2)))
4742 tree comp_op0 = arg00;
4743 tree comp_op1 = arg01;
4744 tree comp_type = TREE_TYPE (comp_op0);
4746 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4747 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4749 comp_type = type;
4750 comp_op0 = arg1;
4751 comp_op1 = arg2;
4754 switch (comp_code)
4756 case EQ_EXPR:
4757 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4758 case NE_EXPR:
4759 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4760 case LE_EXPR:
4761 case LT_EXPR:
4762 case UNLE_EXPR:
4763 case UNLT_EXPR:
4764 /* In C++ a ?: expression can be an lvalue, so put the
4765 operand which will be used if they are equal first
4766 so that we can convert this back to the
4767 corresponding COND_EXPR. */
4768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4770 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4771 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4772 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4773 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4774 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4775 comp_op1, comp_op0);
4776 return pedantic_non_lvalue_loc (loc,
4777 fold_convert_loc (loc, type, tem));
4779 break;
4780 case GE_EXPR:
4781 case GT_EXPR:
4782 case UNGE_EXPR:
4783 case UNGT_EXPR:
4784 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4786 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4787 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4788 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4789 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4790 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4791 comp_op1, comp_op0);
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, tem));
4795 break;
4796 case UNEQ_EXPR:
4797 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4798 return pedantic_non_lvalue_loc (loc,
4799 fold_convert_loc (loc, type, arg2));
4800 break;
4801 case LTGT_EXPR:
4802 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4803 return pedantic_non_lvalue_loc (loc,
4804 fold_convert_loc (loc, type, arg1));
4805 break;
4806 default:
4807 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4808 break;
4812 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4813 we might still be able to simplify this. For example,
4814 if C1 is one less or one more than C2, this might have started
4815 out as a MIN or MAX and been transformed by this function.
4816 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4818 if (INTEGRAL_TYPE_P (type)
4819 && TREE_CODE (arg01) == INTEGER_CST
4820 && TREE_CODE (arg2) == INTEGER_CST)
4821 switch (comp_code)
4823 case EQ_EXPR:
4824 if (TREE_CODE (arg1) == INTEGER_CST)
4825 break;
4826 /* We can replace A with C1 in this case. */
4827 arg1 = fold_convert_loc (loc, type, arg01);
4828 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4830 case LT_EXPR:
4831 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4832 MIN_EXPR, to preserve the signedness of the comparison. */
4833 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4834 OEP_ONLY_CONST)
4835 && operand_equal_p (arg01,
4836 const_binop (PLUS_EXPR, arg2,
4837 build_int_cst (type, 1)),
4838 OEP_ONLY_CONST))
4840 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4841 fold_convert_loc (loc, TREE_TYPE (arg00),
4842 arg2));
4843 return pedantic_non_lvalue_loc (loc,
4844 fold_convert_loc (loc, type, tem));
4846 break;
4848 case LE_EXPR:
4849 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4850 as above. */
4851 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4852 OEP_ONLY_CONST)
4853 && operand_equal_p (arg01,
4854 const_binop (MINUS_EXPR, arg2,
4855 build_int_cst (type, 1)),
4856 OEP_ONLY_CONST))
4858 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4859 fold_convert_loc (loc, TREE_TYPE (arg00),
4860 arg2));
4861 return pedantic_non_lvalue_loc (loc,
4862 fold_convert_loc (loc, type, tem));
4864 break;
4866 case GT_EXPR:
4867 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4868 MAX_EXPR, to preserve the signedness of the comparison. */
4869 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4870 OEP_ONLY_CONST)
4871 && operand_equal_p (arg01,
4872 const_binop (MINUS_EXPR, arg2,
4873 build_int_cst (type, 1)),
4874 OEP_ONLY_CONST))
4876 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4877 fold_convert_loc (loc, TREE_TYPE (arg00),
4878 arg2));
4879 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4881 break;
4883 case GE_EXPR:
4884 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4885 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4886 OEP_ONLY_CONST)
4887 && operand_equal_p (arg01,
4888 const_binop (PLUS_EXPR, arg2,
4889 build_int_cst (type, 1)),
4890 OEP_ONLY_CONST))
4892 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4893 fold_convert_loc (loc, TREE_TYPE (arg00),
4894 arg2));
4895 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4897 break;
4898 case NE_EXPR:
4899 break;
4900 default:
4901 gcc_unreachable ();
4904 return NULL_TREE;
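/* A sketch (hypothetical) of the MIN/MAX rows handled above: once NaNs
   and signed zeros are excluded, the conditional and MIN_EXPR are
   interchangeable.  */
#if 0
static int
min_via_select (int a, int b)
{
  return a <= b ? a : b;   /* A <= B ? A : B same as min (A, B) */
}
#endif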
4909 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4910 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4911 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4912 false) >= 2)
4913 #endif
4915 /* EXP is some logical combination of boolean tests. See if we can
4916 merge it into some range test. Return the new tree if so. */
4918 static tree
4919 fold_range_test (location_t loc, enum tree_code code, tree type,
4920 tree op0, tree op1)
4922 int or_op = (code == TRUTH_ORIF_EXPR
4923 || code == TRUTH_OR_EXPR);
4924 int in0_p, in1_p, in_p;
4925 tree low0, low1, low, high0, high1, high;
4926 bool strict_overflow_p = false;
4927 tree tem, lhs, rhs;
4928 const char * const warnmsg = G_("assuming signed overflow does not occur "
4929 "when simplifying range test");
4931 if (!INTEGRAL_TYPE_P (type))
4932 return 0;
4934 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4935 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4937 /* If this is an OR operation, invert both sides; we will invert
4938 again at the end. */
4939 if (or_op)
4940 in0_p = ! in0_p, in1_p = ! in1_p;
4942 /* If both expressions are the same, if we can merge the ranges, and we
4943 can build the range test, return it or it inverted. If one of the
4944 ranges is always true or always false, consider it to be the same
4945 expression as the other. */
4946 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4947 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4948 in1_p, low1, high1)
4949 && 0 != (tem = (build_range_check (loc, type,
4950 lhs != 0 ? lhs
4951 : rhs != 0 ? rhs : integer_zero_node,
4952 in_p, low, high))))
4954 if (strict_overflow_p)
4955 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4956 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4959 /* On machines where the branch cost is expensive, if this is a
4960 short-circuited branch and the underlying object on both sides
4961 is the same, make a non-short-circuit operation. */
4962 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4963 && lhs != 0 && rhs != 0
4964 && (code == TRUTH_ANDIF_EXPR
4965 || code == TRUTH_ORIF_EXPR)
4966 && operand_equal_p (lhs, rhs, 0))
4968 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4969 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4970 which cases we can't do this. */
4971 if (simple_operand_p (lhs))
4972 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4973 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4974 type, op0, op1);
4976 else if (!lang_hooks.decls.global_bindings_p ()
4977 && !CONTAINS_PLACEHOLDER_P (lhs))
4979 tree common = save_expr (lhs);
4981 if (0 != (lhs = build_range_check (loc, type, common,
4982 or_op ? ! in0_p : in0_p,
4983 low0, high0))
4984 && (0 != (rhs = build_range_check (loc, type, common,
4985 or_op ? ! in1_p : in1_p,
4986 low1, high1))))
4988 if (strict_overflow_p)
4989 fold_overflow_warning (warnmsg,
4990 WARN_STRICT_OVERFLOW_COMPARISON);
4991 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4992 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4993 type, lhs, rhs);
4998 return 0;
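/* A sketch (hypothetical) of the non-short-circuit rewrite above: when
   both operands are simple tests of the same object, TRUTH_ANDIF_EXPR
   can be replaced by the unconditional TRUTH_AND_EXPR, roughly:  */
#if 0
static int
non_short_circuit (int a)
{
  return (a > 2) & (a < 7);   /* instead of (a > 2) && (a < 7) */
}
#endif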
5001 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5002 bit value. Arrange things so the extra bits will be set to zero if and
5003 only if C is sign-extended to its full width. If MASK is nonzero,
5004 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5006 static tree
5007 unextend (tree c, int p, int unsignedp, tree mask)
5009 tree type = TREE_TYPE (c);
5010 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5011 tree temp;
5013 if (p == modesize || unsignedp)
5014 return c;
5016 /* We work by getting just the sign bit into the low-order bit, then
5017 into the high-order bit, then sign-extend. We then XOR that value
5018 with C. */
5019 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5021 /* We must use a signed type in order to get an arithmetic right shift.
5022 However, we must also avoid introducing accidental overflows, so that
5023 a subsequent call to integer_zerop will work. Hence we must
5024 do the type conversion here. At this point, the constant is either
5025 zero or one, and the conversion to a signed type can never overflow.
5026 We could get an overflow if this conversion is done anywhere else. */
5027 if (TYPE_UNSIGNED (type))
5028 temp = fold_convert (signed_type_for (type), temp);
5030 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5031 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5032 if (mask != 0)
5033 temp = const_binop (BIT_AND_EXPR, temp,
5034 fold_convert (TREE_TYPE (c), mask));
5035 /* If necessary, convert the type back to match the type of C. */
5036 if (TYPE_UNSIGNED (type))
5037 temp = fold_convert (type, temp);
5039 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
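/* A sketch (hypothetical) of the classic trick that unextend builds on:
   a P-bit value is sign-extended by XOR-ing with its sign bit and
   subtracting that bit back out.  */
#if 0
static int
sign_extend_p_bits (unsigned int c, int p)
{
  unsigned int m = 1u << (p - 1);   /* sign bit of the P-bit field */
  /* Assumes c fits in P bits; the result is the two's-complement
     reading of the field.  */
  return (int) ((c ^ m) - m);
}
#endif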
5042 /* For an expression that has the form
5043 (A && B) || ~B
5044 or
5045 (A || B) && ~B,
5046 we can drop one of the inner expressions and simplify to
5047 A || ~B
5048 or
5049 A && ~B
5050 LOC is the location of the resulting expression. OP is the inner
5051 logical operation (the left-hand side in the examples above), while CMPOP
5052 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5053 removing a condition that guards another, as in
5054 (A != NULL && A->...) || A == NULL
5055 which we must not transform. If RHS_ONLY is true, only eliminate the
5056 right-most operand of the inner logical operation. */
5058 static tree
5059 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5060 bool rhs_only)
5062 tree type = TREE_TYPE (cmpop);
5063 enum tree_code code = TREE_CODE (cmpop);
5064 enum tree_code truthop_code = TREE_CODE (op);
5065 tree lhs = TREE_OPERAND (op, 0);
5066 tree rhs = TREE_OPERAND (op, 1);
5067 tree orig_lhs = lhs, orig_rhs = rhs;
5068 enum tree_code rhs_code = TREE_CODE (rhs);
5069 enum tree_code lhs_code = TREE_CODE (lhs);
5070 enum tree_code inv_code;
5072 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5073 return NULL_TREE;
5075 if (TREE_CODE_CLASS (code) != tcc_comparison)
5076 return NULL_TREE;
5078 if (rhs_code == truthop_code)
5080 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5081 if (newrhs != NULL_TREE)
5083 rhs = newrhs;
5084 rhs_code = TREE_CODE (rhs);
5087 if (lhs_code == truthop_code && !rhs_only)
5089 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5090 if (newlhs != NULL_TREE)
5092 lhs = newlhs;
5093 lhs_code = TREE_CODE (lhs);
5097 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5098 if (inv_code == rhs_code
5099 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5100 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5101 return lhs;
5102 if (!rhs_only && inv_code == lhs_code
5103 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5105 return rhs;
5106 if (rhs != orig_rhs || lhs != orig_lhs)
5107 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5108 lhs, rhs);
5109 return NULL_TREE;
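/* A truth-table sketch (hypothetical) of the simplification above:
   (A && B) || !B equals A || !B, since when B is false the right arm
   decides, and when B is true the inner B is redundant.  */
#if 0
static int
drop_guarded_arm (int a, int b)
{
  return ((a && b) || !b) == (a || !b);   /* always 1 */
}
#endif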
5112 /* Find ways of folding logical expressions of LHS and RHS:
5113 Try to merge two comparisons to the same innermost item.
5114 Look for range tests like "ch >= '0' && ch <= '9'".
5115 Look for combinations of simple terms on machines with expensive branches
5116 and evaluate the RHS unconditionally.
5118 For example, if we have p->a == 2 && p->b == 4 and we can make an
5119 object large enough to span both A and B, we can do this with a comparison
5120 against the object ANDed with a mask.
5122 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5123 operations to do this with one comparison.
5125 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5126 function and the one above.
5128 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5129 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5131 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5132 two operands.
5134 We return the simplified tree or 0 if no optimization is possible. */
5136 static tree
5137 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5138 tree lhs, tree rhs)
5140 /* If this is the "or" of two comparisons, we can do something if
5141 the comparisons are NE_EXPR. If this is the "and", we can do something
5142 if the comparisons are EQ_EXPR. I.e.,
5143 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5145 WANTED_CODE is this operation code. For single bit fields, we can
5146 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5147 comparison for one-bit fields. */
5149 enum tree_code wanted_code;
5150 enum tree_code lcode, rcode;
5151 tree ll_arg, lr_arg, rl_arg, rr_arg;
5152 tree ll_inner, lr_inner, rl_inner, rr_inner;
5153 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5154 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5155 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5156 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5157 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5158 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5159 machine_mode lnmode, rnmode;
5160 tree ll_mask, lr_mask, rl_mask, rr_mask;
5161 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5162 tree l_const, r_const;
5163 tree lntype, rntype, result;
5164 HOST_WIDE_INT first_bit, end_bit;
5165 int volatilep;
5167 /* Start by getting the comparison codes. Fail if anything is volatile.
5168 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5169 it were surrounded with a NE_EXPR. */
5171 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5172 return 0;
5174 lcode = TREE_CODE (lhs);
5175 rcode = TREE_CODE (rhs);
5177 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5179 lhs = build2 (NE_EXPR, truth_type, lhs,
5180 build_int_cst (TREE_TYPE (lhs), 0));
5181 lcode = NE_EXPR;
5184 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5186 rhs = build2 (NE_EXPR, truth_type, rhs,
5187 build_int_cst (TREE_TYPE (rhs), 0));
5188 rcode = NE_EXPR;
5191 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5192 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5193 return 0;
5195 ll_arg = TREE_OPERAND (lhs, 0);
5196 lr_arg = TREE_OPERAND (lhs, 1);
5197 rl_arg = TREE_OPERAND (rhs, 0);
5198 rr_arg = TREE_OPERAND (rhs, 1);
5200 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5201 if (simple_operand_p (ll_arg)
5202 && simple_operand_p (lr_arg))
5204 if (operand_equal_p (ll_arg, rl_arg, 0)
5205 && operand_equal_p (lr_arg, rr_arg, 0))
5207 result = combine_comparisons (loc, code, lcode, rcode,
5208 truth_type, ll_arg, lr_arg);
5209 if (result)
5210 return result;
5212 else if (operand_equal_p (ll_arg, rr_arg, 0)
5213 && operand_equal_p (lr_arg, rl_arg, 0))
5215 result = combine_comparisons (loc, code, lcode,
5216 swap_tree_comparison (rcode),
5217 truth_type, ll_arg, lr_arg);
5218 if (result)
5219 return result;
5223 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5224 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5226 /* If the RHS can be evaluated unconditionally and its operands are
5227 simple, it wins to evaluate the RHS unconditionally on machines
5228 with expensive branches. In this case, this isn't a comparison
5229 that can be merged. */
5231 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5232 false) >= 2
5233 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5234 && simple_operand_p (rl_arg)
5235 && simple_operand_p (rr_arg))
5237 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5238 if (code == TRUTH_OR_EXPR
5239 && lcode == NE_EXPR && integer_zerop (lr_arg)
5240 && rcode == NE_EXPR && integer_zerop (rr_arg)
5241 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5243 return build2_loc (loc, NE_EXPR, truth_type,
5244 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5245 ll_arg, rl_arg),
5246 build_int_cst (TREE_TYPE (ll_arg), 0));
5248 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5249 if (code == TRUTH_AND_EXPR
5250 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5251 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5252 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5253 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5254 return build2_loc (loc, EQ_EXPR, truth_type,
5255 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5256 ll_arg, rl_arg),
5257 build_int_cst (TREE_TYPE (ll_arg), 0));
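/* A sketch (hypothetical) of the two rewrites above: both trade a
   short-circuit branch for a cheap bitwise OR on the shared integral
   type.  */
#if 0
static int
zero_tests_merged (unsigned a, unsigned b)
{
  /* (a != 0) || (b != 0)  becomes  (a | b) != 0;
     (a == 0) && (b == 0)  becomes  (a | b) == 0.  */
  return (a | b) == 0;
}
#endif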
5260 /* See if the comparisons can be merged. Then get all the parameters for
5261 each side. */
5263 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5264 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5265 return 0;
5267 volatilep = 0;
5268 ll_inner = decode_field_reference (loc, ll_arg,
5269 &ll_bitsize, &ll_bitpos, &ll_mode,
5270 &ll_unsignedp, &volatilep, &ll_mask,
5271 &ll_and_mask);
5272 lr_inner = decode_field_reference (loc, lr_arg,
5273 &lr_bitsize, &lr_bitpos, &lr_mode,
5274 &lr_unsignedp, &volatilep, &lr_mask,
5275 &lr_and_mask);
5276 rl_inner = decode_field_reference (loc, rl_arg,
5277 &rl_bitsize, &rl_bitpos, &rl_mode,
5278 &rl_unsignedp, &volatilep, &rl_mask,
5279 &rl_and_mask);
5280 rr_inner = decode_field_reference (loc, rr_arg,
5281 &rr_bitsize, &rr_bitpos, &rr_mode,
5282 &rr_unsignedp, &volatilep, &rr_mask,
5283 &rr_and_mask);
5285 /* The inner operation on the lhs of each comparison must be the same
5286 if we are to be able to do anything.
5287 Then see if we have constants. If not, the same must be true for
5288 the rhs's. */
5289 if (volatilep || ll_inner == 0 || rl_inner == 0
5290 || ! operand_equal_p (ll_inner, rl_inner, 0))
5291 return 0;
5293 if (TREE_CODE (lr_arg) == INTEGER_CST
5294 && TREE_CODE (rr_arg) == INTEGER_CST)
5295 l_const = lr_arg, r_const = rr_arg;
5296 else if (lr_inner == 0 || rr_inner == 0
5297 || ! operand_equal_p (lr_inner, rr_inner, 0))
5298 return 0;
5299 else
5300 l_const = r_const = 0;
5302 /* If either comparison code is not correct for our logical operation,
5303 fail. However, we can convert a one-bit comparison against zero into
5304 the opposite comparison against that bit being set in the field. */
5306 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5307 if (lcode != wanted_code)
5309 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5311 /* Make the left operand unsigned, since we are only interested
5312 in the value of one bit. Otherwise we are doing the wrong
5313 thing below. */
5314 ll_unsignedp = 1;
5315 l_const = ll_mask;
5317 else
5318 return 0;
5321 /* This is analogous to the code for l_const above. */
5322 if (rcode != wanted_code)
5324 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5326 rl_unsignedp = 1;
5327 r_const = rl_mask;
5329 else
5330 return 0;
5333 /* See if we can find a mode that contains both fields being compared on
5334 the left. If we can't, fail. Otherwise, update all constants and masks
5335 to be relative to a field of that size. */
5336 first_bit = MIN (ll_bitpos, rl_bitpos);
5337 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5338 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5339 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5340 volatilep);
5341 if (lnmode == VOIDmode)
5342 return 0;
5344 lnbitsize = GET_MODE_BITSIZE (lnmode);
5345 lnbitpos = first_bit & ~ (lnbitsize - 1);
5346 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5347 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5349 if (BYTES_BIG_ENDIAN)
5351 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5352 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5355 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5356 size_int (xll_bitpos));
5357 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5358 size_int (xrl_bitpos));
5360 if (l_const)
5362 l_const = fold_convert_loc (loc, lntype, l_const);
5363 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5364 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5365 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5366 fold_build1_loc (loc, BIT_NOT_EXPR,
5367 lntype, ll_mask))))
5369 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5371 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5374 if (r_const)
5376 r_const = fold_convert_loc (loc, lntype, r_const);
5377 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5378 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5379 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5380 fold_build1_loc (loc, BIT_NOT_EXPR,
5381 lntype, rl_mask))))
5383 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5385 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5389 /* If the right sides are not constant, do the same for them. Also,
5390 disallow this optimization if a size or signedness mismatch occurs
5391 between the left and right sides. */
5392 if (l_const == 0)
5394 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5395 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5396 /* Make sure the two fields on the right
5397 correspond to the left without being swapped. */
5398 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5399 return 0;
5401 first_bit = MIN (lr_bitpos, rr_bitpos);
5402 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5403 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5404 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5405 volatilep);
5406 if (rnmode == VOIDmode)
5407 return 0;
5409 rnbitsize = GET_MODE_BITSIZE (rnmode);
5410 rnbitpos = first_bit & ~ (rnbitsize - 1);
5411 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5412 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5414 if (BYTES_BIG_ENDIAN)
5416 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5417 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5420 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5421 rntype, lr_mask),
5422 size_int (xlr_bitpos));
5423 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5424 rntype, rr_mask),
5425 size_int (xrr_bitpos));
5427 /* Make a mask that corresponds to both fields being compared.
5428 Do this for both items being compared. If the operands are the
5429 same size and the bits being compared are in the same position
5430 then we can do this by masking both and comparing the masked
5431 results. */
5432 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5433 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5434 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5436 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5437 ll_unsignedp || rl_unsignedp);
5438 if (! all_ones_mask_p (ll_mask, lnbitsize))
5439 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5441 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5442 lr_unsignedp || rr_unsignedp);
5443 if (! all_ones_mask_p (lr_mask, rnbitsize))
5444 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5446 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5449 /* There is still another way we can do something: If both pairs of
5450 fields being compared are adjacent, we may be able to make a wider
5451 field containing them both.
5453 Note that we still must mask the lhs/rhs expressions. Furthermore,
5454 the mask must be shifted to account for the shift done by
5455 make_bit_field_ref. */
5456 if ((ll_bitsize + ll_bitpos == rl_bitpos
5457 && lr_bitsize + lr_bitpos == rr_bitpos)
5458 || (ll_bitpos == rl_bitpos + rl_bitsize
5459 && lr_bitpos == rr_bitpos + rr_bitsize))
5461 tree type;
5463 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5464 ll_bitsize + rl_bitsize,
5465 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5466 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5467 lr_bitsize + rr_bitsize,
5468 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5470 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5471 size_int (MIN (xll_bitpos, xrl_bitpos)));
5472 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5473 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5475 /* Convert to the smaller type before masking out unwanted bits. */
5476 type = lntype;
5477 if (lntype != rntype)
5479 if (lnbitsize > rnbitsize)
5481 lhs = fold_convert_loc (loc, rntype, lhs);
5482 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5483 type = rntype;
5485 else if (lnbitsize < rnbitsize)
5487 rhs = fold_convert_loc (loc, lntype, rhs);
5488 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5489 type = lntype;
5493 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5494 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5496 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5497 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5499 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5502 return 0;
5505 /* Handle the case of comparisons with constants. If there is something in
5506 common between the masks, those bits of the constants must be the same.
5507 If not, the condition is always false. Test for this to avoid generating
5508 incorrect code below. */
5509 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5510 if (! integer_zerop (result)
5511 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5512 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5514 if (wanted_code == NE_EXPR)
5516 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5517 return constant_boolean_node (true, truth_type);
5519 else
5521 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5522 return constant_boolean_node (false, truth_type);
5526 /* Construct the expression we will return. First get the component
5527 reference we will make. Unless the mask is all ones the width of
5528 that field, perform the mask operation. Then compare with the
5529 merged constant. */
5530 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5531 ll_unsignedp || rl_unsignedp);
5533 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5534 if (! all_ones_mask_p (ll_mask, lnbitsize))
5535 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5537 return build2_loc (loc, wanted_code, truth_type, result,
5538 const_binop (BIT_IOR_EXPR, l_const, r_const));
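/* Illustration of the constant case handled above (hypothetical
   bit-fields; a little-endian bit layout is assumed): given

     struct S { unsigned a : 4; unsigned b : 4; } s;

   the test "s.a == 3 && s.b == 5" can be folded so that the containing
   byte is fetched once by make_bit_field_ref and compared against the
   merged constant:

     (byte & 0xff) == (3 | (5 << 4)), i.e. byte == 0x53

   where "byte" stands for the underlying storage unit.  */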
5541 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5542 constant. */
5544 static tree
5545 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5546 tree op0, tree op1)
5548 tree arg0 = op0;
5549 enum tree_code op_code;
5550 tree comp_const;
5551 tree minmax_const;
5552 int consts_equal, consts_lt;
5553 tree inner;
5555 STRIP_SIGN_NOPS (arg0);
5557 op_code = TREE_CODE (arg0);
5558 minmax_const = TREE_OPERAND (arg0, 1);
5559 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5560 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5561 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5562 inner = TREE_OPERAND (arg0, 0);
5564 /* If something does not permit us to optimize, return NULL_TREE. */
5565 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5566 || TREE_CODE (comp_const) != INTEGER_CST
5567 || TREE_OVERFLOW (comp_const)
5568 || TREE_CODE (minmax_const) != INTEGER_CST
5569 || TREE_OVERFLOW (minmax_const))
5570 return NULL_TREE;
5572 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5573 and GT_EXPR, doing the rest with recursive calls using logical
5574 simplifications. */
5575 switch (code)
5577 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5579 tree tem
5580 = optimize_minmax_comparison (loc,
5581 invert_tree_comparison (code, false),
5582 type, op0, op1);
5583 if (tem)
5584 return invert_truthvalue_loc (loc, tem);
5585 return NULL_TREE;
5588 case GE_EXPR:
5589 return
5590 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5591 optimize_minmax_comparison
5592 (loc, EQ_EXPR, type, arg0, comp_const),
5593 optimize_minmax_comparison
5594 (loc, GT_EXPR, type, arg0, comp_const));
5596 case EQ_EXPR:
5597 if (op_code == MAX_EXPR && consts_equal)
5598 /* MAX (X, 0) == 0 -> X <= 0 */
5599 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5601 else if (op_code == MAX_EXPR && consts_lt)
5602 /* MAX (X, 0) == 5 -> X == 5 */
5603 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5605 else if (op_code == MAX_EXPR)
5606 /* MAX (X, 0) == -1 -> false */
5607 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5609 else if (consts_equal)
5610 /* MIN (X, 0) == 0 -> X >= 0 */
5611 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5613 else if (consts_lt)
5614 /* MIN (X, 0) == 5 -> false */
5615 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5617 else
5618 /* MIN (X, 0) == -1 -> X == -1 */
5619 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5621 case GT_EXPR:
5622 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5623 /* MAX (X, 0) > 0 -> X > 0
5624 MAX (X, 0) > 5 -> X > 5 */
5625 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5627 else if (op_code == MAX_EXPR)
5628 /* MAX (X, 0) > -1 -> true */
5629 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5631 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5632 /* MIN (X, 0) > 0 -> false
5633 MIN (X, 0) > 5 -> false */
5634 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5636 else
5637 /* MIN (X, 0) > -1 -> X > -1 */
5638 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5640 default:
5641 return NULL_TREE;
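/* For example, MAX (x, 10) > 5 is necessarily true because
   MAX (x, 10) >= 10, so the GT_EXPR arm above folds it to 1 while
   preserving any side effects of x; likewise MIN (x, 10) >= 20 folds
   to 0 through the EQ_EXPR/GT_EXPR decomposition of GE_EXPR.  */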
5645 /* T is an integer expression that is being multiplied, divided, or taken a
5646 modulus (CODE says which and what kind of divide or modulus) by a
5647 constant C. See if we can eliminate that operation by folding it with
5648 other operations already in T. WIDE_TYPE, if non-null, is a type that
5649 should be used for the computation if wider than our type.
5651 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5652 (X * 2) + (Y * 4). We must, however, be assured that either the original
5653 expression would not overflow or that overflow is undefined for the type
5654 in the language in question.
5656 If we return a non-null expression, it is an equivalent form of the
5657 original computation, but need not be in the original type.
5659 We set *STRICT_OVERFLOW_P to true if the return value depends on
5660 signed overflow being undefined. Otherwise we do not change
5661 *STRICT_OVERFLOW_P. */
5663 static tree
5664 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5665 bool *strict_overflow_p)
5667 /* To avoid exponential search depth, refuse to allow recursion past
5668 three levels. Beyond that (1) it's highly unlikely that we'll find
5669 something interesting and (2) we've probably processed it before
5670 when we built the inner expression. */
5672 static int depth;
5673 tree ret;
5675 if (depth > 3)
5676 return NULL;
5678 depth++;
5679 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5680 depth--;
5682 return ret;
5685 static tree
5686 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5687 bool *strict_overflow_p)
5689 tree type = TREE_TYPE (t);
5690 enum tree_code tcode = TREE_CODE (t);
5691 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5692 > GET_MODE_SIZE (TYPE_MODE (type)))
5693 ? wide_type : type);
5694 tree t1, t2;
5695 int same_p = tcode == code;
5696 tree op0 = NULL_TREE, op1 = NULL_TREE;
5697 bool sub_strict_overflow_p;
5699 /* Don't deal with constants of zero here; they confuse the code below. */
5700 if (integer_zerop (c))
5701 return NULL_TREE;
5703 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5704 op0 = TREE_OPERAND (t, 0);
5706 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5707 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5709 /* Note that we need not handle conditional operations here since fold
5710 already handles those cases. So just do arithmetic here. */
5711 switch (tcode)
5713 case INTEGER_CST:
5714 /* For a constant, we can always simplify if we are a multiply
5715 or (for divide and modulus) if it is a multiple of our constant. */
5716 if (code == MULT_EXPR
5717 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5718 return const_binop (code, fold_convert (ctype, t),
5719 fold_convert (ctype, c));
5720 break;
5722 CASE_CONVERT: case NON_LVALUE_EXPR:
5723 /* If op0 is an expression ... */
5724 if ((COMPARISON_CLASS_P (op0)
5725 || UNARY_CLASS_P (op0)
5726 || BINARY_CLASS_P (op0)
5727 || VL_EXP_CLASS_P (op0)
5728 || EXPRESSION_CLASS_P (op0))
5729 /* ... and has wrapping overflow, and its type is smaller
5730 than ctype, then we cannot pass through as widening. */
5731 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5732 && (TYPE_PRECISION (ctype)
5733 > TYPE_PRECISION (TREE_TYPE (op0))))
5734 /* ... or this is a truncation (t is narrower than op0),
5735 then we cannot pass through this narrowing. */
5736 || (TYPE_PRECISION (type)
5737 < TYPE_PRECISION (TREE_TYPE (op0)))
5738 /* ... or signedness changes for division or modulus,
5739 then we cannot pass through this conversion. */
5740 || (code != MULT_EXPR
5741 && (TYPE_UNSIGNED (ctype)
5742 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5743 /* ... or has undefined overflow while the type converted to
5744 does not, so we cannot do the operation in the inner type
5745 as that would introduce undefined overflow. */
5746 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5747 && !TYPE_OVERFLOW_UNDEFINED (type))))
5748 break;
5750 /* Pass the constant down and see if we can make a simplification. If
5751 we can, replace this expression with the inner simplification for
5752 possible later conversion to our or some other type. */
5753 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5754 && TREE_CODE (t2) == INTEGER_CST
5755 && !TREE_OVERFLOW (t2)
5756 && (0 != (t1 = extract_muldiv (op0, t2, code,
5757 code == MULT_EXPR
5758 ? ctype : NULL_TREE,
5759 strict_overflow_p))))
5760 return t1;
5761 break;
5763 case ABS_EXPR:
5764 /* If widening the type changes it from signed to unsigned, then we
5765 must avoid building ABS_EXPR itself as unsigned. */
5766 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5768 tree cstype = (*signed_type_for) (ctype);
5769 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5770 != 0)
5772 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5773 return fold_convert (ctype, t1);
5775 break;
5777 /* If the constant is negative, we cannot simplify this. */
5778 if (tree_int_cst_sgn (c) == -1)
5779 break;
5780 /* FALLTHROUGH */
5781 case NEGATE_EXPR:
5782 /* For division and modulus, type can't be unsigned, as e.g.
5783 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5784 For signed types, even with wrapping overflow, this is fine. */
5785 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5786 break;
5787 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5788 != 0)
5789 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5790 break;
5792 case MIN_EXPR: case MAX_EXPR:
5793 /* If widening the type changes the signedness, then we can't perform
5794 this optimization as that changes the result. */
5795 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5796 break;
5798 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5799 sub_strict_overflow_p = false;
5800 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5801 &sub_strict_overflow_p)) != 0
5802 && (t2 = extract_muldiv (op1, c, code, wide_type,
5803 &sub_strict_overflow_p)) != 0)
5805 if (tree_int_cst_sgn (c) < 0)
5806 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5807 if (sub_strict_overflow_p)
5808 *strict_overflow_p = true;
5809 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5810 fold_convert (ctype, t2));
5812 break;
5814 case LSHIFT_EXPR: case RSHIFT_EXPR:
5815 /* If the second operand is constant, this is a multiplication
5816 or floor division, by a power of two, so we can treat it that
5817 way unless the multiplier or divisor overflows. Signed
5818 left-shift overflow is implementation-defined rather than
5819 undefined in C90, so do not convert signed left shift into
5820 multiplication. */
5821 if (TREE_CODE (op1) == INTEGER_CST
5822 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5823 /* const_binop may not detect overflow correctly,
5824 so check for it explicitly here. */
5825 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5826 && 0 != (t1 = fold_convert (ctype,
5827 const_binop (LSHIFT_EXPR,
5828 size_one_node,
5829 op1)))
5830 && !TREE_OVERFLOW (t1))
5831 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5832 ? MULT_EXPR : FLOOR_DIV_EXPR,
5833 ctype,
5834 fold_convert (ctype, op0),
5835 t1),
5836 c, code, wide_type, strict_overflow_p);
5837 break;
5839 case PLUS_EXPR: case MINUS_EXPR:
5840 /* See if we can eliminate the operation on both sides. If we can, we
5841 can return a new PLUS or MINUS. If we can't, the only remaining
5842 cases where we can do anything are if the second operand is a
5843 constant. */
5844 sub_strict_overflow_p = false;
5845 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5846 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5847 if (t1 != 0 && t2 != 0
5848 && (code == MULT_EXPR
5849 /* If not multiplication, we can only do this if both operands
5850 are divisible by c. */
5851 || (multiple_of_p (ctype, op0, c)
5852 && multiple_of_p (ctype, op1, c))))
5854 if (sub_strict_overflow_p)
5855 *strict_overflow_p = true;
5856 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5857 fold_convert (ctype, t2));
5860 /* If this was a subtraction, negate OP1 and set it to be an addition.
5861 This simplifies the logic below. */
5862 if (tcode == MINUS_EXPR)
5864 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5865 /* If OP1 was not easily negatable, the constant may be OP0. */
5866 if (TREE_CODE (op0) == INTEGER_CST)
5868 tree tem = op0;
5869 op0 = op1;
5870 op1 = tem;
5871 tem = t1;
5872 t1 = t2;
5873 t2 = tem;
5877 if (TREE_CODE (op1) != INTEGER_CST)
5878 break;
5880 /* If either OP1 or C is negative, this optimization is not safe for
5881 some of the division and remainder types, while for others we need
5882 to change the code. */
5883 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5885 if (code == CEIL_DIV_EXPR)
5886 code = FLOOR_DIV_EXPR;
5887 else if (code == FLOOR_DIV_EXPR)
5888 code = CEIL_DIV_EXPR;
5889 else if (code != MULT_EXPR
5890 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5891 break;
5894 /* If it's a multiply or a division/modulus operation of a multiple
5895 of our constant, do the operation and verify it doesn't overflow. */
5896 if (code == MULT_EXPR
5897 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5899 op1 = const_binop (code, fold_convert (ctype, op1),
5900 fold_convert (ctype, c));
5901 /* We allow the constant to overflow with wrapping semantics. */
5902 if (op1 == 0
5903 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5904 break;
5906 else
5907 break;
5909 /* If we have an unsigned type, we cannot widen the operation since it
5910 will change the result if the original computation overflowed. */
5911 if (TYPE_UNSIGNED (ctype) && ctype != type)
5912 break;
5914 /* If we were able to eliminate our operation from the first side,
5915 apply our operation to the second side and reform the PLUS. */
5916 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5917 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5919 /* The last case is if we are a multiply. In that case, we can
5920 apply the distributive law to commute the multiply and addition
5921 if the multiplication of the constants doesn't overflow
5922 and overflow is defined. With undefined overflow
5923 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5924 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5925 return fold_build2 (tcode, ctype,
5926 fold_build2 (code, ctype,
5927 fold_convert (ctype, op0),
5928 fold_convert (ctype, c)),
5929 op1);
5931 break;
5933 case MULT_EXPR:
5934 /* We have a special case here if we are doing something like
5935 (C * 8) % 4 since we know that's zero. */
5936 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5937 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5938 /* If the multiplication can overflow we cannot optimize this. */
5939 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5940 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5941 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5943 *strict_overflow_p = true;
5944 return omit_one_operand (type, integer_zero_node, op0);
5947 /* ... fall through ... */
5949 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5950 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5951 /* If we can extract our operation from the LHS, do so and return a
5952 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5953 do something only if the second operand is a constant. */
5954 if (same_p
5955 && (t1 = extract_muldiv (op0, c, code, wide_type,
5956 strict_overflow_p)) != 0)
5957 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5958 fold_convert (ctype, op1));
5959 else if (tcode == MULT_EXPR && code == MULT_EXPR
5960 && (t1 = extract_muldiv (op1, c, code, wide_type,
5961 strict_overflow_p)) != 0)
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5963 fold_convert (ctype, t1));
5964 else if (TREE_CODE (op1) != INTEGER_CST)
5965 return 0;
5967 /* If these are the same operation types, we can associate them
5968 assuming no overflow. */
5969 if (tcode == code)
5971 bool overflow_p = false;
5972 bool overflow_mul_p;
5973 signop sign = TYPE_SIGN (ctype);
5974 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5975 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5976 if (overflow_mul_p
5977 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5978 overflow_p = true;
5979 if (!overflow_p)
5980 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5981 wide_int_to_tree (ctype, mul));
5984 /* If these operations "cancel" each other, we have the main
5985 optimizations of this pass, which occur when either constant is a
5986 multiple of the other, in which case we replace this with either an
5987 operation of CODE or TCODE.
5989 If we have an unsigned type, we cannot do this since it will change
5990 the result if the original computation overflowed. */
5991 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5992 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5993 || (tcode == MULT_EXPR
5994 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5995 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5996 && code != MULT_EXPR)))
5998 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6001 *strict_overflow_p = true;
6002 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6003 fold_convert (ctype,
6004 const_binop (TRUNC_DIV_EXPR,
6005 op1, c)));
6007 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6009 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6010 *strict_overflow_p = true;
6011 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6012 fold_convert (ctype,
6013 const_binop (TRUNC_DIV_EXPR,
6014 c, op1)));
6017 break;
6019 default:
6020 break;
6023 return 0;
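/* Small worked example for the MULT_EXPR case above: with undefined
   signed overflow, (x * 8) % 4 folds to 0, since 8 is a multiple of 4;
   *STRICT_OVERFLOW_P is set because the result relies on the
   multiplication not wrapping.  */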
6026 /* Return a node which has the indicated constant VALUE (either 0 or
6027 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6028 and is of the indicated TYPE. */
6030 tree
6031 constant_boolean_node (bool value, tree type)
6033 if (type == integer_type_node)
6034 return value ? integer_one_node : integer_zero_node;
6035 else if (type == boolean_type_node)
6036 return value ? boolean_true_node : boolean_false_node;
6037 else if (TREE_CODE (type) == VECTOR_TYPE)
6038 return build_vector_from_val (type,
6039 build_int_cst (TREE_TYPE (type),
6040 value ? -1 : 0));
6041 else
6042 return fold_convert (type, value ? integer_one_node : integer_zero_node);
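/* E.g. for a four-element integer vector type this builds
   { -1, -1, -1, -1 } for true and { 0, 0, 0, 0 } for false, the
   all-ones/all-zeros mask form that vector comparisons produce, while
   scalar integer types simply get 1 or 0.  */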
6046 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6047 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6048 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6049 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6050 COND is the first argument to CODE; otherwise (as in the example
6051 given here), it is the second argument. TYPE is the type of the
6052 original expression. Return NULL_TREE if no simplification is
6053 possible. */
6055 static tree
6056 fold_binary_op_with_conditional_arg (location_t loc,
6057 enum tree_code code,
6058 tree type, tree op0, tree op1,
6059 tree cond, tree arg, int cond_first_p)
6061 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6062 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6063 tree test, true_value, false_value;
6064 tree lhs = NULL_TREE;
6065 tree rhs = NULL_TREE;
6066 enum tree_code cond_code = COND_EXPR;
6068 if (TREE_CODE (cond) == COND_EXPR
6069 || TREE_CODE (cond) == VEC_COND_EXPR)
6071 test = TREE_OPERAND (cond, 0);
6072 true_value = TREE_OPERAND (cond, 1);
6073 false_value = TREE_OPERAND (cond, 2);
6074 /* If this operand is an expression that throws (and so has void
6075 type), it does not make sense to try to perform a logical or
6076 arithmetic operation involving it. */
6077 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6078 lhs = true_value;
6079 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6080 rhs = false_value;
6082 else
6084 tree testtype = TREE_TYPE (cond);
6085 test = cond;
6086 true_value = constant_boolean_node (true, testtype);
6087 false_value = constant_boolean_node (false, testtype);
6090 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6091 cond_code = VEC_COND_EXPR;
6093 /* This transformation is only worthwhile if we don't have to wrap ARG
6094 in a SAVE_EXPR and the operation can be simplified without recursing
6095 on at least one of the branches once it is pushed inside the COND_EXPR. */
6096 if (!TREE_CONSTANT (arg)
6097 && (TREE_SIDE_EFFECTS (arg)
6098 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6099 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6100 return NULL_TREE;
6102 arg = fold_convert_loc (loc, arg_type, arg);
6103 if (lhs == 0)
6105 true_value = fold_convert_loc (loc, cond_type, true_value);
6106 if (cond_first_p)
6107 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6108 else
6109 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6111 if (rhs == 0)
6113 false_value = fold_convert_loc (loc, cond_type, false_value);
6114 if (cond_first_p)
6115 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6116 else
6117 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6120 /* Check that we have simplified at least one of the branches. */
6121 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6122 return NULL_TREE;
6124 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
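/* For instance, with ARG being the constant 3 and COND "b ? 1 : 0",
   "3 + (b ? 1 : 0)" becomes "b ? 4 : 3": both branches fold to
   constants, so the TREE_CONSTANT checks above accept the
   transformation.  */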
6128 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6130 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6131 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6132 ADDEND is the same as X.
6134 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6135 and finite. The problematic cases are when X is zero, and its mode
6136 has signed zeros. In the case of rounding towards -infinity,
6137 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6138 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6140 bool
6141 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6143 if (!real_zerop (addend))
6144 return false;
6146 /* Don't allow the fold with -fsignaling-nans. */
6147 if (HONOR_SNANS (TYPE_MODE (type)))
6148 return false;
6150 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6151 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6152 return true;
6154 /* In a vector or complex, we would need to check the sign of all zeros. */
6155 if (TREE_CODE (addend) != REAL_CST)
6156 return false;
6158 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6159 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6160 negate = !negate;
6162 /* The mode has signed zeros, and we have to honor their sign.
6163 In this situation, there is only one case we can return true for.
6164 X - 0 is the same as X unless rounding towards -infinity is
6165 supported. */
6166 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
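/* Concretely: X + 0.0 cannot be folded to X in general, because
   (-0.0) + 0.0 is +0.0 under the default rounding; X - 0.0 is safe
   except when rounding towards -infinity, where (+0.0) - 0.0 yields
   -0.0.  That is why only the subtraction form survives, and only
   when sign-dependent rounding need not be honored.  */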
6169 /* Subroutine of fold() that checks comparisons of built-in math
6170 functions against real constants.
6172 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6173 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6174 is the type of the result and ARG0 and ARG1 are the operands of the
6175 comparison. ARG1 must be a TREE_REAL_CST.
6177 The function returns the constant folded tree if a simplification
6178 can be made, and NULL_TREE otherwise. */
6180 static tree
6181 fold_mathfn_compare (location_t loc,
6182 enum built_in_function fcode, enum tree_code code,
6183 tree type, tree arg0, tree arg1)
6185 REAL_VALUE_TYPE c;
6187 if (BUILTIN_SQRT_P (fcode))
6189 tree arg = CALL_EXPR_ARG (arg0, 0);
6190 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6192 c = TREE_REAL_CST (arg1);
6193 if (REAL_VALUE_NEGATIVE (c))
6195 /* sqrt(x) < y is always false, if y is negative. */
6196 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6197 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6199 /* sqrt(x) > y is always true, if y is negative and we
6200 don't care about NaNs, i.e. negative values of x. */
6201 if (code == NE_EXPR || !HONOR_NANS (mode))
6202 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6204 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6205 return fold_build2_loc (loc, GE_EXPR, type, arg,
6206 build_real (TREE_TYPE (arg), dconst0));
6208 else if (code == GT_EXPR || code == GE_EXPR)
6210 REAL_VALUE_TYPE c2;
6212 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6213 real_convert (&c2, mode, &c2);
6215 if (REAL_VALUE_ISINF (c2))
6217 /* sqrt(x) > y is x == +Inf, when y is very large. */
6218 if (HONOR_INFINITIES (mode))
6219 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6220 build_real (TREE_TYPE (arg), c2));
6222 /* sqrt(x) > y is always false, when y is very large
6223 and we don't care about infinities. */
6224 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6227 /* sqrt(x) > c is the same as x > c*c. */
6228 return fold_build2_loc (loc, code, type, arg,
6229 build_real (TREE_TYPE (arg), c2));
6231 else if (code == LT_EXPR || code == LE_EXPR)
6233 REAL_VALUE_TYPE c2;
6235 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6236 real_convert (&c2, mode, &c2);
6238 if (REAL_VALUE_ISINF (c2))
6240 /* sqrt(x) < y is always true, when y is a very large
6241 value and we don't care about NaNs or Infinities. */
6242 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6243 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6245 /* sqrt(x) < y is x != +Inf when y is very large and we
6246 don't care about NaNs. */
6247 if (! HONOR_NANS (mode))
6248 return fold_build2_loc (loc, NE_EXPR, type, arg,
6249 build_real (TREE_TYPE (arg), c2));
6251 /* sqrt(x) < y is x >= 0 when y is very large and we
6252 don't care about Infinities. */
6253 if (! HONOR_INFINITIES (mode))
6254 return fold_build2_loc (loc, GE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg), dconst0));
6257 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6258 arg = save_expr (arg);
6259 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6260 fold_build2_loc (loc, GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg),
6262 dconst0)),
6263 fold_build2_loc (loc, NE_EXPR, type, arg,
6264 build_real (TREE_TYPE (arg),
6265 c2)));
6268 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6269 if (! HONOR_NANS (mode))
6270 return fold_build2_loc (loc, code, type, arg,
6271 build_real (TREE_TYPE (arg), c2));
6273 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6274 arg = save_expr (arg);
6275 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6276 fold_build2_loc (loc, GE_EXPR, type, arg,
6277 build_real (TREE_TYPE (arg),
6278 dconst0)),
6279 fold_build2_loc (loc, code, type, arg,
6280 build_real (TREE_TYPE (arg),
6281 c2)));
6285 return NULL_TREE;
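/* Sample folds performed above, for a finite constant:
     sqrt (x) > 2.0   ->  x > 4.0
     sqrt (x) < 2.0   ->  x < 4.0                  (NaNs ignorable)
     sqrt (x) < 2.0   ->  x >= 0.0 && x < 4.0      (NaNs honored)
     sqrt (x) > -1.0  ->  x >= 0.0                 (NaNs honored)  */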
6288 /* Subroutine of fold() that optimizes comparisons against Infinities,
6289 either +Inf or -Inf.
6291 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6292 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6293 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6295 The function returns the constant folded tree if a simplification
6296 can be made, and NULL_TREE otherwise. */
6298 static tree
6299 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6300 tree arg0, tree arg1)
6302 machine_mode mode;
6303 REAL_VALUE_TYPE max;
6304 tree temp;
6305 bool neg;
6307 mode = TYPE_MODE (TREE_TYPE (arg0));
6309 /* For negative infinity swap the sense of the comparison. */
6310 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6311 if (neg)
6312 code = swap_tree_comparison (code);
6314 switch (code)
6316 case GT_EXPR:
6317 /* x > +Inf is always false, if we ignore sNaNs. */
6318 if (HONOR_SNANS (mode))
6319 return NULL_TREE;
6320 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6322 case LE_EXPR:
6323 /* x <= +Inf is always true, if we don't care about NaNs. */
6324 if (! HONOR_NANS (mode))
6325 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6327 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6328 arg0 = save_expr (arg0);
6329 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6331 case EQ_EXPR:
6332 case GE_EXPR:
6333 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6334 real_maxval (&max, neg, mode);
6335 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6336 arg0, build_real (TREE_TYPE (arg0), max));
6338 case LT_EXPR:
6339 /* x < +Inf is always equal to x <= DBL_MAX. */
6340 real_maxval (&max, neg, mode);
6341 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6344 case NE_EXPR:
6345 /* x != +Inf is always equal to !(x > DBL_MAX). */
6346 real_maxval (&max, neg, mode);
6347 if (! HONOR_NANS (mode))
6348 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6349 arg0, build_real (TREE_TYPE (arg0), max));
6351 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6352 arg0, build_real (TREE_TYPE (arg0), max));
6353 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6355 default:
6356 break;
6359 return NULL_TREE;
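/* E.g. for double, x < +Inf becomes x <= DBL_MAX, and x != +Inf
   becomes !(x > DBL_MAX) when NaNs are honored; both rewrites agree
   with the original on NaN operands.  */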
6362 /* Subroutine of fold() that optimizes comparisons of a division by
6363 a nonzero integer constant against an integer constant, i.e.
6364 X/C1 op C2.
6366 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6367 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6368 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6370 The function returns the constant folded tree if a simplification
6371 can be made, and NULL_TREE otherwise. */
6373 static tree
6374 fold_div_compare (location_t loc,
6375 enum tree_code code, tree type, tree arg0, tree arg1)
6377 tree prod, tmp, hi, lo;
6378 tree arg00 = TREE_OPERAND (arg0, 0);
6379 tree arg01 = TREE_OPERAND (arg0, 1);
6380 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6381 bool neg_overflow = false;
6382 bool overflow;
6384 /* We have to do this the hard way to detect unsigned overflow.
6385 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6386 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6387 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6388 neg_overflow = false;
6390 if (sign == UNSIGNED)
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1));
6394 lo = prod;
6396 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6397 val = wi::add (prod, tmp, sign, &overflow);
6398 hi = force_fit_type (TREE_TYPE (arg00), val,
6399 -1, overflow | TREE_OVERFLOW (prod));
6401 else if (tree_int_cst_sgn (arg01) >= 0)
6403 tmp = int_const_binop (MINUS_EXPR, arg01,
6404 build_int_cst (TREE_TYPE (arg01), 1));
6405 switch (tree_int_cst_sgn (arg1))
6407 case -1:
6408 neg_overflow = true;
6409 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6410 hi = prod;
6411 break;
6413 case 0:
6414 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6415 hi = tmp;
6416 break;
6418 case 1:
6419 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6420 lo = prod;
6421 break;
6423 default:
6424 gcc_unreachable ();
6427 else
6429 /* A negative divisor reverses the relational operators. */
6430 code = swap_tree_comparison (code);
6432 tmp = int_const_binop (PLUS_EXPR, arg01,
6433 build_int_cst (TREE_TYPE (arg01), 1));
6434 switch (tree_int_cst_sgn (arg1))
6436 case -1:
6437 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6438 lo = prod;
6439 break;
6441 case 0:
6442 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6443 lo = tmp;
6444 break;
6446 case 1:
6447 neg_overflow = true;
6448 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6449 hi = prod;
6450 break;
6452 default:
6453 gcc_unreachable ();
6457 switch (code)
6459 case EQ_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6466 return build_range_check (loc, type, arg00, 1, lo, hi);
6468 case NE_EXPR:
6469 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6470 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6471 if (TREE_OVERFLOW (hi))
6472 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6473 if (TREE_OVERFLOW (lo))
6474 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6475 return build_range_check (loc, type, arg00, 0, lo, hi);
6477 case LT_EXPR:
6478 if (TREE_OVERFLOW (lo))
6480 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6481 return omit_one_operand_loc (loc, type, tmp, arg00);
6483 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6485 case LE_EXPR:
6486 if (TREE_OVERFLOW (hi))
6488 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6489 return omit_one_operand_loc (loc, type, tmp, arg00);
6491 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6493 case GT_EXPR:
6494 if (TREE_OVERFLOW (hi))
6496 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6497 return omit_one_operand_loc (loc, type, tmp, arg00);
6499 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6501 case GE_EXPR:
6502 if (TREE_OVERFLOW (lo))
6504 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6505 return omit_one_operand_loc (loc, type, tmp, arg00);
6507 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6509 default:
6510 break;
6513 return NULL_TREE;
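/* Worked example for unsigned x: in "x / 4 == 2", PROD is 8 and TMP
   is 3, giving LO = 8 and HI = 11, so the EQ_EXPR arm emits the range
   check 8 <= x && x <= 11.  */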
6517 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6518 equality/inequality test, then return a simplified form of the test
6519 using a sign test. Otherwise return NULL. TYPE is the desired
6520 result type. */
6522 static tree
6523 fold_single_bit_test_into_sign_test (location_t loc,
6524 enum tree_code code, tree arg0, tree arg1,
6525 tree result_type)
6527 /* If this is testing a single bit, we can optimize the test. */
6528 if ((code == NE_EXPR || code == EQ_EXPR)
6529 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6530 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6532 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6533 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6534 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6536 if (arg00 != NULL_TREE
6537 /* This is only a win if casting to a signed type is cheap,
6538 i.e. when arg00's type is not a partial mode. */
6539 && TYPE_PRECISION (TREE_TYPE (arg00))
6540 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6542 tree stype = signed_type_for (TREE_TYPE (arg00));
6543 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6544 result_type,
6545 fold_convert_loc (loc, stype, arg00),
6546 build_int_cst (stype, 0));
6550 return NULL_TREE;
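/* For instance, for a 32-bit signed int x, "(x & 0x80000000) != 0"
   tests exactly the sign bit, so this becomes "x < 0", and the
   "== 0" form becomes "x >= 0".  */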
6553 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6554 equality/inequality test, then return a simplified form of
6555 the test using shifts and logical operations. Otherwise return
6556 NULL. TYPE is the desired result type. */
6558 tree
6559 fold_single_bit_test (location_t loc, enum tree_code code,
6560 tree arg0, tree arg1, tree result_type)
6562 /* If this is testing a single bit, we can optimize the test. */
6563 if ((code == NE_EXPR || code == EQ_EXPR)
6564 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6565 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6567 tree inner = TREE_OPERAND (arg0, 0);
6568 tree type = TREE_TYPE (arg0);
6569 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6570 machine_mode operand_mode = TYPE_MODE (type);
6571 int ops_unsigned;
6572 tree signed_type, unsigned_type, intermediate_type;
6573 tree tem, one;
6575 /* First, see if we can fold the single bit test into a sign-bit
6576 test. */
6577 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6578 result_type);
6579 if (tem)
6580 return tem;
6582 /* Otherwise we have (A & C) != 0 where C is a single bit,
6583 convert that into ((A >> C2) & 1), where C2 = log2(C).
6584 Similarly for (A & C) == 0. */
6586 /* If INNER is a right shift of a constant and it plus BITNUM does
6587 not overflow, adjust BITNUM and INNER. */
6588 if (TREE_CODE (inner) == RSHIFT_EXPR
6589 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6590 && bitnum < TYPE_PRECISION (type)
6591 && wi::ltu_p (TREE_OPERAND (inner, 1),
6592 TYPE_PRECISION (type) - bitnum))
6594 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6595 inner = TREE_OPERAND (inner, 0);
6598 /* If we are going to be able to omit the AND below, we must do our
6599 operations as unsigned. If we must use the AND, we have a choice.
6600 Normally unsigned is faster, but for some machines signed is. */
6601 #ifdef LOAD_EXTEND_OP
6602 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6603 && !flag_syntax_only) ? 0 : 1;
6604 #else
6605 ops_unsigned = 1;
6606 #endif
6608 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6609 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6610 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6611 inner = fold_convert_loc (loc, intermediate_type, inner);
6613 if (bitnum != 0)
6614 inner = build2 (RSHIFT_EXPR, intermediate_type,
6615 inner, size_int (bitnum));
6617 one = build_int_cst (intermediate_type, 1);
6619 if (code == EQ_EXPR)
6620 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6622 /* Put the AND last so it can combine with more things. */
6623 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6625 /* Make sure to return the proper type. */
6626 inner = fold_convert_loc (loc, result_type, inner);
6628 return inner;
6630 return NULL_TREE;
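/* E.g. "(x & 8) != 0" (bit 3, not the sign bit) becomes
   "((unsigned) x >> 3) & 1" on typical targets, and the "== 0" form
   XORs in 1 first: "(((unsigned) x >> 3) ^ 1) & 1".  */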
6633 /* Check whether we are allowed to reorder operands arg0 and arg1,
6634 such that the evaluation of arg1 occurs before arg0. */
6636 static bool
6637 reorder_operands_p (const_tree arg0, const_tree arg1)
6639 if (! flag_evaluation_order)
6640 return true;
6641 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6642 return true;
6643 return ! TREE_SIDE_EFFECTS (arg0)
6644 && ! TREE_SIDE_EFFECTS (arg1);
6647 /* Test whether it is preferable to swap two operands, ARG0 and
6648 ARG1, for example because ARG0 is an integer constant and ARG1
6649 isn't. If REORDER is true, only recommend swapping if we can
6650 evaluate the operands in reverse order. */
6652 bool
6653 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6655 if (CONSTANT_CLASS_P (arg1))
6656 return 0;
6657 if (CONSTANT_CLASS_P (arg0))
6658 return 1;
6660 STRIP_SIGN_NOPS (arg0);
6661 STRIP_SIGN_NOPS (arg1);
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6668 if (reorder && flag_evaluation_order
6669 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6670 return 0;
6672 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6673 for commutative and comparison operators. Ensuring a canonical
6674 form allows the optimizers to find additional redundancies without
6675 having to explicitly check for both orderings. */
6676 if (TREE_CODE (arg0) == SSA_NAME
6677 && TREE_CODE (arg1) == SSA_NAME
6678 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6679 return 1;
6681 /* Put SSA_NAMEs last. */
6682 if (TREE_CODE (arg1) == SSA_NAME)
6683 return 0;
6684 if (TREE_CODE (arg0) == SSA_NAME)
6685 return 1;
6687 /* Put variables last. */
6688 if (DECL_P (arg1))
6689 return 0;
6690 if (DECL_P (arg0))
6691 return 1;
6693 return 0;
6696 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6697 ARG0 is extended to a wider type. */
6699 static tree
6700 fold_widened_comparison (location_t loc, enum tree_code code,
6701 tree type, tree arg0, tree arg1)
6703 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6704 tree arg1_unw;
6705 tree shorter_type, outer_type;
6706 tree min, max;
6707 bool above, below;
6709 if (arg0_unw == arg0)
6710 return NULL_TREE;
6711 shorter_type = TREE_TYPE (arg0_unw);
6713 #ifdef HAVE_canonicalize_funcptr_for_compare
6714 /* Disable this optimization if we're casting a function pointer
6715 type on targets that require function pointer canonicalization. */
6716 if (HAVE_canonicalize_funcptr_for_compare
6717 && TREE_CODE (shorter_type) == POINTER_TYPE
6718 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6719 return NULL_TREE;
6720 #endif
6722 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6723 return NULL_TREE;
6725 arg1_unw = get_unwidened (arg1, NULL_TREE);
6727 /* If possible, express the comparison in the shorter mode. */
6728 if ((code == EQ_EXPR || code == NE_EXPR
6729 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6730 && (TREE_TYPE (arg1_unw) == shorter_type
6731 || ((TYPE_PRECISION (shorter_type)
6732 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6733 && (TYPE_UNSIGNED (shorter_type)
6734 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6735 || (TREE_CODE (arg1_unw) == INTEGER_CST
6736 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6737 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6738 && int_fits_type_p (arg1_unw, shorter_type))))
6739 return fold_build2_loc (loc, code, type, arg0_unw,
6740 fold_convert_loc (loc, shorter_type, arg1_unw));
6742 if (TREE_CODE (arg1_unw) != INTEGER_CST
6743 || TREE_CODE (shorter_type) != INTEGER_TYPE
6744 || !int_fits_type_p (arg1_unw, shorter_type))
6745 return NULL_TREE;
6747 /* If we are comparing with an integer that does not fit into the range
6748 of the shorter type, the result is known. */
6749 outer_type = TREE_TYPE (arg1_unw);
6750 min = lower_bound_in_type (outer_type, shorter_type);
6751 max = upper_bound_in_type (outer_type, shorter_type);
6753 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 max, arg1_unw));
6755 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6756 arg1_unw, min));
6758 switch (code)
6760 case EQ_EXPR:
6761 if (above || below)
6762 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6763 break;
6765 case NE_EXPR:
6766 if (above || below)
6767 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6768 break;
6770 case LT_EXPR:
6771 case LE_EXPR:
6772 if (above)
6773 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6774 else if (below)
6775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6777 case GT_EXPR:
6778 case GE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6784 default:
6785 break;
6788 return NULL_TREE;
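/* E.g. if c has type unsigned char, "(int) c < 300" compares against
   a constant above the shorter type's range [0, 255]; ABOVE is set,
   and the LT_EXPR arm folds the whole comparison to 1.  */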
6791 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6792 ARG0 just the signedness is changed. */
6794 static tree
6795 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6796 tree arg0, tree arg1)
6798 tree arg0_inner;
6799 tree inner_type, outer_type;
6801 if (!CONVERT_EXPR_P (arg0))
6802 return NULL_TREE;
6804 outer_type = TREE_TYPE (arg0);
6805 arg0_inner = TREE_OPERAND (arg0, 0);
6806 inner_type = TREE_TYPE (arg0_inner);
6808 #ifdef HAVE_canonicalize_funcptr_for_compare
6809 /* Disable this optimization if we're casting a function pointer
6810 type on targets that require function pointer canonicalization. */
6811 if (HAVE_canonicalize_funcptr_for_compare
6812 && TREE_CODE (inner_type) == POINTER_TYPE
6813 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6814 return NULL_TREE;
6815 #endif
6817 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6818 return NULL_TREE;
6820 if (TREE_CODE (arg1) != INTEGER_CST
6821 && !(CONVERT_EXPR_P (arg1)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6830 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6831 return NULL_TREE;
6833 if (TREE_CODE (arg1) == INTEGER_CST)
6834 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6835 TREE_OVERFLOW (arg1));
6836 else
6837 arg1 = fold_convert_loc (loc, inner_type, arg1);
6839 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
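/* E.g. "(unsigned int) i == 5" with i of type int: precision matches
   and only the signedness differs, which EQ_EXPR permits, so the cast
   is dropped and the constant refitted, yielding "i == 5".  */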
6843 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6844 means A >= Y && A != MAX, but in this case we know that
6845 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6847 static tree
6848 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6850 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6852 if (TREE_CODE (bound) == LT_EXPR)
6853 a = TREE_OPERAND (bound, 0);
6854 else if (TREE_CODE (bound) == GT_EXPR)
6855 a = TREE_OPERAND (bound, 1);
6856 else
6857 return NULL_TREE;
6859 typea = TREE_TYPE (a);
6860 if (!INTEGRAL_TYPE_P (typea)
6861 && !POINTER_TYPE_P (typea))
6862 return NULL_TREE;
6864 if (TREE_CODE (ineq) == LT_EXPR)
6866 a1 = TREE_OPERAND (ineq, 1);
6867 y = TREE_OPERAND (ineq, 0);
6869 else if (TREE_CODE (ineq) == GT_EXPR)
6871 a1 = TREE_OPERAND (ineq, 0);
6872 y = TREE_OPERAND (ineq, 1);
6874 else
6875 return NULL_TREE;
6877 if (TREE_TYPE (a1) != typea)
6878 return NULL_TREE;
6880 if (POINTER_TYPE_P (typea))
6882 /* Convert the pointers into integers before taking the difference. */
6883 tree ta = fold_convert_loc (loc, ssizetype, a);
6884 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6885 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6887 else
6888 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6890 if (!diff || !integer_onep (diff))
6891 return NULL_TREE;
6893 return fold_build2_loc (loc, GE_EXPR, type, a, y);
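/* E.g. with BOUND "a < n" and INEQ "a + 1 > i", the difference
   between the two sides of INEQ is 1, so this returns "a >= i"; the
   caller conjoins it with "a < n", which guarantees a + 1 did not
   wrap.  */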
6896 /* Fold a sum or difference of at least one multiplication.
6897 Returns the folded tree or NULL if no simplification could be made. */
6899 static tree
6900 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6901 tree arg0, tree arg1)
6903 tree arg00, arg01, arg10, arg11;
6904 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6906 /* (A * C) +- (B * C) -> (A+-B) * C.
6907 (A * C) +- A -> A * (C+-1).
6908 We are most concerned about the case where C is a constant,
6909 but other combinations show up during loop reduction. Since
6910 it is not difficult, try all four possibilities. */
6912 if (TREE_CODE (arg0) == MULT_EXPR)
6914 arg00 = TREE_OPERAND (arg0, 0);
6915 arg01 = TREE_OPERAND (arg0, 1);
6917 else if (TREE_CODE (arg0) == INTEGER_CST)
6919 arg00 = build_one_cst (type);
6920 arg01 = arg0;
6922 else
6924 /* We cannot generate constant 1 for fract. */
6925 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6926 return NULL_TREE;
6927 arg00 = arg0;
6928 arg01 = build_one_cst (type);
6930 if (TREE_CODE (arg1) == MULT_EXPR)
6932 arg10 = TREE_OPERAND (arg1, 0);
6933 arg11 = TREE_OPERAND (arg1, 1);
6935 else if (TREE_CODE (arg1) == INTEGER_CST)
6937 arg10 = build_one_cst (type);
6938 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6939 the purpose of this transformation. */
6940 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6941 && negate_expr_p (arg1)
6942 && code == PLUS_EXPR)
6944 arg11 = negate_expr (arg1);
6945 code = MINUS_EXPR;
6947 else
6948 arg11 = arg1;
6950 else
6952 /* We cannot generate constant 1 for fract. */
6953 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6954 return NULL_TREE;
6955 arg10 = arg1;
6956 arg11 = build_one_cst (type);
6958 same = NULL_TREE;
6960 if (operand_equal_p (arg01, arg11, 0))
6961 same = arg01, alt0 = arg00, alt1 = arg10;
6962 else if (operand_equal_p (arg00, arg10, 0))
6963 same = arg00, alt0 = arg01, alt1 = arg11;
6964 else if (operand_equal_p (arg00, arg11, 0))
6965 same = arg00, alt0 = arg01, alt1 = arg10;
6966 else if (operand_equal_p (arg01, arg10, 0))
6967 same = arg01, alt0 = arg00, alt1 = arg11;
6969 /* No identical multiplicands; see if we can find a common
6970 power-of-two factor in non-power-of-two multiplies. This
6971 can help in multi-dimensional array access. */
6972 else if (tree_fits_shwi_p (arg01)
6973 && tree_fits_shwi_p (arg11))
6975 HOST_WIDE_INT int01, int11, tmp;
6976 bool swap = false;
6977 tree maybe_same;
6978 int01 = tree_to_shwi (arg01);
6979 int11 = tree_to_shwi (arg11);
6981 /* Move min of absolute values to int11. */
6982 if (absu_hwi (int01) < absu_hwi (int11))
6984 tmp = int01, int01 = int11, int11 = tmp;
6985 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6986 maybe_same = arg01;
6987 swap = true;
6989 else
6990 maybe_same = arg11;
6992 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6993 /* The remainder should not be a constant, otherwise we
6994 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6995 increase the number of multiplications needed. */
6996 && TREE_CODE (arg10) != INTEGER_CST)
6998 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6999 build_int_cst (TREE_TYPE (arg00),
7000 int01 / int11));
7001 alt1 = arg10;
7002 same = maybe_same;
7003 if (swap)
7004 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7008 if (same)
7009 return fold_build2_loc (loc, MULT_EXPR, type,
7010 fold_build2_loc (loc, code, type,
7011 fold_convert_loc (loc, type, alt0),
7012 fold_convert_loc (loc, type, alt1)),
7013 fold_convert_loc (loc, type, same));
7015 return NULL_TREE;
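/* Worked example of the power-of-two branch: in "i * 12 + j * 4" the
   constants share the factor 4 (4 is a power of two and 12 % 4 == 0),
   so the sum becomes "(i * 3 + j) * 4", exposing the common stride of
   a multi-dimensional array access.  */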
7018 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7019 specified by EXPR into the buffer PTR of length LEN bytes.
7020 Return the number of bytes placed in the buffer, or zero
7021 upon failure. */
7023 static int
7024 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7026 tree type = TREE_TYPE (expr);
7027 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7028 int byte, offset, word, words;
7029 unsigned char value;
7031 if ((off == -1 && total_bytes > len)
7032 || off >= total_bytes)
7033 return 0;
7034 if (off == -1)
7035 off = 0;
7036 words = total_bytes / UNITS_PER_WORD;
7038 for (byte = 0; byte < total_bytes; byte++)
7040 int bitpos = byte * BITS_PER_UNIT;
7041 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7042 number of bytes. */
7043 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7045 if (total_bytes > UNITS_PER_WORD)
7047 word = byte / UNITS_PER_WORD;
7048 if (WORDS_BIG_ENDIAN)
7049 word = (words - 1) - word;
7050 offset = word * UNITS_PER_WORD;
7051 if (BYTES_BIG_ENDIAN)
7052 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7053 else
7054 offset += byte % UNITS_PER_WORD;
7056 else
7057 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7058 if (offset >= off
7059 && offset - off < len)
7060 ptr[offset - off] = value;
7062 return MIN (len, total_bytes - off);
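/* Byte-order illustration: encoding the 32-bit INTEGER_CST 0x01020304
   produces the bytes { 0x04, 0x03, 0x02, 0x01 } for a little-endian
   target and { 0x01, 0x02, 0x03, 0x04 } for a big-endian one; a
   non-negative OFF selects a starting byte within that target image.  */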
7066 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7067 specified by EXPR into the buffer PTR of length LEN bytes.
7068 Return the number of bytes placed in the buffer, or zero
7069 upon failure. */
7071 static int
7072 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7074 tree type = TREE_TYPE (expr);
7075 machine_mode mode = TYPE_MODE (type);
7076 int total_bytes = GET_MODE_SIZE (mode);
7077 FIXED_VALUE_TYPE value;
7078 tree i_value, i_type;
7080 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7081 return 0;
7083 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7085 if (NULL_TREE == i_type
7086 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7087 return 0;
7089 value = TREE_FIXED_CST (expr);
7090 i_value = double_int_to_tree (i_type, value.data);
7092 return native_encode_int (i_value, ptr, len, off);
7096 /* Subroutine of native_encode_expr. Encode the REAL_CST
7097 specified by EXPR into the buffer PTR of length LEN bytes.
7098 Return the number of bytes placed in the buffer, or zero
7099 upon failure. */
7101 static int
7102 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7104 tree type = TREE_TYPE (expr);
7105 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7106 int byte, offset, word, words, bitpos;
7107 unsigned char value;
7109 /* There are always 32 bits in each long, no matter the size of
7110 the host's long. We handle floating point representations with
7111 up to 192 bits. */
7112 long tmp[6];
7114 if ((off == -1 && total_bytes > len)
7115 || off >= total_bytes)
7116 return 0;
7117 if (off == -1)
7118 off = 0;
7119 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7121 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7123 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7124 bitpos += BITS_PER_UNIT)
7126 byte = (bitpos / BITS_PER_UNIT) & 3;
7127 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7129 if (UNITS_PER_WORD < 4)
7131 word = byte / UNITS_PER_WORD;
7132 if (WORDS_BIG_ENDIAN)
7133 word = (words - 1) - word;
7134 offset = word * UNITS_PER_WORD;
7135 if (BYTES_BIG_ENDIAN)
7136 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7137 else
7138 offset += byte % UNITS_PER_WORD;
7140 else
7141 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7142 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7143 if (offset >= off
7144 && offset - off < len)
7145 ptr[offset - off] = value;
7147 return MIN (len, total_bytes - off);
7150 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7151 specified by EXPR into the buffer PTR of length LEN bytes.
7152 Return the number of bytes placed in the buffer, or zero
7153 upon failure. */
7155 static int
7156 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7158 int rsize, isize;
7159 tree part;
7161 part = TREE_REALPART (expr);
7162 rsize = native_encode_expr (part, ptr, len, off);
7163 if (off == -1
7164 && rsize == 0)
7165 return 0;
7166 part = TREE_IMAGPART (expr);
7167 if (off != -1)
7168 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7169 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7170 if (off == -1
7171 && isize != rsize)
7172 return 0;
7173 return rsize + isize;
7177 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7180 upon failure. */
7182 static int
7183 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7185 unsigned i, count;
7186 int size, offset;
7187 tree itype, elem;
7189 offset = 0;
7190 count = VECTOR_CST_NELTS (expr);
7191 itype = TREE_TYPE (TREE_TYPE (expr));
7192 size = GET_MODE_SIZE (TYPE_MODE (itype));
7193 for (i = 0; i < count; i++)
7195 if (off >= size)
7197 off -= size;
7198 continue;
7200 elem = VECTOR_CST_ELT (expr, i);
7201 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7202 if ((off == -1 && res != size)
7203 || res == 0)
7204 return 0;
7205 offset += res;
7206 if (offset >= len)
7207 return offset;
7208 if (off != -1)
7209 off = 0;
7211 return offset;
7215 /* Subroutine of native_encode_expr. Encode the STRING_CST
7216 specified by EXPR into the buffer PTR of length LEN bytes.
7217 Return the number of bytes placed in the buffer, or zero
7218 upon failure. */
7220 static int
7221 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7223 tree type = TREE_TYPE (expr);
7224 HOST_WIDE_INT total_bytes;
7226 if (TREE_CODE (type) != ARRAY_TYPE
7227 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7228 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7229 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7230 return 0;
7231 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7232 if ((off == -1 && total_bytes > len)
7233 || off >= total_bytes)
7234 return 0;
7235 if (off == -1)
7236 off = 0;
7237 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7239 int written = 0;
7240 if (off < TREE_STRING_LENGTH (expr))
7242 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7243 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7245 memset (ptr + written, 0,
7246 MIN (total_bytes - written, len - written));
7248 else
7249 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7250 return MIN (total_bytes - off, len);
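/* Illustration (editorial note, not in the original source): for the
   initializer of "char buf[8]" holding the STRING_CST "hi" (string
   length 3, counting the terminating NUL), TYPE_SIZE_UNIT is 8, so the
   buffer receives 'h', 'i', '\0' plus five zero bytes of padding and 8
   is returned.  */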
7254 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7255 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7256 specified by EXPR into the buffer PTR of length LEN bytes. If OFF
7257 is not -1 then start the encoding at byte offset OFF and encode at
7258 most LEN bytes. Return the number of bytes placed in the buffer, or zero upon failure. */
7260 int
7261 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7263 switch (TREE_CODE (expr))
7265 case INTEGER_CST:
7266 return native_encode_int (expr, ptr, len, off);
7268 case REAL_CST:
7269 return native_encode_real (expr, ptr, len, off);
7271 case FIXED_CST:
7272 return native_encode_fixed (expr, ptr, len, off);
7274 case COMPLEX_CST:
7275 return native_encode_complex (expr, ptr, len, off);
7277 case VECTOR_CST:
7278 return native_encode_vector (expr, ptr, len, off);
7280 case STRING_CST:
7281 return native_encode_string (expr, ptr, len, off);
7283 default:
7284 return 0;
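/* Usage sketch (editorial illustration of the calling pattern used by
   fold_view_convert_expr below; not part of the original source):

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     tree t = len ? native_interpret_expr (type, buf, len) : NULL_TREE;

   A zero return is the failure indication; successful encodings are
   never empty.  */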
7289 /* Subroutine of native_interpret_expr. Interpret the contents of
7290 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7291 If the buffer cannot be interpreted, return NULL_TREE. */
7293 static tree
7294 native_interpret_int (tree type, const unsigned char *ptr, int len)
7296 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 if (total_bytes > len
7299 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7300 return NULL_TREE;
7302 wide_int result = wi::from_buffer (ptr, total_bytes);
7304 return wide_int_to_tree (type, result);
7308 /* Subroutine of native_interpret_expr. Interpret the contents of
7309 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7310 If the buffer cannot be interpreted, return NULL_TREE. */
7312 static tree
7313 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7315 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7316 double_int result;
7317 FIXED_VALUE_TYPE fixed_value;
7319 if (total_bytes > len
7320 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7321 return NULL_TREE;
7323 result = double_int::from_buffer (ptr, total_bytes);
7324 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7326 return build_fixed (type, fixed_value);
7330 /* Subroutine of native_interpret_expr. Interpret the contents of
7331 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7332 If the buffer cannot be interpreted, return NULL_TREE. */
7334 static tree
7335 native_interpret_real (tree type, const unsigned char *ptr, int len)
7337 machine_mode mode = TYPE_MODE (type);
7338 int total_bytes = GET_MODE_SIZE (mode);
7339 int byte, offset, word, words, bitpos;
7340 unsigned char value;
7341 /* There are always 32 bits in each long, no matter the size of
7342 the host's long. We handle floating point representations with
7343 up to 192 bits. */
7344 REAL_VALUE_TYPE r;
7345 long tmp[6];
7347 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7348 if (total_bytes > len || total_bytes > 24)
7349 return NULL_TREE;
7350 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7352 memset (tmp, 0, sizeof (tmp));
7353 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7354 bitpos += BITS_PER_UNIT)
7356 byte = (bitpos / BITS_PER_UNIT) & 3;
7357 if (UNITS_PER_WORD < 4)
7359 word = byte / UNITS_PER_WORD;
7360 if (WORDS_BIG_ENDIAN)
7361 word = (words - 1) - word;
7362 offset = word * UNITS_PER_WORD;
7363 if (BYTES_BIG_ENDIAN)
7364 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7365 else
7366 offset += byte % UNITS_PER_WORD;
7368 else
7369 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7370 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7372 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7375 real_from_target (&r, tmp, mode);
7376 return build_real (type, r);
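/* Illustration (editorial note, not in the original source): this is
   the inverse of native_encode_real above, so on a little-endian
   target the bytes { 0x00, 0x00, 0x80, 0x3f } interpreted with a
   32-bit float TYPE reconstruct the REAL_CST 1.0.  */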
7380 /* Subroutine of native_interpret_expr. Interpret the contents of
7381 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7382 If the buffer cannot be interpreted, return NULL_TREE. */
7384 static tree
7385 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7387 tree etype, rpart, ipart;
7388 int size;
7390 etype = TREE_TYPE (type);
7391 size = GET_MODE_SIZE (TYPE_MODE (etype));
7392 if (size * 2 > len)
7393 return NULL_TREE;
7394 rpart = native_interpret_expr (etype, ptr, size);
7395 if (!rpart)
7396 return NULL_TREE;
7397 ipart = native_interpret_expr (etype, ptr+size, size);
7398 if (!ipart)
7399 return NULL_TREE;
7400 return build_complex (type, rpart, ipart);
7404 /* Subroutine of native_interpret_expr. Interpret the contents of
7405 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7406 If the buffer cannot be interpreted, return NULL_TREE. */
7408 static tree
7409 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7411 tree etype, elem;
7412 int i, size, count;
7413 tree *elements;
7415 etype = TREE_TYPE (type);
7416 size = GET_MODE_SIZE (TYPE_MODE (etype));
7417 count = TYPE_VECTOR_SUBPARTS (type);
7418 if (size * count > len)
7419 return NULL_TREE;
7421 elements = XALLOCAVEC (tree, count);
7422 for (i = count - 1; i >= 0; i--)
7424 elem = native_interpret_expr (etype, ptr+(i*size), size);
7425 if (!elem)
7426 return NULL_TREE;
7427 elements[i] = elem;
7429 return build_vector (type, elements);
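/* Illustration (editorial note, not in the original source): element I
   of the result is interpreted from PTR + I * SIZE, so the 16 bytes
   produced by the V4SI example under native_encode_vector round-trip
   back to the VECTOR_CST { 1, 2, 3, 4 }.  */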
7433 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7434 the buffer PTR of length LEN as a constant of type TYPE. For
7435 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7436 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7437 return NULL_TREE. */
7439 tree
7440 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7442 switch (TREE_CODE (type))
7444 case INTEGER_TYPE:
7445 case ENUMERAL_TYPE:
7446 case BOOLEAN_TYPE:
7447 case POINTER_TYPE:
7448 case REFERENCE_TYPE:
7449 return native_interpret_int (type, ptr, len);
7451 case REAL_TYPE:
7452 return native_interpret_real (type, ptr, len);
7454 case FIXED_POINT_TYPE:
7455 return native_interpret_fixed (type, ptr, len);
7457 case COMPLEX_TYPE:
7458 return native_interpret_complex (type, ptr, len);
7460 case VECTOR_TYPE:
7461 return native_interpret_vector (type, ptr, len);
7463 default:
7464 return NULL_TREE;
7468 /* Returns true if we can interpret the contents of a native encoding
7469 as TYPE. */
7471 static bool
7472 can_native_interpret_type_p (tree type)
7474 switch (TREE_CODE (type))
7476 case INTEGER_TYPE:
7477 case ENUMERAL_TYPE:
7478 case BOOLEAN_TYPE:
7479 case POINTER_TYPE:
7480 case REFERENCE_TYPE:
7481 case FIXED_POINT_TYPE:
7482 case REAL_TYPE:
7483 case COMPLEX_TYPE:
7484 case VECTOR_TYPE:
7485 return true;
7486 default:
7487 return false;
7491 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7492 TYPE at compile-time. If we're unable to perform the conversion
7493 return NULL_TREE. */
7495 static tree
7496 fold_view_convert_expr (tree type, tree expr)
7498 /* We support up to 512-bit values (for V8DFmode). */
7499 unsigned char buffer[64];
7500 int len;
7502 /* Check that the host and target are sane. */
7503 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7504 return NULL_TREE;
7506 len = native_encode_expr (expr, buffer, sizeof (buffer));
7507 if (len == 0)
7508 return NULL_TREE;
7510 return native_interpret_expr (type, buffer, len);
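/* Worked example (editorial illustration, not in the original source):
   assuming 32-bit int and IEEE single precision float,

     VIEW_CONVERT_EXPR<int>(1.0f)

   encodes 1.0f into the buffer and reinterprets those bytes as an int,
   folding to the INTEGER_CST 1065353216 (0x3f800000) on any byte order,
   since native_encode_expr and native_interpret_expr agree on the
   target layout.  */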
7513 /* Build an expression for the address of T. Folds away INDIRECT_REF
7514 to avoid confusing the gimplify process. */
7516 tree
7517 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7519 /* The size of the object is not relevant when talking about its address. */
7520 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7521 t = TREE_OPERAND (t, 0);
7523 if (TREE_CODE (t) == INDIRECT_REF)
7525 t = TREE_OPERAND (t, 0);
7527 if (TREE_TYPE (t) != ptrtype)
7528 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7530 else if (TREE_CODE (t) == MEM_REF
7531 && integer_zerop (TREE_OPERAND (t, 1)))
7532 return TREE_OPERAND (t, 0);
7533 else if (TREE_CODE (t) == MEM_REF
7534 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7535 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7536 TREE_OPERAND (t, 0),
7537 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7538 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7540 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7542 if (TREE_TYPE (t) != ptrtype)
7543 t = fold_convert_loc (loc, ptrtype, t);
7545 else
7546 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7548 return t;
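/* Summary of the cases above (editorial note, not in the original
   source):

     &*p                      -> p
     &MEM_REF[p, 0]           -> p
     &MEM_REF[cst, 4]         -> cst p+ 4
     &VIEW_CONVERT_EXPR<T>(x) -> &x, converted to PTRTYPE if needed

   Anything else becomes a plain ADDR_EXPR of PTRTYPE.  */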
7551 /* Build an expression for the address of T. */
7553 tree
7554 build_fold_addr_expr_loc (location_t loc, tree t)
7556 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7558 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7561 static bool vec_cst_ctor_to_array (tree, tree *);
7563 /* Fold a unary expression of code CODE and type TYPE with operand
7564 OP0. Return the folded expression if folding is successful.
7565 Otherwise, return NULL_TREE. */
7567 tree
7568 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7570 tree tem;
7571 tree arg0;
7572 enum tree_code_class kind = TREE_CODE_CLASS (code);
7574 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7575 && TREE_CODE_LENGTH (code) == 1);
7577 tem = generic_simplify (loc, code, type, op0);
7578 if (tem)
7579 return tem;
7581 arg0 = op0;
7582 if (arg0)
7584 if (CONVERT_EXPR_CODE_P (code)
7585 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7587 /* Don't use STRIP_NOPS, because signedness of argument type
7588 matters. */
7589 STRIP_SIGN_NOPS (arg0);
7591 else
7593 /* Strip any conversions that don't change the mode. This
7594 is safe for every expression, except for a comparison
7595 expression because its signedness is derived from its
7596 operands.
7598 Note that this is done as an internal manipulation within
7599 the constant folder, in order to find the simplest
7600 representation of the arguments so that their form can be
7601 studied. In any case, the appropriate type conversions
7602 should be put back in the tree that will get out of the
7603 constant folder. */
7604 STRIP_NOPS (arg0);
7608 if (TREE_CODE_CLASS (code) == tcc_unary)
7610 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7611 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7612 fold_build1_loc (loc, code, type,
7613 fold_convert_loc (loc, TREE_TYPE (op0),
7614 TREE_OPERAND (arg0, 1))));
7615 else if (TREE_CODE (arg0) == COND_EXPR)
7617 tree arg01 = TREE_OPERAND (arg0, 1);
7618 tree arg02 = TREE_OPERAND (arg0, 2);
7619 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7620 arg01 = fold_build1_loc (loc, code, type,
7621 fold_convert_loc (loc,
7622 TREE_TYPE (op0), arg01));
7623 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7624 arg02 = fold_build1_loc (loc, code, type,
7625 fold_convert_loc (loc,
7626 TREE_TYPE (op0), arg02));
7627 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7628 arg01, arg02);
7630 /* If this was a conversion, and all we did was move it
7631 inside the COND_EXPR, bring it back out. But leave it if
7632 it is a conversion from integer to integer and the
7633 result precision is no wider than a word since such a
7634 conversion is cheap and may be optimized away by combine,
7635 while it couldn't if it were outside the COND_EXPR. Then return
7636 so we don't get into an infinite recursion loop taking the
7637 conversion out and then back in. */
7639 if ((CONVERT_EXPR_CODE_P (code)
7640 || code == NON_LVALUE_EXPR)
7641 && TREE_CODE (tem) == COND_EXPR
7642 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7643 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7644 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7645 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7646 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7647 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7648 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7649 && (INTEGRAL_TYPE_P
7650 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7651 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7652 || flag_syntax_only))
7653 tem = build1_loc (loc, code, type,
7654 build3 (COND_EXPR,
7655 TREE_TYPE (TREE_OPERAND
7656 (TREE_OPERAND (tem, 1), 0)),
7657 TREE_OPERAND (tem, 0),
7658 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7659 TREE_OPERAND (TREE_OPERAND (tem, 2),
7660 0)));
7661 return tem;
7665 switch (code)
7667 case NON_LVALUE_EXPR:
7668 if (!maybe_lvalue_p (op0))
7669 return fold_convert_loc (loc, type, op0);
7670 return NULL_TREE;
7672 CASE_CONVERT:
7673 case FLOAT_EXPR:
7674 case FIX_TRUNC_EXPR:
7675 if (COMPARISON_CLASS_P (op0))
7677 /* If we have (type) (a CMP b) and type is an integral type, return a
7678 new expression involving the new type. Canonicalize
7679 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7680 non-integral type.
7681 Do not fold the result as that would not simplify further; also,
7682 folding again results in recursion. */
7683 if (TREE_CODE (type) == BOOLEAN_TYPE)
7684 return build2_loc (loc, TREE_CODE (op0), type,
7685 TREE_OPERAND (op0, 0),
7686 TREE_OPERAND (op0, 1));
7687 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7688 && TREE_CODE (type) != VECTOR_TYPE)
7689 return build3_loc (loc, COND_EXPR, type, op0,
7690 constant_boolean_node (true, type),
7691 constant_boolean_node (false, type));
7694 /* Handle cases of two conversions in a row. */
7695 if (CONVERT_EXPR_P (op0))
7697 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7698 tree inter_type = TREE_TYPE (op0);
7699 int inside_int = INTEGRAL_TYPE_P (inside_type);
7700 int inside_ptr = POINTER_TYPE_P (inside_type);
7701 int inside_float = FLOAT_TYPE_P (inside_type);
7702 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7703 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7704 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7705 int inter_int = INTEGRAL_TYPE_P (inter_type);
7706 int inter_ptr = POINTER_TYPE_P (inter_type);
7707 int inter_float = FLOAT_TYPE_P (inter_type);
7708 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7709 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7710 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7711 int final_int = INTEGRAL_TYPE_P (type);
7712 int final_ptr = POINTER_TYPE_P (type);
7713 int final_float = FLOAT_TYPE_P (type);
7714 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7715 unsigned int final_prec = TYPE_PRECISION (type);
7716 int final_unsignedp = TYPE_UNSIGNED (type);
7718 /* In addition to the cases of two conversions in a row
7719 handled below, if we are converting something to its own
7720 type via an object of identical or wider precision, neither
7721 conversion is needed. */
7722 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7723 && (((inter_int || inter_ptr) && final_int)
7724 || (inter_float && final_float))
7725 && inter_prec >= final_prec)
7726 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7728 /* Likewise, if the intermediate and initial types are either both
7729 float or both integer, we don't need the middle conversion if the
7730 former is wider than the latter and doesn't change the signedness
7731 (for integers). Avoid this if the final type is a pointer since
7732 then we sometimes need the middle conversion. Likewise if the
7733 final type has a precision not equal to the size of its mode. */
7734 if (((inter_int && inside_int)
7735 || (inter_float && inside_float)
7736 || (inter_vec && inside_vec))
7737 && inter_prec >= inside_prec
7738 && (inter_float || inter_vec
7739 || inter_unsignedp == inside_unsignedp)
7740 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7741 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7742 && ! final_ptr
7743 && (! final_vec || inter_prec == inside_prec))
7744 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7746 /* If we have a sign-extension of a zero-extended value, we can
7747 replace that by a single zero-extension. Likewise if the
7748 final conversion does not change precision we can drop the
7749 intermediate conversion. */
7750 if (inside_int && inter_int && final_int
7751 && ((inside_prec < inter_prec && inter_prec < final_prec
7752 && inside_unsignedp && !inter_unsignedp)
7753 || final_prec == inter_prec))
7754 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7756 /* Two conversions in a row are not needed unless:
7757 - some conversion is floating-point (overstrict for now), or
7758 - some conversion is a vector (overstrict for now), or
7759 - the intermediate type is narrower than both initial and
7760 final, or
7761 - the intermediate type and innermost type differ in signedness,
7762 and the outermost type is wider than the intermediate, or
7763 - the initial type is a pointer type and the precisions of the
7764 intermediate and final types differ, or
7765 - the final type is a pointer type and the precisions of the
7766 initial and intermediate types differ. */
7767 if (! inside_float && ! inter_float && ! final_float
7768 && ! inside_vec && ! inter_vec && ! final_vec
7769 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7770 && ! (inside_int && inter_int
7771 && inter_unsignedp != inside_unsignedp
7772 && inter_prec < final_prec)
7773 && ((inter_unsignedp && inter_prec > inside_prec)
7774 == (final_unsignedp && final_prec > inter_prec))
7775 && ! (inside_ptr && inter_prec != final_prec)
7776 && ! (final_ptr && inside_prec != inter_prec)
7777 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7778 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7779 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
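/* Concrete instances of the rules above (editorial illustration, not
   in the original source), assuming 32-bit int and 64-bit long long:

     (int) (long long) i   with int i             -> i
       (conversion to its own type via a wider intermediate object)
     (long long) (int) us  with unsigned short us -> (long long) us
       (sign-extension of a zero-extended value becomes a single
        zero-extension)  */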
7782 /* Handle (T *)&A.B.C for A being of type T and B and C
7783 living at offset zero. This occurs frequently in
7784 C++ upcasting and then accessing the base. */
7785 if (TREE_CODE (op0) == ADDR_EXPR
7786 && POINTER_TYPE_P (type)
7787 && handled_component_p (TREE_OPERAND (op0, 0)))
7789 HOST_WIDE_INT bitsize, bitpos;
7790 tree offset;
7791 machine_mode mode;
7792 int unsignedp, volatilep;
7793 tree base = TREE_OPERAND (op0, 0);
7794 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7795 &mode, &unsignedp, &volatilep, false);
7796 /* If the reference was to a (constant) zero offset, we can use
7797 the address of the base if it has the same base type
7798 as the result type and the pointer type is unqualified. */
7799 if (! offset && bitpos == 0
7800 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7801 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7802 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7803 return fold_convert_loc (loc, type,
7804 build_fold_addr_expr_loc (loc, base));
7807 if (TREE_CODE (op0) == MODIFY_EXPR
7808 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7809 /* Detect assigning a bitfield. */
7810 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7811 && DECL_BIT_FIELD
7812 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7814 /* Don't leave an assignment inside a conversion
7815 unless assigning a bitfield. */
7816 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7817 /* First do the assignment, then return converted constant. */
7818 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7819 TREE_NO_WARNING (tem) = 1;
7820 TREE_USED (tem) = 1;
7821 return tem;
7824 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7825 constant (if x has signed type, the sign bit cannot be set
7826 in c). This folds extension into the BIT_AND_EXPR.
7827 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7828 very likely don't have maximal range for their precision and this
7829 transformation effectively doesn't preserve non-maximal ranges. */
7830 if (TREE_CODE (type) == INTEGER_TYPE
7831 && TREE_CODE (op0) == BIT_AND_EXPR
7832 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7834 tree and_expr = op0;
7835 tree and0 = TREE_OPERAND (and_expr, 0);
7836 tree and1 = TREE_OPERAND (and_expr, 1);
7837 int change = 0;
7839 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7840 || (TYPE_PRECISION (type)
7841 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7842 change = 1;
7843 else if (TYPE_PRECISION (TREE_TYPE (and1))
7844 <= HOST_BITS_PER_WIDE_INT
7845 && tree_fits_uhwi_p (and1))
7847 unsigned HOST_WIDE_INT cst;
7849 cst = tree_to_uhwi (and1);
7850 cst &= HOST_WIDE_INT_M1U
7851 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7852 change = (cst == 0);
7853 #ifdef LOAD_EXTEND_OP
7854 if (change
7855 && !flag_syntax_only
7856 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7857 == ZERO_EXTEND))
7859 tree uns = unsigned_type_for (TREE_TYPE (and0));
7860 and0 = fold_convert_loc (loc, uns, and0);
7861 and1 = fold_convert_loc (loc, uns, and1);
7863 #endif
7865 if (change)
7867 tem = force_fit_type (type, wi::to_widest (and1), 0,
7868 TREE_OVERFLOW (and1));
7869 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7870 fold_convert_loc (loc, type, and0), tem);
7874 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7875 when one of the new casts will fold away. Conservatively we assume
7876 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7877 if (POINTER_TYPE_P (type)
7878 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7879 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7880 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7881 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7882 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7884 tree arg00 = TREE_OPERAND (arg0, 0);
7885 tree arg01 = TREE_OPERAND (arg0, 1);
7887 return fold_build_pointer_plus_loc
7888 (loc, fold_convert_loc (loc, type, arg00), arg01);
7891 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7892 of the same precision, and X is an integer type not narrower than
7893 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7894 if (INTEGRAL_TYPE_P (type)
7895 && TREE_CODE (op0) == BIT_NOT_EXPR
7896 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7897 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7898 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7900 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7901 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7902 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7903 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7904 fold_convert_loc (loc, type, tem));
7907 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7908 type of X and Y (integer types only). */
7909 if (INTEGRAL_TYPE_P (type)
7910 && TREE_CODE (op0) == MULT_EXPR
7911 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7912 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7914 /* Be careful not to introduce new overflows. */
7915 tree mult_type;
7916 if (TYPE_OVERFLOW_WRAPS (type))
7917 mult_type = type;
7918 else
7919 mult_type = unsigned_type_for (type);
7921 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7923 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7924 fold_convert_loc (loc, mult_type,
7925 TREE_OPERAND (op0, 0)),
7926 fold_convert_loc (loc, mult_type,
7927 TREE_OPERAND (op0, 1)));
7928 return fold_convert_loc (loc, type, tem);
7932 tem = fold_convert_const (code, type, arg0);
7933 return tem ? tem : NULL_TREE;
7935 case ADDR_SPACE_CONVERT_EXPR:
7936 if (integer_zerop (arg0))
7937 return fold_convert_const (code, type, arg0);
7938 return NULL_TREE;
7940 case FIXED_CONVERT_EXPR:
7941 tem = fold_convert_const (code, type, arg0);
7942 return tem ? tem : NULL_TREE;
7944 case VIEW_CONVERT_EXPR:
7945 if (TREE_CODE (op0) == MEM_REF)
7946 return fold_build2_loc (loc, MEM_REF, type,
7947 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7949 return fold_view_convert_expr (type, op0);
7951 case NEGATE_EXPR:
7952 tem = fold_negate_expr (loc, arg0);
7953 if (tem)
7954 return fold_convert_loc (loc, type, tem);
7955 return NULL_TREE;
7957 case ABS_EXPR:
7958 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7959 return fold_abs_const (arg0, type);
7960 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7961 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7962 /* Convert fabs((double)float) into (double)fabsf(float). */
7963 else if (TREE_CODE (arg0) == NOP_EXPR
7964 && TREE_CODE (type) == REAL_TYPE)
7966 tree targ0 = strip_float_extensions (arg0);
7967 if (targ0 != arg0)
7968 return fold_convert_loc (loc, type,
7969 fold_build1_loc (loc, ABS_EXPR,
7970 TREE_TYPE (targ0),
7971 targ0));
7973 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7974 else if (TREE_CODE (arg0) == ABS_EXPR)
7975 return arg0;
7976 else if (tree_expr_nonnegative_p (arg0))
7977 return arg0;
7979 /* Strip sign ops from argument. */
7980 if (TREE_CODE (type) == REAL_TYPE)
7982 tem = fold_strip_sign_ops (arg0);
7983 if (tem)
7984 return fold_build1_loc (loc, ABS_EXPR, type,
7985 fold_convert_loc (loc, type, tem));
7987 return NULL_TREE;
7989 case CONJ_EXPR:
7990 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7991 return fold_convert_loc (loc, type, arg0);
7992 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7994 tree itype = TREE_TYPE (type);
7995 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7996 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7997 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7998 negate_expr (ipart));
8000 if (TREE_CODE (arg0) == COMPLEX_CST)
8002 tree itype = TREE_TYPE (type);
8003 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8004 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8005 return build_complex (type, rpart, negate_expr (ipart));
8007 if (TREE_CODE (arg0) == CONJ_EXPR)
8008 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8009 return NULL_TREE;
8011 case BIT_NOT_EXPR:
8012 if (TREE_CODE (arg0) == INTEGER_CST)
8013 return fold_not_const (arg0, type);
8014 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8015 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8016 /* Convert ~ (-A) to A - 1. */
8017 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8018 return fold_build2_loc (loc, MINUS_EXPR, type,
8019 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8020 build_int_cst (type, 1));
8021 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8022 else if (INTEGRAL_TYPE_P (type)
8023 && ((TREE_CODE (arg0) == MINUS_EXPR
8024 && integer_onep (TREE_OPERAND (arg0, 1)))
8025 || (TREE_CODE (arg0) == PLUS_EXPR
8026 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8027 return fold_build1_loc (loc, NEGATE_EXPR, type,
8028 fold_convert_loc (loc, type,
8029 TREE_OPERAND (arg0, 0)));
8030 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8031 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8032 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8033 fold_convert_loc (loc, type,
8034 TREE_OPERAND (arg0, 0)))))
8035 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8036 fold_convert_loc (loc, type,
8037 TREE_OPERAND (arg0, 1)));
8038 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8039 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8040 fold_convert_loc (loc, type,
8041 TREE_OPERAND (arg0, 1)))))
8042 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8043 fold_convert_loc (loc, type,
8044 TREE_OPERAND (arg0, 0)), tem);
8045 /* Perform BIT_NOT_EXPR on each element individually. */
8046 else if (TREE_CODE (arg0) == VECTOR_CST)
8048 tree *elements;
8049 tree elem;
8050 unsigned count = VECTOR_CST_NELTS (arg0), i;
8052 elements = XALLOCAVEC (tree, count);
8053 for (i = 0; i < count; i++)
8055 elem = VECTOR_CST_ELT (arg0, i);
8056 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8057 if (elem == NULL_TREE)
8058 break;
8059 elements[i] = elem;
8061 if (i == count)
8062 return build_vector (type, elements);
8064 else if (COMPARISON_CLASS_P (arg0)
8065 && (VECTOR_TYPE_P (type)
8066 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8068 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8069 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8070 HONOR_NANS (TYPE_MODE (op_type)));
8071 if (subcode != ERROR_MARK)
8072 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8073 TREE_OPERAND (arg0, 1));
8077 return NULL_TREE;
8079 case TRUTH_NOT_EXPR:
8080 /* Note that the operand of this must be an int
8081 and its values must be 0 or 1.
8082 ("true" is a fixed value perhaps depending on the language,
8083 but we don't handle values other than 1 correctly yet.) */
8084 tem = fold_truth_not_expr (loc, arg0);
8085 if (!tem)
8086 return NULL_TREE;
8087 return fold_convert_loc (loc, type, tem);
8089 case REALPART_EXPR:
8090 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8091 return fold_convert_loc (loc, type, arg0);
8092 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8093 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8094 TREE_OPERAND (arg0, 1));
8095 if (TREE_CODE (arg0) == COMPLEX_CST)
8096 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8097 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8099 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8100 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8101 fold_build1_loc (loc, REALPART_EXPR, itype,
8102 TREE_OPERAND (arg0, 0)),
8103 fold_build1_loc (loc, REALPART_EXPR, itype,
8104 TREE_OPERAND (arg0, 1)));
8105 return fold_convert_loc (loc, type, tem);
8107 if (TREE_CODE (arg0) == CONJ_EXPR)
8109 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8110 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8111 TREE_OPERAND (arg0, 0));
8112 return fold_convert_loc (loc, type, tem);
8114 if (TREE_CODE (arg0) == CALL_EXPR)
8116 tree fn = get_callee_fndecl (arg0);
8117 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8118 switch (DECL_FUNCTION_CODE (fn))
8120 CASE_FLT_FN (BUILT_IN_CEXPI):
8121 fn = mathfn_built_in (type, BUILT_IN_COS);
8122 if (fn)
8123 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8124 break;
8126 default:
8127 break;
8130 return NULL_TREE;
8132 case IMAGPART_EXPR:
8133 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8134 return build_zero_cst (type);
8135 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8136 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8137 TREE_OPERAND (arg0, 0));
8138 if (TREE_CODE (arg0) == COMPLEX_CST)
8139 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8140 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8142 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8143 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8144 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8145 TREE_OPERAND (arg0, 0)),
8146 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8147 TREE_OPERAND (arg0, 1)));
8148 return fold_convert_loc (loc, type, tem);
8150 if (TREE_CODE (arg0) == CONJ_EXPR)
8152 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8153 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8154 return fold_convert_loc (loc, type, negate_expr (tem));
8156 if (TREE_CODE (arg0) == CALL_EXPR)
8158 tree fn = get_callee_fndecl (arg0);
8159 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8160 switch (DECL_FUNCTION_CODE (fn))
8162 CASE_FLT_FN (BUILT_IN_CEXPI):
8163 fn = mathfn_built_in (type, BUILT_IN_SIN);
8164 if (fn)
8165 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8166 break;
8168 default:
8169 break;
8172 return NULL_TREE;
8174 case INDIRECT_REF:
8175 /* Fold *&X to X if X is an lvalue. */
8176 if (TREE_CODE (op0) == ADDR_EXPR)
8178 tree op00 = TREE_OPERAND (op0, 0);
8179 if ((TREE_CODE (op00) == VAR_DECL
8180 || TREE_CODE (op00) == PARM_DECL
8181 || TREE_CODE (op00) == RESULT_DECL)
8182 && !TREE_READONLY (op00))
8183 return op00;
8185 return NULL_TREE;
8187 case VEC_UNPACK_LO_EXPR:
8188 case VEC_UNPACK_HI_EXPR:
8189 case VEC_UNPACK_FLOAT_LO_EXPR:
8190 case VEC_UNPACK_FLOAT_HI_EXPR:
8192 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8193 tree *elts;
8194 enum tree_code subcode;
8196 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8197 if (TREE_CODE (arg0) != VECTOR_CST)
8198 return NULL_TREE;
8200 elts = XALLOCAVEC (tree, nelts * 2);
8201 if (!vec_cst_ctor_to_array (arg0, elts))
8202 return NULL_TREE;
8204 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8205 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8206 elts += nelts;
8208 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8209 subcode = NOP_EXPR;
8210 else
8211 subcode = FLOAT_EXPR;
8213 for (i = 0; i < nelts; i++)
8215 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8216 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8217 return NULL_TREE;
8220 return build_vector (type, elts);
8223 case REDUC_MIN_EXPR:
8224 case REDUC_MAX_EXPR:
8225 case REDUC_PLUS_EXPR:
8227 unsigned int nelts, i;
8228 tree *elts;
8229 enum tree_code subcode;
8231 if (TREE_CODE (op0) != VECTOR_CST)
8232 return NULL_TREE;
8233 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8235 elts = XALLOCAVEC (tree, nelts);
8236 if (!vec_cst_ctor_to_array (op0, elts))
8237 return NULL_TREE;
8239 switch (code)
8241 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8242 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8243 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8244 default: gcc_unreachable ();
8247 for (i = 1; i < nelts; i++)
8249 elts[0] = const_binop (subcode, elts[0], elts[i]);
8250 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8251 return NULL_TREE;
8254 return elts[0];
8257 default:
8258 return NULL_TREE;
8259 } /* switch (code) */
8263 /* If the operation was a conversion do _not_ mark a resulting constant
8264 with TREE_OVERFLOW if the original constant was not. These conversions
8265 have implementation-defined behavior and retaining the TREE_OVERFLOW
8266 flag here would confuse later passes such as VRP. */
8267 tree
8268 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8269 tree type, tree op0)
8271 tree res = fold_unary_loc (loc, code, type, op0);
8272 if (res
8273 && TREE_CODE (res) == INTEGER_CST
8274 && TREE_CODE (op0) == INTEGER_CST
8275 && CONVERT_EXPR_CODE_P (code))
8276 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8278 return res;
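/* Illustration (editorial note, not in the original source; the exact
   overflow flagging depends on force_fit_type): folding
   (signed char) 300 yields the INTEGER_CST 44 with TREE_OVERFLOW set
   by fold_unary_loc, and since the original constant 300 carried no
   overflow, this wrapper clears the flag again before passes like VRP
   can be confused by it.  */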
8281 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8282 operands OP0 and OP1. LOC is the location of the resulting expression.
8283 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8284 Return the folded expression if folding is successful. Otherwise,
8285 return NULL_TREE. */
8286 static tree
8287 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8288 tree arg0, tree arg1, tree op0, tree op1)
8290 tree tem;
8292 /* We only do these simplifications if we are optimizing. */
8293 if (!optimize)
8294 return NULL_TREE;
8296 /* Check for things like (A || B) && (A || C). We can convert this
8297 to A || (B && C). Note that either operator can be any of the four
8298 truth and/or operations and the transformation will still be
8299 valid. Also note that we only care about order for the
8300 ANDIF and ORIF operators. If B contains side effects, this
8301 might change the truth-value of A. */
8302 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8303 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8304 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8305 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8306 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8307 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8309 tree a00 = TREE_OPERAND (arg0, 0);
8310 tree a01 = TREE_OPERAND (arg0, 1);
8311 tree a10 = TREE_OPERAND (arg1, 0);
8312 tree a11 = TREE_OPERAND (arg1, 1);
8313 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8314 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8315 && (code == TRUTH_AND_EXPR
8316 || code == TRUTH_OR_EXPR));
8318 if (operand_equal_p (a00, a10, 0))
8319 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8320 fold_build2_loc (loc, code, type, a01, a11));
8321 else if (commutative && operand_equal_p (a00, a11, 0))
8322 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8323 fold_build2_loc (loc, code, type, a01, a10));
8324 else if (commutative && operand_equal_p (a01, a10, 0))
8325 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8326 fold_build2_loc (loc, code, type, a00, a11));
8328 /* This case is tricky because we must either have commutative
8329 operators or else A10 must not have side-effects. */
8331 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8332 && operand_equal_p (a01, a11, 0))
8333 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8334 fold_build2_loc (loc, code, type, a00, a10),
8335 a01);
8338 /* See if we can build a range comparison. */
8339 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8340 return tem;
8342 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8343 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8345 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8346 if (tem)
8347 return fold_build2_loc (loc, code, type, tem, arg1);
8350 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8351 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8353 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8354 if (tem)
8355 return fold_build2_loc (loc, code, type, arg0, tem);
8358 /* Check for the possibility of merging component references. If our
8359 lhs is another similar operation, try to merge its rhs with our
8360 rhs. Then try to merge our lhs and rhs. */
8361 if (TREE_CODE (arg0) == code
8362 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8363 TREE_OPERAND (arg0, 1), arg1)))
8364 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8366 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8367 return tem;
8369 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8370 && (code == TRUTH_AND_EXPR
8371 || code == TRUTH_ANDIF_EXPR
8372 || code == TRUTH_OR_EXPR
8373 || code == TRUTH_ORIF_EXPR))
8375 enum tree_code ncode, icode;
8377 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8378 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8379 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8381 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8382 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8383 We don't want to pack more than two leaves into a non-IF AND/OR
8384 expression.
8385 If the tree code of the left-hand operand isn't an AND/OR-IF code
8386 and isn't equal to IF-CODE, then we don't want to add the right-hand
8387 operand. If the inner right-hand side of the left-hand operand has
8388 side-effects, or isn't simple, then we can't add to it, as otherwise
8389 we might destroy the if-sequence. */
8390 if (TREE_CODE (arg0) == icode
8391 && simple_operand_p_2 (arg1)
8392 /* Needed for sequence points to handle trappings, and
8393 side-effects. */
8394 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8396 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8397 arg1);
8398 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8399 tem);
8401 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8402 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8403 else if (TREE_CODE (arg1) == icode
8404 && simple_operand_p_2 (arg0)
8405 /* Needed for sequence points to handle trappings, and
8406 side-effects. */
8407 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8409 tem = fold_build2_loc (loc, ncode, type,
8410 arg0, TREE_OPERAND (arg1, 0));
8411 return fold_build2_loc (loc, icode, type, tem,
8412 TREE_OPERAND (arg1, 1));
8414 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8415 into (A OR B).
8416 For sequence point consistency, we need to check for trapping,
8417 and side-effects. */
8418 else if (code == icode && simple_operand_p_2 (arg0)
8419 && simple_operand_p_2 (arg1))
8420 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8423 return NULL_TREE;
8426 /* Fold a binary expression of code CODE and type TYPE with operands
8427 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8428 Return the folded expression if folding is successful. Otherwise,
8429 return NULL_TREE. */
8431 static tree
8432 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8434 enum tree_code compl_code;
8436 if (code == MIN_EXPR)
8437 compl_code = MAX_EXPR;
8438 else if (code == MAX_EXPR)
8439 compl_code = MIN_EXPR;
8440 else
8441 gcc_unreachable ();
8443 /* MIN (MAX (a, b), b) == b. */
8444 if (TREE_CODE (op0) == compl_code
8445 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8446 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8448 /* MIN (MAX (b, a), b) == b. */
8449 if (TREE_CODE (op0) == compl_code
8450 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8451 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8452 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8454 /* MIN (a, MAX (a, b)) == a. */
8455 if (TREE_CODE (op1) == compl_code
8456 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8457 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8458 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8460 /* MIN (a, MAX (b, a)) == a. */
8461 if (TREE_CODE (op1) == compl_code
8462 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8463 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8464 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8466 return NULL_TREE;
8469 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8470 by changing CODE to reduce the magnitude of constants involved in
8471 ARG0 of the comparison.
8472 Returns a canonicalized comparison tree if a simplification was
8473 possible, otherwise returns NULL_TREE.
8474 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8475 valid if signed overflow is undefined. */
8477 static tree
8478 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8479 tree arg0, tree arg1,
8480 bool *strict_overflow_p)
8482 enum tree_code code0 = TREE_CODE (arg0);
8483 tree t, cst0 = NULL_TREE;
8484 int sgn0;
8485 bool swap = false;
8487 /* Match A +- CST code arg1 and CST code arg1. We can change the
8488 first form only if overflow is undefined. */
8489 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8490 /* In principle pointers also have undefined overflow behavior,
8491 but that causes problems elsewhere. */
8492 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8493 && (code0 == MINUS_EXPR
8494 || code0 == PLUS_EXPR)
8495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8496 || code0 == INTEGER_CST))
8497 return NULL_TREE;
8499 /* Identify the constant in arg0 and its sign. */
8500 if (code0 == INTEGER_CST)
8501 cst0 = arg0;
8502 else
8503 cst0 = TREE_OPERAND (arg0, 1);
8504 sgn0 = tree_int_cst_sgn (cst0);
8506 /* Overflowed constants and zero will cause problems. */
8507 if (integer_zerop (cst0)
8508 || TREE_OVERFLOW (cst0))
8509 return NULL_TREE;
8511 /* See if we can reduce the magnitude of the constant in
8512 arg0 by changing the comparison code. */
8513 if (code0 == INTEGER_CST)
8515 /* CST <= arg1 -> CST-1 < arg1. */
8516 if (code == LE_EXPR && sgn0 == 1)
8517 code = LT_EXPR;
8518 /* -CST < arg1 -> -CST-1 <= arg1. */
8519 else if (code == LT_EXPR && sgn0 == -1)
8520 code = LE_EXPR;
8521 /* CST > arg1 -> CST-1 >= arg1. */
8522 else if (code == GT_EXPR && sgn0 == 1)
8523 code = GE_EXPR;
8524 /* -CST >= arg1 -> -CST-1 > arg1. */
8525 else if (code == GE_EXPR && sgn0 == -1)
8526 code = GT_EXPR;
8527 else
8528 return NULL_TREE;
8529 /* arg1 code' CST' might be more canonical. */
8530 swap = true;
8532 else
8534 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8535 if (code == LT_EXPR
8536 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8537 code = LE_EXPR;
8538 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8539 else if (code == GT_EXPR
8540 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8541 code = GE_EXPR;
8542 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8543 else if (code == LE_EXPR
8544 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8545 code = LT_EXPR;
8546 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8547 else if (code == GE_EXPR
8548 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8549 code = GT_EXPR;
8550 else
8551 return NULL_TREE;
8552 *strict_overflow_p = true;
8555 /* Now build the constant reduced in magnitude. But not if that
8556 would produce one outside of its type's range. */
8557 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8558 && ((sgn0 == 1
8559 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8560 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8561 || (sgn0 == -1
8562 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8563 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8564 /* We cannot swap the comparison here as that would cause us to
8565 endlessly recurse. */
8566 return NULL_TREE;
8568 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8569 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8570 if (code0 != INTEGER_CST)
8571 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8572 t = fold_convert (TREE_TYPE (arg1), t);
8574 /* If swapping might yield a more canonical form, do so. */
8575 if (swap)
8576 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8577 else
8578 return fold_build2_loc (loc, code, type, t, arg1);
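/* Worked examples for the helper above (editorial illustration, not in
   the original source), with signed int operands:

     x - 5 < y   becomes   x - 4 <= y   (valid only if signed overflow
                                          is undefined, so
                                          *STRICT_OVERFLOW_P is set)
     5 <= y      becomes   y > 4        (constant case: reduced and then
                                          swapped, no overflow assumption
                                          needed)  */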
8581 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8582 overflow further. Try to decrease the magnitude of constants involved
8583 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8584 and put sole constants at the second argument position.
8585 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8587 static tree
8588 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8589 tree arg0, tree arg1)
8591 tree t;
8592 bool strict_overflow_p;
8593 const char * const warnmsg = G_("assuming signed overflow does not occur "
8594 "when reducing constant in comparison");
8596 /* Try canonicalization by simplifying arg0. */
8597 strict_overflow_p = false;
8598 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8599 &strict_overflow_p);
8600 if (t)
8602 if (strict_overflow_p)
8603 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8604 return t;
8607 /* Try canonicalization by simplifying arg1 using the swapped
8608 comparison. */
8609 code = swap_tree_comparison (code);
8610 strict_overflow_p = false;
8611 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8612 &strict_overflow_p);
8613 if (t && strict_overflow_p)
8614 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8615 return t;
8618 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8619 space. This is used to avoid issuing overflow warnings for
8620 expressions like &p->x which cannot wrap. */
8622 static bool
8623 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8625 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8626 return true;
8628 if (bitpos < 0)
8629 return true;
8631 wide_int wi_offset;
8632 int precision = TYPE_PRECISION (TREE_TYPE (base));
8633 if (offset == NULL_TREE)
8634 wi_offset = wi::zero (precision);
8635 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8636 return true;
8637 else
8638 wi_offset = offset;
8640 bool overflow;
8641 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8642 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8643 if (overflow)
8644 return true;
8646 if (!wi::fits_uhwi_p (total))
8647 return true;
8649 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8650 if (size <= 0)
8651 return true;
8653 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8654 array. */
8655 if (TREE_CODE (base) == ADDR_EXPR)
8657 HOST_WIDE_INT base_size;
8659 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8660 if (base_size > 0 && size < base_size)
8661 size = base_size;
8664 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
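/* Illustration (editorial note, not in the original source): given
   "struct S { int a; int b; } *p" with 4-byte int, the address &p->b
   has BITPOS == 32 and a NULL OFFSET, so TOTAL is 4 bytes against a
   pointed-to size of 8 and the function returns false: &p->b cannot
   wrap, so no spurious overflow warning is issued for comparisons
   involving it.  */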
8667 /* Return the HOST_WIDE_INT least significant bits of T, an
8668 INTEGER_CST of sizetype kind. This makes sure to properly
8669 sign-extend the constant. */
8671 static HOST_WIDE_INT
8672 size_low_cst (const_tree t)
8674 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8675 int prec = TYPE_PRECISION (TREE_TYPE (t));
8676 if (prec < HOST_BITS_PER_WIDE_INT)
8677 return sext_hwi (w, prec);
8678 return w;
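/* Illustration (editorial note, not in the original source): with a
   32-bit sizetype on a host where HOST_WIDE_INT is 64 bits, the
   sizetype constant 0xffffffff comes back as -1 here, so a "p + -1"
   offset is treated as a small negative adjustment rather than as
   4294967295 bytes.  */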
8681 /* Subroutine of fold_binary. This routine performs all of the
8682 transformations that are common to the equality/inequality
8683 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8684 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8685 fold_binary should call fold_binary. Fold a comparison with
8686 tree code CODE and type TYPE with operands OP0 and OP1. Return
8687 the folded comparison or NULL_TREE. */
8689 static tree
8690 fold_comparison (location_t loc, enum tree_code code, tree type,
8691 tree op0, tree op1)
8693 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8694 tree arg0, arg1, tem;
8696 arg0 = op0;
8697 arg1 = op1;
8699 STRIP_SIGN_NOPS (arg0);
8700 STRIP_SIGN_NOPS (arg1);
8702 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8703 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8704 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8705 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8706 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8707 && TREE_CODE (arg1) == INTEGER_CST
8708 && !TREE_OVERFLOW (arg1))
8710 const enum tree_code
8711 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8712 tree const1 = TREE_OPERAND (arg0, 1);
8713 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8714 tree variable = TREE_OPERAND (arg0, 0);
8715 tree new_const = int_const_binop (reverse_op, const2, const1);
8717 /* If the constant operation overflowed this can be
8718 simplified as a comparison against INT_MAX/INT_MIN. */
8719 if (TREE_OVERFLOW (new_const)
8720 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8722 int const1_sgn = tree_int_cst_sgn (const1);
8723 enum tree_code code2 = code;
8725 /* Get the sign of the constant on the lhs if the
8726 operation were VARIABLE + CONST1. */
8727 if (TREE_CODE (arg0) == MINUS_EXPR)
8728 const1_sgn = -const1_sgn;
8730 /* The sign of the constant determines if we overflowed
8731 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8732 Canonicalize to the INT_MIN overflow by swapping the comparison
8733 if necessary. */
8734 if (const1_sgn == -1)
8735 code2 = swap_tree_comparison (code);
8737 /* We now can look at the canonicalized case
8738 VARIABLE + 1 CODE2 INT_MIN
8739 and decide on the result. */
8740 switch (code2)
8742 case EQ_EXPR:
8743 case LT_EXPR:
8744 case LE_EXPR:
8745 return
8746 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8748 case NE_EXPR:
8749 case GE_EXPR:
8750 case GT_EXPR:
8751 return
8752 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8754 default:
8755 gcc_unreachable ();
8758 else
8760 if (!equality_code)
8761 fold_overflow_warning ("assuming signed overflow does not occur "
8762 "when changing X +- C1 cmp C2 to "
8763 "X cmp C2 -+ C1",
8764 WARN_STRICT_OVERFLOW_COMPARISON);
8765 return fold_build2_loc (loc, code, type, variable, new_const);
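/* Worked example for the no-overflow path above (editorial
   illustration, not in the original source): for signed int x,
   "x + 9 < 12" computes new_const = 12 - 9 = 3 and folds to "x < 3";
   since LT_EXPR is not an equality code, the strict-overflow warning
   above is issued first.  */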
8769 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8770 if (TREE_CODE (arg0) == MINUS_EXPR
8771 && equality_code
8772 && integer_zerop (arg1))
8774 /* ??? The transformation is valid for the other operators if overflow
8775 is undefined for the type, but performing it here badly interacts
8776 with the transformation in fold_cond_expr_with_comparison which
8777 attempts to synthesize ABS_EXPR. */
8778 if (!equality_code)
8779 fold_overflow_warning ("assuming signed overflow does not occur "
8780 "when changing X - Y cmp 0 to X cmp Y",
8781 WARN_STRICT_OVERFLOW_COMPARISON);
8782 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8783 TREE_OPERAND (arg0, 1));
8786 /* For comparisons of pointers we can decompose them into a compile-time
8787 comparison of the base objects and the offsets into the object.
8788 This requires at least one operand being an ADDR_EXPR or a
8789 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8790 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8791 && (TREE_CODE (arg0) == ADDR_EXPR
8792 || TREE_CODE (arg1) == ADDR_EXPR
8793 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8794 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8796 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8797 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8798 machine_mode mode;
8799 int volatilep, unsignedp;
8800 bool indirect_base0 = false, indirect_base1 = false;
8802 /* Get base and offset for the access. Strip ADDR_EXPR for
8803 get_inner_reference, but put it back by stripping INDIRECT_REF
8804 off the base object if possible. indirect_baseN will be true
8805 if baseN is not an address but refers to the object itself. */
8806 base0 = arg0;
8807 if (TREE_CODE (arg0) == ADDR_EXPR)
8809 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8810 &bitsize, &bitpos0, &offset0, &mode,
8811 &unsignedp, &volatilep, false);
8812 if (TREE_CODE (base0) == INDIRECT_REF)
8813 base0 = TREE_OPERAND (base0, 0);
8814 else
8815 indirect_base0 = true;
8817 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8819 base0 = TREE_OPERAND (arg0, 0);
8820 STRIP_SIGN_NOPS (base0);
8821 if (TREE_CODE (base0) == ADDR_EXPR)
8823 base0 = TREE_OPERAND (base0, 0);
8824 indirect_base0 = true;
8826 offset0 = TREE_OPERAND (arg0, 1);
8827 if (tree_fits_shwi_p (offset0))
8829 HOST_WIDE_INT off = size_low_cst (offset0);
8830 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8831 * BITS_PER_UNIT)
8832 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8834 bitpos0 = off * BITS_PER_UNIT;
8835 offset0 = NULL_TREE;
8840 base1 = arg1;
8841 if (TREE_CODE (arg1) == ADDR_EXPR)
8843 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8844 &bitsize, &bitpos1, &offset1, &mode,
8845 &unsignedp, &volatilep, false);
8846 if (TREE_CODE (base1) == INDIRECT_REF)
8847 base1 = TREE_OPERAND (base1, 0);
8848 else
8849 indirect_base1 = true;
8851 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8853 base1 = TREE_OPERAND (arg1, 0);
8854 STRIP_SIGN_NOPS (base1);
8855 if (TREE_CODE (base1) == ADDR_EXPR)
8857 base1 = TREE_OPERAND (base1, 0);
8858 indirect_base1 = true;
8860 offset1 = TREE_OPERAND (arg1, 1);
8861 if (tree_fits_shwi_p (offset1))
8863 HOST_WIDE_INT off = size_low_cst (offset1);
8864 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8865 * BITS_PER_UNIT)
8866 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8868 bitpos1 = off * BITS_PER_UNIT;
8869 offset1 = NULL_TREE;
8874 /* A local variable can never be pointed to by
8875 the default SSA name of an incoming parameter. */
8876 if ((TREE_CODE (arg0) == ADDR_EXPR
8877 && indirect_base0
8878 && TREE_CODE (base0) == VAR_DECL
8879 && auto_var_in_fn_p (base0, current_function_decl)
8880 && !indirect_base1
8881 && TREE_CODE (base1) == SSA_NAME
8882 && SSA_NAME_IS_DEFAULT_DEF (base1)
8883 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8884 || (TREE_CODE (arg1) == ADDR_EXPR
8885 && indirect_base1
8886 && TREE_CODE (base1) == VAR_DECL
8887 && auto_var_in_fn_p (base1, current_function_decl)
8888 && !indirect_base0
8889 && TREE_CODE (base0) == SSA_NAME
8890 && SSA_NAME_IS_DEFAULT_DEF (base0)
8891 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8893 if (code == NE_EXPR)
8894 return constant_boolean_node (1, type);
8895 else if (code == EQ_EXPR)
8896 return constant_boolean_node (0, type);
8898 /* If we have equivalent bases we might be able to simplify. */
8899 else if (indirect_base0 == indirect_base1
8900 && operand_equal_p (base0, base1, 0))
8902 /* We can fold this expression to a constant if the non-constant
8903 offset parts are equal. */
8904 if ((offset0 == offset1
8905 || (offset0 && offset1
8906 && operand_equal_p (offset0, offset1, 0)))
8907 && (code == EQ_EXPR
8908 || code == NE_EXPR
8909 || (indirect_base0 && DECL_P (base0))
8910 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8913 if (!equality_code
8914 && bitpos0 != bitpos1
8915 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8916 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8917 fold_overflow_warning (("assuming pointer wraparound does not "
8918 "occur when comparing P +- C1 with "
8919 "P +- C2"),
8920 WARN_STRICT_OVERFLOW_CONDITIONAL);
8922 switch (code)
8924 case EQ_EXPR:
8925 return constant_boolean_node (bitpos0 == bitpos1, type);
8926 case NE_EXPR:
8927 return constant_boolean_node (bitpos0 != bitpos1, type);
8928 case LT_EXPR:
8929 return constant_boolean_node (bitpos0 < bitpos1, type);
8930 case LE_EXPR:
8931 return constant_boolean_node (bitpos0 <= bitpos1, type);
8932 case GE_EXPR:
8933 return constant_boolean_node (bitpos0 >= bitpos1, type);
8934 case GT_EXPR:
8935 return constant_boolean_node (bitpos0 > bitpos1, type);
8936 default:;
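/* For example, given struct { int a, b; } s, the test &s.a < &s.b
   folds to 1 here because bitpos0 (0) is less than bitpos1
   (32, assuming a target with 32-bit int). */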
8939 /* We can simplify the comparison to a comparison of the variable
8940 offset parts if the constant offset parts are equal.
8941 Be careful to use signed sizetype here because otherwise we
8942 mess with array offsets in the wrong way. This is possible
8943 because pointer arithmetic is restricted to remain within an
8944 object and overflow on pointer differences is undefined as of
8945 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8946 else if (bitpos0 == bitpos1
8947 && (equality_code
8948 || (indirect_base0 && DECL_P (base0))
8949 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8951 /* By converting to signed sizetype we cover middle-end pointer
8952 arithmetic which operates on unsigned pointer types of size
8953 type size and ARRAY_REF offsets which are properly sign or
8954 zero extended from their type in case it is narrower than
8955 sizetype. */
8956 if (offset0 == NULL_TREE)
8957 offset0 = build_int_cst (ssizetype, 0);
8958 else
8959 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8960 if (offset1 == NULL_TREE)
8961 offset1 = build_int_cst (ssizetype, 0);
8962 else
8963 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8965 if (!equality_code
8966 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8967 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8968 fold_overflow_warning (("assuming pointer wraparound does not "
8969 "occur when comparing P +- C1 with "
8970 "P +- C2"),
8971 WARN_STRICT_OVERFLOW_COMPARISON);
8973 return fold_build2_loc (loc, code, type, offset0, offset1);
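/* For example, with equal constant parts, P p+ I < P p+ J reduces
   here to the signed comparison of the offsets I and J in
   ssizetype. */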
8976 /* For non-equal bases we can simplify if they are addresses
8977 of local binding decls or constants. */
8978 else if (indirect_base0 && indirect_base1
8979 /* We know that !operand_equal_p (base0, base1, 0)
8980 because the if condition was false. But make
8981 sure the two decls are not the same. */
8982 && base0 != base1
8983 && TREE_CODE (arg0) == ADDR_EXPR
8984 && TREE_CODE (arg1) == ADDR_EXPR
8985 && (((TREE_CODE (base0) == VAR_DECL
8986 || TREE_CODE (base0) == PARM_DECL)
8987 && (targetm.binds_local_p (base0)
8988 || CONSTANT_CLASS_P (base1)))
8989 || CONSTANT_CLASS_P (base0))
8990 && (((TREE_CODE (base1) == VAR_DECL
8991 || TREE_CODE (base1) == PARM_DECL)
8992 && (targetm.binds_local_p (base1)
8993 || CONSTANT_CLASS_P (base0)))
8994 || CONSTANT_CLASS_P (base1)))
8996 if (code == EQ_EXPR)
8997 return omit_two_operands_loc (loc, type, boolean_false_node,
8998 arg0, arg1);
8999 else if (code == NE_EXPR)
9000 return omit_two_operands_loc (loc, type, boolean_true_node,
9001 arg0, arg1);
9003 /* For equal offsets we can simplify to a comparison of the
9004 base addresses. */
9005 else if (bitpos0 == bitpos1
9006 && (indirect_base0
9007 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9008 && (indirect_base1
9009 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9010 && ((offset0 == offset1)
9011 || (offset0 && offset1
9012 && operand_equal_p (offset0, offset1, 0))))
9014 if (indirect_base0)
9015 base0 = build_fold_addr_expr_loc (loc, base0);
9016 if (indirect_base1)
9017 base1 = build_fold_addr_expr_loc (loc, base1);
9018 return fold_build2_loc (loc, code, type, base0, base1);
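/* For example, &x->f == &y->f, where both sides have equal offsets,
   reduces here to the simpler pointer test x == y. */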
9022 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9023 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9024 the resulting offset is smaller in absolute value than the
9025 original one and has the same sign. */
9026 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9027 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9028 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9029 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9030 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9031 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9032 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9034 tree const1 = TREE_OPERAND (arg0, 1);
9035 tree const2 = TREE_OPERAND (arg1, 1);
9036 tree variable1 = TREE_OPERAND (arg0, 0);
9037 tree variable2 = TREE_OPERAND (arg1, 0);
9038 tree cst;
9039 const char * const warnmsg = G_("assuming signed overflow does not "
9040 "occur when combining constants around "
9041 "a comparison");
9043 /* Put the constant on the side where it doesn't overflow, is
9044 of lower absolute value, and has the same sign as before. */
9045 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9046 ? MINUS_EXPR : PLUS_EXPR,
9047 const2, const1);
9048 if (!TREE_OVERFLOW (cst)
9049 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9050 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9052 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9053 return fold_build2_loc (loc, code, type,
9054 variable1,
9055 fold_build2_loc (loc, TREE_CODE (arg1),
9056 TREE_TYPE (arg1),
9057 variable2, cst));
9060 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9061 ? MINUS_EXPR : PLUS_EXPR,
9062 const1, const2);
9063 if (!TREE_OVERFLOW (cst)
9064 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9065 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9067 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9068 return fold_build2_loc (loc, code, type,
9069 fold_build2_loc (loc, TREE_CODE (arg0),
9070 TREE_TYPE (arg0),
9071 variable1, cst),
9072 variable2);
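/* For example, X + 2 < Y + 5 becomes X < Y + 3 here: the combined
   constant 3 is smaller in absolute value than 5 and has the same
   sign, so no new overflow can be introduced. */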
9076 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9077 signed arithmetic case. That form is created by the compiler
9078 often enough for folding it to be of value. One example is in
9079 computing loop trip counts after Operator Strength Reduction. */
9080 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9081 && TREE_CODE (arg0) == MULT_EXPR
9082 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9083 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9084 && integer_zerop (arg1))
9086 tree const1 = TREE_OPERAND (arg0, 1);
9087 tree const2 = arg1; /* zero */
9088 tree variable1 = TREE_OPERAND (arg0, 0);
9089 enum tree_code cmp_code = code;
9091 /* Handle unfolded multiplication by zero. */
9092 if (integer_zerop (const1))
9093 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9095 fold_overflow_warning (("assuming signed overflow does not occur when "
9096 "eliminating multiplication in comparison "
9097 "with zero"),
9098 WARN_STRICT_OVERFLOW_COMPARISON);
9100 /* If const1 is negative we swap the sense of the comparison. */
9101 if (tree_int_cst_sgn (const1) < 0)
9102 cmp_code = swap_tree_comparison (cmp_code);
9104 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
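/* For example, X * 4 > 0 becomes X > 0, and X * -4 > 0 becomes
   X < 0, valid because signed overflow is undefined here. */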
9107 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9108 if (tem)
9109 return tem;
9111 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9113 tree targ0 = strip_float_extensions (arg0);
9114 tree targ1 = strip_float_extensions (arg1);
9115 tree newtype = TREE_TYPE (targ0);
9117 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9118 newtype = TREE_TYPE (targ1);
9120 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9121 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9122 return fold_build2_loc (loc, code, type,
9123 fold_convert_loc (loc, newtype, targ0),
9124 fold_convert_loc (loc, newtype, targ1));
9126 /* (-a) CMP (-b) -> b CMP a */
9127 if (TREE_CODE (arg0) == NEGATE_EXPR
9128 && TREE_CODE (arg1) == NEGATE_EXPR)
9129 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9130 TREE_OPERAND (arg0, 0));
9132 if (TREE_CODE (arg1) == REAL_CST)
9134 REAL_VALUE_TYPE cst;
9135 cst = TREE_REAL_CST (arg1);
9137 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9138 if (TREE_CODE (arg0) == NEGATE_EXPR)
9139 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9140 TREE_OPERAND (arg0, 0),
9141 build_real (TREE_TYPE (arg1),
9142 real_value_negate (&cst)));
9144 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9145 /* a CMP (-0) -> a CMP 0 */
9146 if (REAL_VALUE_MINUS_ZERO (cst))
9147 return fold_build2_loc (loc, code, type, arg0,
9148 build_real (TREE_TYPE (arg1), dconst0));
9150 /* x != NaN is always true, other ops are always false. */
9151 if (REAL_VALUE_ISNAN (cst)
9152 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9154 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9155 return omit_one_operand_loc (loc, type, tem, arg0);
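/* For example, x != NaN folds to true and x < NaN folds to false,
   provided signaling NaNs need not be honored. */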
9158 /* Fold comparisons against infinity. */
9159 if (REAL_VALUE_ISINF (cst)
9160 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9162 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9163 if (tem != NULL_TREE)
9164 return tem;
9168 /* If this is a comparison of a real constant with a PLUS_EXPR
9169 or a MINUS_EXPR of a real constant, we can convert it into a
9170 comparison with a revised real constant as long as no overflow
9171 occurs when unsafe_math_optimizations are enabled. */
9172 if (flag_unsafe_math_optimizations
9173 && TREE_CODE (arg1) == REAL_CST
9174 && (TREE_CODE (arg0) == PLUS_EXPR
9175 || TREE_CODE (arg0) == MINUS_EXPR)
9176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9177 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9178 ? MINUS_EXPR : PLUS_EXPR,
9179 arg1, TREE_OPERAND (arg0, 1)))
9180 && !TREE_OVERFLOW (tem))
9181 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9183 /* Likewise, we can simplify a comparison of a real constant with
9184 a MINUS_EXPR whose first operand is also a real constant, i.e.
9185 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9186 floating-point types only if -fassociative-math is set. */
9187 if (flag_associative_math
9188 && TREE_CODE (arg1) == REAL_CST
9189 && TREE_CODE (arg0) == MINUS_EXPR
9190 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9191 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9192 arg1))
9193 && !TREE_OVERFLOW (tem))
9194 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9195 TREE_OPERAND (arg0, 1), tem);
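/* For example, (10.0 - x) < 4.0 becomes x > 6.0 under
   -fassociative-math. */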
9197 /* Fold comparisons against built-in math functions. */
9198 if (TREE_CODE (arg1) == REAL_CST
9199 && flag_unsafe_math_optimizations
9200 && ! flag_errno_math)
9202 enum built_in_function fcode = builtin_mathfn_code (arg0);
9204 if (fcode != END_BUILTINS)
9206 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9207 if (tem != NULL_TREE)
9208 return tem;
9213 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9214 && CONVERT_EXPR_P (arg0))
9216 /* If we are widening one operand of an integer comparison,
9217 see if the other operand is similarly being widened. Perhaps we
9218 can do the comparison in the narrower type. */
9219 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9220 if (tem)
9221 return tem;
9223 /* Or if we are changing signedness. */
9224 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9225 if (tem)
9226 return tem;
9229 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9230 constant, we can simplify it. */
9231 if (TREE_CODE (arg1) == INTEGER_CST
9232 && (TREE_CODE (arg0) == MIN_EXPR
9233 || TREE_CODE (arg0) == MAX_EXPR)
9234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9236 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9237 if (tem)
9238 return tem;
9241 /* Simplify comparison of something with itself. (For IEEE
9242 floating-point, we can only do some of these simplifications.) */
9243 if (operand_equal_p (arg0, arg1, 0))
9245 switch (code)
9247 case EQ_EXPR:
9248 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9249 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9250 return constant_boolean_node (1, type);
9251 break;
9253 case GE_EXPR:
9254 case LE_EXPR:
9255 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9256 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9257 return constant_boolean_node (1, type);
9258 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9260 case NE_EXPR:
9261 /* For NE, we can only do this simplification if the type is
9262 integer or we don't honor IEEE floating-point NaNs. */
9263 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9264 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9265 break;
9266 /* ... fall through ... */
9267 case GT_EXPR:
9268 case LT_EXPR:
9269 return constant_boolean_node (0, type);
9270 default:
9271 gcc_unreachable ();
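/* For example, x < x always folds to 0; x <= x folds to 1 for
   integers but becomes x == x for IEEE floats so that NaNs are
   handled correctly. */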
9275 /* If we are comparing an expression that just has comparisons
9276 of two integer values, arithmetic expressions of those comparisons,
9277 and constants, we can simplify it. There are only three cases
9278 to check: the two values can either be equal, the first can be
9279 greater, or the second can be greater. Fold the expression for
9280 those three values. Since each value must be 0 or 1, we have
9281 eight possibilities, each of which corresponds to the constant 0
9282 or 1 or one of the six possible comparisons.
9284 This handles common cases like (a > b) == 0 but also handles
9285 expressions like ((x > y) - (y > x)) > 0, which supposedly
9286 occur in macroized code. */
9288 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9290 tree cval1 = 0, cval2 = 0;
9291 int save_p = 0;
9293 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9294 /* Don't handle degenerate cases here; they should already
9295 have been handled anyway. */
9296 && cval1 != 0 && cval2 != 0
9297 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9298 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9299 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9300 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9301 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9302 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9303 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9305 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9306 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9308 /* We can't just pass T to eval_subst in case cval1 or cval2
9309 was the same as ARG1. */
9311 tree high_result
9312 = fold_build2_loc (loc, code, type,
9313 eval_subst (loc, arg0, cval1, maxval,
9314 cval2, minval),
9315 arg1);
9316 tree equal_result
9317 = fold_build2_loc (loc, code, type,
9318 eval_subst (loc, arg0, cval1, maxval,
9319 cval2, maxval),
9320 arg1);
9321 tree low_result
9322 = fold_build2_loc (loc, code, type,
9323 eval_subst (loc, arg0, cval1, minval,
9324 cval2, maxval),
9325 arg1);
9327 /* All three of these results should be 0 or 1. Confirm they are.
9328 Then use those values to select the proper code to use. */
9330 if (TREE_CODE (high_result) == INTEGER_CST
9331 && TREE_CODE (equal_result) == INTEGER_CST
9332 && TREE_CODE (low_result) == INTEGER_CST)
9334 /* Make a 3-bit mask with the high-order bit being the
9335 value for `>', the next for `=', and the low for `<'. */
9336 switch ((integer_onep (high_result) * 4)
9337 + (integer_onep (equal_result) * 2)
9338 + integer_onep (low_result))
9340 case 0:
9341 /* Always false. */
9342 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9343 case 1:
9344 code = LT_EXPR;
9345 break;
9346 case 2:
9347 code = EQ_EXPR;
9348 break;
9349 case 3:
9350 code = LE_EXPR;
9351 break;
9352 case 4:
9353 code = GT_EXPR;
9354 break;
9355 case 5:
9356 code = NE_EXPR;
9357 break;
9358 case 6:
9359 code = GE_EXPR;
9360 break;
9361 case 7:
9362 /* Always true. */
9363 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9366 if (save_p)
9368 tem = save_expr (build2 (code, type, cval1, cval2));
9369 SET_EXPR_LOCATION (tem, loc);
9370 return tem;
9372 return fold_build2_loc (loc, code, type, cval1, cval2);
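/* For example, (a > b) == 0 evaluates to 0, 1 and 1 for the three
   orderings, giving mask 3, and therefore folds to a <= b. */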
9377 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9378 into a single range test. */
9379 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9380 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9381 && TREE_CODE (arg1) == INTEGER_CST
9382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9383 && !integer_zerop (TREE_OPERAND (arg0, 1))
9384 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9385 && !TREE_OVERFLOW (arg1))
9387 tem = fold_div_compare (loc, code, type, arg0, arg1);
9388 if (tem != NULL_TREE)
9389 return tem;
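/* For example, for unsigned x, x / 4 == 2 becomes the range test
   8 <= x && x <= 11. */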
9392 /* Fold ~X op ~Y as Y op X. */
9393 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9394 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9396 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9397 return fold_build2_loc (loc, code, type,
9398 fold_convert_loc (loc, cmp_type,
9399 TREE_OPERAND (arg1, 0)),
9400 TREE_OPERAND (arg0, 0));
9403 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9404 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9405 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9407 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9408 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9409 TREE_OPERAND (arg0, 0),
9410 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9411 fold_convert_loc (loc, cmp_type, arg1)));
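/* For example, ~x < 5 becomes x > ~5, i.e. x > -6 for signed x. */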
9414 return NULL_TREE;
9418 /* Subroutine of fold_binary. Optimize complex multiplications of the
9419 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9420 argument EXPR represents the expression "z" of type TYPE. */
9422 static tree
9423 fold_mult_zconjz (location_t loc, tree type, tree expr)
9425 tree itype = TREE_TYPE (type);
9426 tree rpart, ipart, tem;
9428 if (TREE_CODE (expr) == COMPLEX_EXPR)
9430 rpart = TREE_OPERAND (expr, 0);
9431 ipart = TREE_OPERAND (expr, 1);
9433 else if (TREE_CODE (expr) == COMPLEX_CST)
9435 rpart = TREE_REALPART (expr);
9436 ipart = TREE_IMAGPART (expr);
9438 else
9440 expr = save_expr (expr);
9441 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9442 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9445 rpart = save_expr (rpart);
9446 ipart = save_expr (ipart);
9447 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9448 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9449 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9450 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9451 build_zero_cst (itype));
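/* For example, for z = a + b*i this folds z * conj(z) to
   (a*a + b*b) + 0*i, as described in the comment above. */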
9455 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9456 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9457 guarantees that P and N have the same least significant log2(M) bits.
9458 N is not otherwise constrained. In particular, N is not normalized to
9459 0 <= N < M as is common. In general, the precise value of P is unknown.
9460 M is chosen as large as possible such that constant N can be determined.
9462 Returns M and sets *RESIDUE to N.
9464 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9465 account. This is not always possible due to PR 35705.
9468 static unsigned HOST_WIDE_INT
9469 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9470 bool allow_func_align)
9472 enum tree_code code;
9474 *residue = 0;
9476 code = TREE_CODE (expr);
9477 if (code == ADDR_EXPR)
9479 unsigned int bitalign;
9480 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9481 *residue /= BITS_PER_UNIT;
9482 return bitalign / BITS_PER_UNIT;
9484 else if (code == POINTER_PLUS_EXPR)
9486 tree op0, op1;
9487 unsigned HOST_WIDE_INT modulus;
9488 enum tree_code inner_code;
9490 op0 = TREE_OPERAND (expr, 0);
9491 STRIP_NOPS (op0);
9492 modulus = get_pointer_modulus_and_residue (op0, residue,
9493 allow_func_align);
9495 op1 = TREE_OPERAND (expr, 1);
9496 STRIP_NOPS (op1);
9497 inner_code = TREE_CODE (op1);
9498 if (inner_code == INTEGER_CST)
9500 *residue += TREE_INT_CST_LOW (op1);
9501 return modulus;
9503 else if (inner_code == MULT_EXPR)
9505 op1 = TREE_OPERAND (op1, 1);
9506 if (TREE_CODE (op1) == INTEGER_CST)
9508 unsigned HOST_WIDE_INT align;
9510 /* Compute the greatest power-of-2 divisor of op1. */
9511 align = TREE_INT_CST_LOW (op1);
9512 align &= -align;
9514 /* If align is non-zero and less than *modulus, replace
9515 *modulus with align., If align is 0, then either op1 is 0
9516 or the greatest power-of-2 divisor of op1 doesn't fit in an
9517 unsigned HOST_WIDE_INT. In either case, no additional
9518 constraint is imposed. */
9519 if (align)
9520 modulus = MIN (modulus, align);
9522 return modulus;
9527 /* If we get here, we were unable to determine anything useful about the
9528 expression. */
9529 return 1;
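/* For example, for &buf[5], assuming BUF is known to be 16-byte
   aligned, this returns 16 and sets *RESIDUE to 5. */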
9532 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9533 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9535 static bool
9536 vec_cst_ctor_to_array (tree arg, tree *elts)
9538 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9540 if (TREE_CODE (arg) == VECTOR_CST)
9542 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9543 elts[i] = VECTOR_CST_ELT (arg, i);
9545 else if (TREE_CODE (arg) == CONSTRUCTOR)
9547 constructor_elt *elt;
9549 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9550 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9551 return false;
9552 else
9553 elts[i] = elt->value;
9555 else
9556 return false;
9557 for (; i < nelts; i++)
9558 elts[i]
9559 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9560 return true;
9563 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9564 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9565 NULL_TREE otherwise. */
9567 static tree
9568 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9570 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9571 tree *elts;
9572 bool need_ctor = false;
9574 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9575 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9576 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9577 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9578 return NULL_TREE;
9580 elts = XALLOCAVEC (tree, nelts * 3);
9581 if (!vec_cst_ctor_to_array (arg0, elts)
9582 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9583 return NULL_TREE;
9585 for (i = 0; i < nelts; i++)
9587 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9588 need_ctor = true;
9589 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9592 if (need_ctor)
9594 vec<constructor_elt, va_gc> *v;
9595 vec_alloc (v, nelts);
9596 for (i = 0; i < nelts; i++)
9597 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9598 return build_constructor (type, v);
9600 else
9601 return build_vector (type, &elts[2 * nelts]);
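/* For example, permuting {1,2,3,4} and {5,6,7,8} with selector
   {0,4,1,5} yields {1,5,2,6}: selector indexes 0..3 pick from ARG0
   and 4..7 from ARG1. */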
9604 /* Try to fold a pointer difference of type TYPE between two address
9605 expressions of array references AREF0 and AREF1 using location LOC.
9606 Return a simplified expression for the difference or NULL_TREE. */
9608 static tree
9609 fold_addr_of_array_ref_difference (location_t loc, tree type,
9610 tree aref0, tree aref1)
9612 tree base0 = TREE_OPERAND (aref0, 0);
9613 tree base1 = TREE_OPERAND (aref1, 0);
9614 tree base_offset = build_int_cst (type, 0);
9616 /* If the bases are array references as well, recurse. If the bases
9617 are pointer indirections compute the difference of the pointers.
9618 If the bases are equal, we are set. */
9619 if ((TREE_CODE (base0) == ARRAY_REF
9620 && TREE_CODE (base1) == ARRAY_REF
9621 && (base_offset
9622 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9623 || (INDIRECT_REF_P (base0)
9624 && INDIRECT_REF_P (base1)
9625 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9626 TREE_OPERAND (base0, 0),
9627 TREE_OPERAND (base1, 0))))
9628 || operand_equal_p (base0, base1, 0))
9630 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9631 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9632 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9633 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9634 return fold_build2_loc (loc, PLUS_EXPR, type,
9635 base_offset,
9636 fold_build2_loc (loc, MULT_EXPR, type,
9637 diff, esz));
9639 return NULL_TREE;
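/* For example, &a[i] - &a[j] folds to (i - j) * sizeof (a[0]),
   since the bases are equal and the base offset is zero. */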
9642 /* If the real or vector real constant CST of type TYPE has an exact
9643 inverse, return it, else return NULL. */
9645 static tree
9646 exact_inverse (tree type, tree cst)
9648 REAL_VALUE_TYPE r;
9649 tree unit_type, *elts;
9650 machine_mode mode;
9651 unsigned vec_nelts, i;
9653 switch (TREE_CODE (cst))
9655 case REAL_CST:
9656 r = TREE_REAL_CST (cst);
9658 if (exact_real_inverse (TYPE_MODE (type), &r))
9659 return build_real (type, r);
9661 return NULL_TREE;
9663 case VECTOR_CST:
9664 vec_nelts = VECTOR_CST_NELTS (cst);
9665 elts = XALLOCAVEC (tree, vec_nelts);
9666 unit_type = TREE_TYPE (type);
9667 mode = TYPE_MODE (unit_type);
9669 for (i = 0; i < vec_nelts; i++)
9671 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9672 if (!exact_real_inverse (mode, &r))
9673 return NULL_TREE;
9674 elts[i] = build_real (unit_type, r);
9677 return build_vector (type, elts);
9679 default:
9680 return NULL_TREE;
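/* For example, 4.0 has the exact inverse 0.25, allowing x / 4.0 to
   become x * 0.25, whereas 3.0 has no exact inverse. */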
9684 /* Mask out the tz least significant bits of X of type TYPE where
9685 tz is the number of trailing zeroes in Y. */
9686 static wide_int
9687 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9689 int tz = wi::ctz (y);
9690 if (tz > 0)
9691 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9692 return x;
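/* For example, with Y == 8 (three trailing zeroes), the three least
   significant bits of X are cleared. */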
9695 /* Return true when T is an address and is known to be nonzero.
9696 For floating point we further ensure that T is not denormal.
9697 Similar logic is present in nonzero_address_p in rtlanal.c.
9699 If the return value is based on the assumption that signed overflow
9700 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9701 change *STRICT_OVERFLOW_P. */
9703 static bool
9704 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9706 tree type = TREE_TYPE (t);
9707 enum tree_code code;
9709 /* Doing something useful for floating point would need more work. */
9710 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9711 return false;
9713 code = TREE_CODE (t);
9714 switch (TREE_CODE_CLASS (code))
9716 case tcc_unary:
9717 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9718 strict_overflow_p);
9719 case tcc_binary:
9720 case tcc_comparison:
9721 return tree_binary_nonzero_warnv_p (code, type,
9722 TREE_OPERAND (t, 0),
9723 TREE_OPERAND (t, 1),
9724 strict_overflow_p);
9725 case tcc_constant:
9726 case tcc_declaration:
9727 case tcc_reference:
9728 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9730 default:
9731 break;
9734 switch (code)
9736 case TRUTH_NOT_EXPR:
9737 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9738 strict_overflow_p);
9740 case TRUTH_AND_EXPR:
9741 case TRUTH_OR_EXPR:
9742 case TRUTH_XOR_EXPR:
9743 return tree_binary_nonzero_warnv_p (code, type,
9744 TREE_OPERAND (t, 0),
9745 TREE_OPERAND (t, 1),
9746 strict_overflow_p);
9748 case COND_EXPR:
9749 case CONSTRUCTOR:
9750 case OBJ_TYPE_REF:
9751 case ASSERT_EXPR:
9752 case ADDR_EXPR:
9753 case WITH_SIZE_EXPR:
9754 case SSA_NAME:
9755 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9757 case COMPOUND_EXPR:
9758 case MODIFY_EXPR:
9759 case BIND_EXPR:
9760 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9761 strict_overflow_p);
9763 case SAVE_EXPR:
9764 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9765 strict_overflow_p);
9767 case CALL_EXPR:
9769 tree fndecl = get_callee_fndecl (t);
9770 if (!fndecl) return false;
9771 if (flag_delete_null_pointer_checks && !flag_check_new
9772 && DECL_IS_OPERATOR_NEW (fndecl)
9773 && !TREE_NOTHROW (fndecl))
9774 return true;
9775 if (flag_delete_null_pointer_checks
9776 && lookup_attribute ("returns_nonnull",
9777 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9778 return true;
9779 return alloca_call_p (t);
9782 default:
9783 break;
9785 return false;
9788 /* Return true when T is an address and is known to be nonzero.
9789 Handle warnings about undefined signed overflow. */
9791 static bool
9792 tree_expr_nonzero_p (tree t)
9794 bool ret, strict_overflow_p;
9796 strict_overflow_p = false;
9797 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9798 if (strict_overflow_p)
9799 fold_overflow_warning (("assuming signed overflow does not occur when "
9800 "determining that expression is always "
9801 "non-zero"),
9802 WARN_STRICT_OVERFLOW_MISC);
9803 return ret;
9806 /* Fold a binary expression of code CODE and type TYPE with operands
9807 OP0 and OP1. LOC is the location of the resulting expression.
9808 Return the folded expression if folding is successful. Otherwise,
9809 return NULL_TREE. */
9811 tree
9812 fold_binary_loc (location_t loc,
9813 enum tree_code code, tree type, tree op0, tree op1)
9815 enum tree_code_class kind = TREE_CODE_CLASS (code);
9816 tree arg0, arg1, tem;
9817 tree t1 = NULL_TREE;
9818 bool strict_overflow_p;
9819 unsigned int prec;
9821 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9822 && TREE_CODE_LENGTH (code) == 2
9823 && op0 != NULL_TREE
9824 && op1 != NULL_TREE);
9826 arg0 = op0;
9827 arg1 = op1;
9829 /* Strip any conversions that don't change the mode. This is
9830 safe for every expression, except for a comparison expression
9831 because its signedness is derived from its operands. So, in
9832 the latter case, only strip conversions that don't change the
9833 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9834 preserved.
9836 Note that this is done as an internal manipulation within the
9837 constant folder, in order to find the simplest representation
9838 of the arguments so that their form can be studied. In any
9839 cases, the appropriate type conversions should be put back in
9840 the tree that will get out of the constant folder. */
9842 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9844 STRIP_SIGN_NOPS (arg0);
9845 STRIP_SIGN_NOPS (arg1);
9847 else
9849 STRIP_NOPS (arg0);
9850 STRIP_NOPS (arg1);
9853 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9854 constant but we can't do arithmetic on them. */
9855 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9856 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9857 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9858 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9859 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9860 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9861 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9863 if (kind == tcc_binary)
9865 /* Make sure type and arg0 have the same saturating flag. */
9866 gcc_assert (TYPE_SATURATING (type)
9867 == TYPE_SATURATING (TREE_TYPE (arg0)));
9868 tem = const_binop (code, arg0, arg1);
9870 else if (kind == tcc_comparison)
9871 tem = fold_relational_const (code, type, arg0, arg1);
9872 else
9873 tem = NULL_TREE;
9875 if (tem != NULL_TREE)
9877 if (TREE_TYPE (tem) != type)
9878 tem = fold_convert_loc (loc, type, tem);
9879 return tem;
9883 /* If this is a commutative operation, and ARG0 is a constant, move it
9884 to ARG1 to reduce the number of tests below. */
9885 if (commutative_tree_code (code)
9886 && tree_swap_operands_p (arg0, arg1, true))
9887 return fold_build2_loc (loc, code, type, op1, op0);
9889 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9890 to ARG1 to reduce the number of tests below. */
9891 if (kind == tcc_comparison
9892 && tree_swap_operands_p (arg0, arg1, true))
9893 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9895 tem = generic_simplify (loc, code, type, op0, op1);
9896 if (tem)
9897 return tem;
9899 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9901 First check for cases where an arithmetic operation is applied to a
9902 compound, conditional, or comparison operation. Push the arithmetic
9903 operation inside the compound or conditional to see if any folding
9904 can then be done. Convert comparison to conditional for this purpose.
9905 The also optimizes non-constant cases that used to be done in
9906 expand_expr.
9908 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9909 one of the operands is a comparison and the other is a comparison, a
9910 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9911 code below would make the expression more complex. Change it to a
9912 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9913 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9915 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9916 || code == EQ_EXPR || code == NE_EXPR)
9917 && TREE_CODE (type) != VECTOR_TYPE
9918 && ((truth_value_p (TREE_CODE (arg0))
9919 && (truth_value_p (TREE_CODE (arg1))
9920 || (TREE_CODE (arg1) == BIT_AND_EXPR
9921 && integer_onep (TREE_OPERAND (arg1, 1)))))
9922 || (truth_value_p (TREE_CODE (arg1))
9923 && (truth_value_p (TREE_CODE (arg0))
9924 || (TREE_CODE (arg0) == BIT_AND_EXPR
9925 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9927 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9928 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9929 : TRUTH_XOR_EXPR,
9930 boolean_type_node,
9931 fold_convert_loc (loc, boolean_type_node, arg0),
9932 fold_convert_loc (loc, boolean_type_node, arg1));
9934 if (code == EQ_EXPR)
9935 tem = invert_truthvalue_loc (loc, tem);
9937 return fold_convert_loc (loc, type, tem);
9940 if (TREE_CODE_CLASS (code) == tcc_binary
9941 || TREE_CODE_CLASS (code) == tcc_comparison)
9943 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9945 tem = fold_build2_loc (loc, code, type,
9946 fold_convert_loc (loc, TREE_TYPE (op0),
9947 TREE_OPERAND (arg0, 1)), op1);
9948 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9949 tem);
9951 if (TREE_CODE (arg1) == COMPOUND_EXPR
9952 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9954 tem = fold_build2_loc (loc, code, type, op0,
9955 fold_convert_loc (loc, TREE_TYPE (op1),
9956 TREE_OPERAND (arg1, 1)));
9957 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9958 tem);
9961 if (TREE_CODE (arg0) == COND_EXPR
9962 || TREE_CODE (arg0) == VEC_COND_EXPR
9963 || COMPARISON_CLASS_P (arg0))
9965 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9966 arg0, arg1,
9967 /*cond_first_p=*/1);
9968 if (tem != NULL_TREE)
9969 return tem;
9972 if (TREE_CODE (arg1) == COND_EXPR
9973 || TREE_CODE (arg1) == VEC_COND_EXPR
9974 || COMPARISON_CLASS_P (arg1))
9976 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9977 arg1, arg0,
9978 /*cond_first_p=*/0);
9979 if (tem != NULL_TREE)
9980 return tem;
9984 switch (code)
9986 case MEM_REF:
9987 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9988 if (TREE_CODE (arg0) == ADDR_EXPR
9989 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9991 tree iref = TREE_OPERAND (arg0, 0);
9992 return fold_build2 (MEM_REF, type,
9993 TREE_OPERAND (iref, 0),
9994 int_const_binop (PLUS_EXPR, arg1,
9995 TREE_OPERAND (iref, 1)));
9998 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9999 if (TREE_CODE (arg0) == ADDR_EXPR
10000 && handled_component_p (TREE_OPERAND (arg0, 0)))
10002 tree base;
10003 HOST_WIDE_INT coffset;
10004 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10005 &coffset);
10006 if (!base)
10007 return NULL_TREE;
10008 return fold_build2 (MEM_REF, type,
10009 build_fold_addr_expr (base),
10010 int_const_binop (PLUS_EXPR, arg1,
10011 size_int (coffset)));
10014 return NULL_TREE;
10016 case POINTER_PLUS_EXPR:
10017 /* 0 +p index -> (type)index */
10018 if (integer_zerop (arg0))
10019 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10021 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10022 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10023 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10024 return fold_convert_loc (loc, type,
10025 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10026 fold_convert_loc (loc, sizetype,
10027 arg1),
10028 fold_convert_loc (loc, sizetype,
10029 arg0)));
10031 /* (PTR +p B) +p A -> PTR +p (B + A) */
10032 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10034 tree inner;
10035 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10036 tree arg00 = TREE_OPERAND (arg0, 0);
10037 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10038 arg01, fold_convert_loc (loc, sizetype, arg1));
10039 return fold_convert_loc (loc, type,
10040 fold_build_pointer_plus_loc (loc,
10041 arg00, inner));
10044 /* PTR_CST +p CST -> CST1 */
10045 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10046 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10047 fold_convert_loc (loc, type, arg1));
10049 return NULL_TREE;
10051 case PLUS_EXPR:
10052 /* A + (-B) -> A - B */
10053 if (TREE_CODE (arg1) == NEGATE_EXPR
10054 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10055 return fold_build2_loc (loc, MINUS_EXPR, type,
10056 fold_convert_loc (loc, type, arg0),
10057 fold_convert_loc (loc, type,
10058 TREE_OPERAND (arg1, 0)));
10059 /* (-A) + B -> B - A */
10060 if (TREE_CODE (arg0) == NEGATE_EXPR
10061 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10062 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10063 return fold_build2_loc (loc, MINUS_EXPR, type,
10064 fold_convert_loc (loc, type, arg1),
10065 fold_convert_loc (loc, type,
10066 TREE_OPERAND (arg0, 0)));
10068 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10070 /* Convert ~A + 1 to -A. */
10071 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10072 && integer_each_onep (arg1))
10073 return fold_build1_loc (loc, NEGATE_EXPR, type,
10074 fold_convert_loc (loc, type,
10075 TREE_OPERAND (arg0, 0)));
10077 /* ~X + X is -1. */
10078 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10079 && !TYPE_OVERFLOW_TRAPS (type))
10081 tree tem = TREE_OPERAND (arg0, 0);
10083 STRIP_NOPS (tem);
10084 if (operand_equal_p (tem, arg1, 0))
10086 t1 = build_all_ones_cst (type);
10087 return omit_one_operand_loc (loc, type, t1, arg1);
10091 /* X + ~X is -1. */
10092 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10093 && !TYPE_OVERFLOW_TRAPS (type))
10095 tree tem = TREE_OPERAND (arg1, 0);
10097 STRIP_NOPS (tem);
10098 if (operand_equal_p (arg0, tem, 0))
10100 t1 = build_all_ones_cst (type);
10101 return omit_one_operand_loc (loc, type, t1, arg0);
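/* For example, both ~x + x and x + ~x fold to the all-ones
   constant -1 when overflow does not trap. */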
10105 /* X + (X / CST) * -CST is X % CST. */
10106 if (TREE_CODE (arg1) == MULT_EXPR
10107 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10108 && operand_equal_p (arg0,
10109 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10111 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10112 tree cst1 = TREE_OPERAND (arg1, 1);
10113 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10114 cst1, cst0);
10115 if (sum && integer_zerop (sum))
10116 return fold_convert_loc (loc, type,
10117 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10118 TREE_TYPE (arg0), arg0,
10119 cst0));
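/* For example, x + (x / 8) * -8 becomes x % 8. */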
10123 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10124 one. Make sure the type is not saturating and has the signedness of
10125 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10126 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10127 if ((TREE_CODE (arg0) == MULT_EXPR
10128 || TREE_CODE (arg1) == MULT_EXPR)
10129 && !TYPE_SATURATING (type)
10130 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10131 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10132 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10134 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10135 if (tem)
10136 return tem;
10139 if (! FLOAT_TYPE_P (type))
10141 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10142 with a constant, and the two constants have no bits in common,
10143 we should treat this as a BIT_IOR_EXPR since this may produce more
10144 simplifications. */
10145 if (TREE_CODE (arg0) == BIT_AND_EXPR
10146 && TREE_CODE (arg1) == BIT_AND_EXPR
10147 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10148 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10149 && wi::bit_and (TREE_OPERAND (arg0, 1),
10150 TREE_OPERAND (arg1, 1)) == 0)
10152 code = BIT_IOR_EXPR;
10153 goto bit_ior;
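/* For example, (x & 0xF0) + (y & 0x0F) is handled as
   (x & 0xF0) | (y & 0x0F), since no carries can occur. */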
10156 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10157 (plus (plus (mult) (mult)) (foo)) so that we can
10158 take advantage of the factoring cases below. */
10159 if (TYPE_OVERFLOW_WRAPS (type)
10160 && (((TREE_CODE (arg0) == PLUS_EXPR
10161 || TREE_CODE (arg0) == MINUS_EXPR)
10162 && TREE_CODE (arg1) == MULT_EXPR)
10163 || ((TREE_CODE (arg1) == PLUS_EXPR
10164 || TREE_CODE (arg1) == MINUS_EXPR)
10165 && TREE_CODE (arg0) == MULT_EXPR)))
10167 tree parg0, parg1, parg, marg;
10168 enum tree_code pcode;
10170 if (TREE_CODE (arg1) == MULT_EXPR)
10171 parg = arg0, marg = arg1;
10172 else
10173 parg = arg1, marg = arg0;
10174 pcode = TREE_CODE (parg);
10175 parg0 = TREE_OPERAND (parg, 0);
10176 parg1 = TREE_OPERAND (parg, 1);
10177 STRIP_NOPS (parg0);
10178 STRIP_NOPS (parg1);
10180 if (TREE_CODE (parg0) == MULT_EXPR
10181 && TREE_CODE (parg1) != MULT_EXPR)
10182 return fold_build2_loc (loc, pcode, type,
10183 fold_build2_loc (loc, PLUS_EXPR, type,
10184 fold_convert_loc (loc, type,
10185 parg0),
10186 fold_convert_loc (loc, type,
10187 marg)),
10188 fold_convert_loc (loc, type, parg1));
10189 if (TREE_CODE (parg0) != MULT_EXPR
10190 && TREE_CODE (parg1) == MULT_EXPR)
10191 return
10192 fold_build2_loc (loc, PLUS_EXPR, type,
10193 fold_convert_loc (loc, type, parg0),
10194 fold_build2_loc (loc, pcode, type,
10195 fold_convert_loc (loc, type, marg),
10196 fold_convert_loc (loc, type,
10197 parg1)));
10200 else
10202 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10203 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10204 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10206 /* Likewise if the operands are reversed. */
10207 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10208 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10210 /* Convert X + -C into X - C. */
10211 if (TREE_CODE (arg1) == REAL_CST
10212 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10214 tem = fold_negate_const (arg1, type);
10215 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10216 return fold_build2_loc (loc, MINUS_EXPR, type,
10217 fold_convert_loc (loc, type, arg0),
10218 fold_convert_loc (loc, type, tem));
10221 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10222 to __complex__ ( x, y ). This is not the same for SNaNs or
10223 if signed zeros are involved. */
10224 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10225 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10226 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10228 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10229 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10230 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10231 bool arg0rz = false, arg0iz = false;
10232 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10233 || (arg0i && (arg0iz = real_zerop (arg0i))))
10235 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10236 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10237 if (arg0rz && arg1i && real_zerop (arg1i))
10239 tree rp = arg1r ? arg1r
10240 : build1 (REALPART_EXPR, rtype, arg1);
10241 tree ip = arg0i ? arg0i
10242 : build1 (IMAGPART_EXPR, rtype, arg0);
10243 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10245 else if (arg0iz && arg1r && real_zerop (arg1r))
10247 tree rp = arg0r ? arg0r
10248 : build1 (REALPART_EXPR, rtype, arg0);
10249 tree ip = arg1i ? arg1i
10250 : build1 (IMAGPART_EXPR, rtype, arg1);
10251 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10256 if (flag_unsafe_math_optimizations
10257 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10258 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10259 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10260 return tem;
10262 /* Convert x+x into x*2.0. */
10263 if (operand_equal_p (arg0, arg1, 0)
10264 && SCALAR_FLOAT_TYPE_P (type))
10265 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10266 build_real (type, dconst2));
10268 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10269 We associate floats only if the user has specified
10270 -fassociative-math. */
10271 if (flag_associative_math
10272 && TREE_CODE (arg1) == PLUS_EXPR
10273 && TREE_CODE (arg0) != MULT_EXPR)
10275 tree tree10 = TREE_OPERAND (arg1, 0);
10276 tree tree11 = TREE_OPERAND (arg1, 1);
10277 if (TREE_CODE (tree11) == MULT_EXPR
10278 && TREE_CODE (tree10) == MULT_EXPR)
10280 tree tree0;
10281 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10282 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10285 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10286 We associate floats only if the user has specified
10287 -fassociative-math. */
10288 if (flag_associative_math
10289 && TREE_CODE (arg0) == PLUS_EXPR
10290 && TREE_CODE (arg1) != MULT_EXPR)
10292 tree tree00 = TREE_OPERAND (arg0, 0);
10293 tree tree01 = TREE_OPERAND (arg0, 1);
10294 if (TREE_CODE (tree01) == MULT_EXPR
10295 && TREE_CODE (tree00) == MULT_EXPR)
10297 tree tree0;
10298 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10299 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10304 bit_rotate:
10305 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10306 is a rotate of A by C1 bits. */
10307 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10308 is a rotate of A by B bits. */
10310 enum tree_code code0, code1;
10311 tree rtype;
10312 code0 = TREE_CODE (arg0);
10313 code1 = TREE_CODE (arg1);
10314 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10315 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10316 && operand_equal_p (TREE_OPERAND (arg0, 0),
10317 TREE_OPERAND (arg1, 0), 0)
10318 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10319 TYPE_UNSIGNED (rtype))
10320 /* Only create rotates in complete modes. Other cases are not
10321 expanded properly. */
10322 && (element_precision (rtype)
10323 == element_precision (TYPE_MODE (rtype))))
10325 tree tree01, tree11;
10326 enum tree_code code01, code11;
10328 tree01 = TREE_OPERAND (arg0, 1);
10329 tree11 = TREE_OPERAND (arg1, 1);
10330 STRIP_NOPS (tree01);
10331 STRIP_NOPS (tree11);
10332 code01 = TREE_CODE (tree01);
10333 code11 = TREE_CODE (tree11);
10334 if (code01 == INTEGER_CST
10335 && code11 == INTEGER_CST
10336 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10337 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10339 tem = build2_loc (loc, LROTATE_EXPR,
10340 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10341 TREE_OPERAND (arg0, 0),
10342 code0 == LSHIFT_EXPR ? tree01 : tree11);
10343 return fold_convert_loc (loc, type, tem);
10345 else if (code11 == MINUS_EXPR)
10347 tree tree110, tree111;
10348 tree110 = TREE_OPERAND (tree11, 0);
10349 tree111 = TREE_OPERAND (tree11, 1);
10350 STRIP_NOPS (tree110);
10351 STRIP_NOPS (tree111);
10352 if (TREE_CODE (tree110) == INTEGER_CST
10353 && 0 == compare_tree_int (tree110,
10354 element_precision
10355 (TREE_TYPE (TREE_OPERAND
10356 (arg0, 0))))
10357 && operand_equal_p (tree01, tree111, 0))
10358 return
10359 fold_convert_loc (loc, type,
10360 build2 ((code0 == LSHIFT_EXPR
10361 ? LROTATE_EXPR
10362 : RROTATE_EXPR),
10363 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10364 TREE_OPERAND (arg0, 0), tree01));
10366 else if (code01 == MINUS_EXPR)
10368 tree tree010, tree011;
10369 tree010 = TREE_OPERAND (tree01, 0);
10370 tree011 = TREE_OPERAND (tree01, 1);
10371 STRIP_NOPS (tree010);
10372 STRIP_NOPS (tree011);
10373 if (TREE_CODE (tree010) == INTEGER_CST
10374 && 0 == compare_tree_int (tree010,
10375 element_precision
10376 (TREE_TYPE (TREE_OPERAND
10377 (arg0, 0))))
10378 && operand_equal_p (tree11, tree011, 0))
10379 return fold_convert_loc
10380 (loc, type,
10381 build2 ((code0 != LSHIFT_EXPR
10382 ? LROTATE_EXPR
10383 : RROTATE_EXPR),
10384 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10385 TREE_OPERAND (arg0, 0), tree11));
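/* For example, for unsigned 32-bit x, both (x << 3) + (x >> 29)
   and (x << n) + (x >> (32 - n)) become rotates of x. */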
10390 associate:
10391 /* In most languages, we can't associate operations on floats through
10392 parentheses. Rather than remember where the parentheses were, we
10393 don't associate floats at all, unless the user has specified
10394 -fassociative-math.
10395 And, we need to make sure type is not saturating. */
10397 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10398 && !TYPE_SATURATING (type))
10400 tree var0, con0, lit0, minus_lit0;
10401 tree var1, con1, lit1, minus_lit1;
10402 tree atype = type;
10403 bool ok = true;
10405 /* Split both trees into variables, constants, and literals. Then
10406 associate each group together, the constants with literals,
10407 then the result with variables. This increases the chances of
10408 literals being recombined later and of generating relocatable
10409 expressions for the sum of a constant and literal. */
10410 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10411 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10412 code == MINUS_EXPR);
10414 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10415 if (code == MINUS_EXPR)
10416 code = PLUS_EXPR;
10418 /* With undefined overflow prefer doing association in a type
10419 which wraps on overflow, if that is one of the operand types. */
10420 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10421 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10423 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10424 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10425 atype = TREE_TYPE (arg0);
10426 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10427 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10428 atype = TREE_TYPE (arg1);
10429 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10432 /* With undefined overflow we can only associate constants with one
10433 variable, and constants whose association doesn't overflow. */
10434 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10435 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10437 if (var0 && var1)
10439 tree tmp0 = var0;
10440 tree tmp1 = var1;
10442 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10443 tmp0 = TREE_OPERAND (tmp0, 0);
10444 if (CONVERT_EXPR_P (tmp0)
10445 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10446 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10447 <= TYPE_PRECISION (atype)))
10448 tmp0 = TREE_OPERAND (tmp0, 0);
10449 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10450 tmp1 = TREE_OPERAND (tmp1, 0);
10451 if (CONVERT_EXPR_P (tmp1)
10452 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10453 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10454 <= TYPE_PRECISION (atype)))
10455 tmp1 = TREE_OPERAND (tmp1, 0);
10456 /* The only case we can still associate with two variables
10457 is if they are the same, modulo negation and bit-pattern
10458 preserving conversions. */
10459 if (!operand_equal_p (tmp0, tmp1, 0))
10460 ok = false;
10464 /* Only do something if we found more than two objects. Otherwise,
10465 nothing has changed and we risk infinite recursion. */
10466 if (ok
10467 && (2 < ((var0 != 0) + (var1 != 0)
10468 + (con0 != 0) + (con1 != 0)
10469 + (lit0 != 0) + (lit1 != 0)
10470 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10472 bool any_overflows = false;
10473 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10474 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10475 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10476 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10477 var0 = associate_trees (loc, var0, var1, code, atype);
10478 con0 = associate_trees (loc, con0, con1, code, atype);
10479 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10480 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10481 code, atype);
10483 /* Preserve the MINUS_EXPR if the negative part of the literal is
10484 greater than the positive part. Otherwise, the multiplicative
10485 folding code (i.e. extract_muldiv) may be fooled in case
10486 unsigned constants are subtracted, like in the following
10487 example: ((X*2 + 4) - 8U)/2. */
10488 if (minus_lit0 && lit0)
10490 if (TREE_CODE (lit0) == INTEGER_CST
10491 && TREE_CODE (minus_lit0) == INTEGER_CST
10492 && tree_int_cst_lt (lit0, minus_lit0))
10494 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10495 MINUS_EXPR, atype);
10496 lit0 = 0;
10498 else
10500 lit0 = associate_trees (loc, lit0, minus_lit0,
10501 MINUS_EXPR, atype);
10502 minus_lit0 = 0;
10506 /* Don't introduce overflows through reassociation. */
10507 if (!any_overflows
10508 && ((lit0 && TREE_OVERFLOW (lit0))
10509 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10510 return NULL_TREE;
10512 if (minus_lit0)
10514 if (con0 == 0)
10515 return
10516 fold_convert_loc (loc, type,
10517 associate_trees (loc, var0, minus_lit0,
10518 MINUS_EXPR, atype));
10519 else
10521 con0 = associate_trees (loc, con0, minus_lit0,
10522 MINUS_EXPR, atype);
10523 return
10524 fold_convert_loc (loc, type,
10525 associate_trees (loc, var0, con0,
10526 PLUS_EXPR, atype));
10530 con0 = associate_trees (loc, con0, lit0, code, atype);
10531 return
10532 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10533 code, atype));
10537 return NULL_TREE;
10539 case MINUS_EXPR:
10540 /* Pointer simplifications for subtraction, simple reassociations. */
10541 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10543 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10544 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10545 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10547 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10548 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10549 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10550 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10551 return fold_build2_loc (loc, PLUS_EXPR, type,
10552 fold_build2_loc (loc, MINUS_EXPR, type,
10553 arg00, arg10),
10554 fold_build2_loc (loc, MINUS_EXPR, type,
10555 arg01, arg11));
10557 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10558 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10560 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10561 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10562 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10563 fold_convert_loc (loc, type, arg1));
10564 if (tmp)
10565 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10567 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10568 simplifies. */
10569 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10571 tree arg10 = fold_convert_loc (loc, type,
10572 TREE_OPERAND (arg1, 0));
10573 tree arg11 = fold_convert_loc (loc, type,
10574 TREE_OPERAND (arg1, 1));
10575 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10576 fold_convert_loc (loc, type, arg0),
10577 arg10);
10578 if (tmp)
10579 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10582 /* A - (-B) -> A + B */
10583 if (TREE_CODE (arg1) == NEGATE_EXPR)
10584 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10585 fold_convert_loc (loc, type,
10586 TREE_OPERAND (arg1, 0)));
10587 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10588 if (TREE_CODE (arg0) == NEGATE_EXPR
10589 && negate_expr_p (arg1)
10590 && reorder_operands_p (arg0, arg1))
10591 return fold_build2_loc (loc, MINUS_EXPR, type,
10592 fold_convert_loc (loc, type,
10593 negate_expr (arg1)),
10594 fold_convert_loc (loc, type,
10595 TREE_OPERAND (arg0, 0)));
10596 /* Convert -A - 1 to ~A. */
10597 if (TREE_CODE (arg0) == NEGATE_EXPR
10598 && integer_each_onep (arg1)
10599 && !TYPE_OVERFLOW_TRAPS (type))
10600 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10601 fold_convert_loc (loc, type,
10602 TREE_OPERAND (arg0, 0)));
10604 /* Convert -1 - A to ~A. */
10605 if (TREE_CODE (type) != COMPLEX_TYPE
10606 && integer_all_onesp (arg0))
10607 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
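/* For example, -x - 1 and -1 - x both fold to ~x. */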
10610 /* X - (X / Y) * Y is X % Y. */
10611 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10612 && TREE_CODE (arg1) == MULT_EXPR
10613 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10614 && operand_equal_p (arg0,
10615 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10616 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10617 TREE_OPERAND (arg1, 1), 0))
10618 return
10619 fold_convert_loc (loc, type,
10620 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10621 arg0, TREE_OPERAND (arg1, 1)));
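	 /* An illustrative sketch (x and y here are hypothetical integer
	    operands, not names used in this function):

	       x - (x / y) * y   ==>   x % y

	    which holds for any nonzero y, because truncating division
	    guarantees x == (x / y) * y + x % y.  */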
10623 if (! FLOAT_TYPE_P (type))
10625 if (integer_zerop (arg0))
10626 return negate_expr (fold_convert_loc (loc, type, arg1));
10628 /* Fold A - (A & B) into ~B & A. */
10629 if (!TREE_SIDE_EFFECTS (arg0)
10630 && TREE_CODE (arg1) == BIT_AND_EXPR)
10632 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10634 tree arg10 = fold_convert_loc (loc, type,
10635 TREE_OPERAND (arg1, 0));
10636 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10637 fold_build1_loc (loc, BIT_NOT_EXPR,
10638 type, arg10),
10639 fold_convert_loc (loc, type, arg0));
10641 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10643 tree arg11 = fold_convert_loc (loc,
10644 type, TREE_OPERAND (arg1, 1));
10645 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10646 fold_build1_loc (loc, BIT_NOT_EXPR,
10647 type, arg11),
10648 fold_convert_loc (loc, type, arg0));
10652 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10653 any power of 2 minus 1. */
10654 if (TREE_CODE (arg0) == BIT_AND_EXPR
10655 && TREE_CODE (arg1) == BIT_AND_EXPR
10656 && operand_equal_p (TREE_OPERAND (arg0, 0),
10657 TREE_OPERAND (arg1, 0), 0))
10659 tree mask0 = TREE_OPERAND (arg0, 1);
10660 tree mask1 = TREE_OPERAND (arg1, 1);
10661 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10663 if (operand_equal_p (tem, mask1, 0))
10665 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10666 TREE_OPERAND (arg0, 0), mask1);
10667 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
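	 /* For example, with the hypothetical mask B == 7 (a power of 2
	    minus 1) and A == a:

	       (a & ~7) - (a & 7)   ==>   (a ^ 7) - 7

	    With a == 8q + r, 0 <= r < 8, the XOR yields 8q + (7 - r), and
	    subtracting 7 leaves 8q - r, the value of the left-hand side.  */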
10672 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10673 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10674 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10676 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10677 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10678 (-ARG1 + ARG0) reduces to -ARG1. */
10679 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10680 return negate_expr (fold_convert_loc (loc, type, arg1));
10682 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10683 __complex__ ( x, -y ). This is not the same for SNaNs or if
10684 signed zeros are involved. */
10685 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10686 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10687 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10689 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10690 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10691 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10692 bool arg0rz = false, arg0iz = false;
10693 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10694 || (arg0i && (arg0iz = real_zerop (arg0i))))
10696 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10697 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10698 if (arg0rz && arg1i && real_zerop (arg1i))
10700 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10701 arg1r ? arg1r
10702 : build1 (REALPART_EXPR, rtype, arg1));
10703 tree ip = arg0i ? arg0i
10704 : build1 (IMAGPART_EXPR, rtype, arg0);
10705 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10707 else if (arg0iz && arg1r && real_zerop (arg1r))
10709 tree rp = arg0r ? arg0r
10710 : build1 (REALPART_EXPR, rtype, arg0);
10711 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10712 arg1i ? arg1i
10713 : build1 (IMAGPART_EXPR, rtype, arg1));
10714 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10719 /* A - B -> A + (-B) if B is easily negatable. */
10720 if (negate_expr_p (arg1)
10721 && ((FLOAT_TYPE_P (type)
10722 /* Avoid this transformation if B is a positive REAL_CST. */
10723 && (TREE_CODE (arg1) != REAL_CST
10724 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10725 || INTEGRAL_TYPE_P (type)))
10726 return fold_build2_loc (loc, PLUS_EXPR, type,
10727 fold_convert_loc (loc, type, arg0),
10728 fold_convert_loc (loc, type,
10729 negate_expr (arg1)));
10731 /* Try folding difference of addresses. */
10733 HOST_WIDE_INT diff;
10735 if ((TREE_CODE (arg0) == ADDR_EXPR
10736 || TREE_CODE (arg1) == ADDR_EXPR)
10737 && ptr_difference_const (arg0, arg1, &diff))
10738 return build_int_cst_type (type, diff);
10741 /* Fold &a[i] - &a[j] to i-j. */
10742 if (TREE_CODE (arg0) == ADDR_EXPR
10743 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10744 && TREE_CODE (arg1) == ADDR_EXPR
10745 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10747 tree tem = fold_addr_of_array_ref_difference (loc, type,
10748 TREE_OPERAND (arg0, 0),
10749 TREE_OPERAND (arg1, 0));
10750 if (tem)
10751 return tem;
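	 /* A sketch of the effect on hypothetical user code (a, i and j
	    are not names used here):

	       int a[16];
	       ptrdiff_t d = &a[i] - &a[j];   ==>   d = i - j;

	    Pointer subtraction is measured in elements, so the element
	    size cancels out of the address difference.  */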
10754 if (FLOAT_TYPE_P (type)
10755 && flag_unsafe_math_optimizations
10756 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10757 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10758 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10759 return tem;
10761 /* Handle (A1 * C1) - (A2 * C2) with A1 and A2, or C1 and C2, being the
10762 same, or one of them being 1. Make sure the type is not saturating and has the signedness of
10763 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10764 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10765 if ((TREE_CODE (arg0) == MULT_EXPR
10766 || TREE_CODE (arg1) == MULT_EXPR)
10767 && !TYPE_SATURATING (type)
10768 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10769 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10770 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10772 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10773 if (tem)
10774 return tem;
10777 goto associate;
10779 case MULT_EXPR:
10780 /* (-A) * (-B) -> A * B */
10781 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10782 return fold_build2_loc (loc, MULT_EXPR, type,
10783 fold_convert_loc (loc, type,
10784 TREE_OPERAND (arg0, 0)),
10785 fold_convert_loc (loc, type,
10786 negate_expr (arg1)));
10787 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10788 return fold_build2_loc (loc, MULT_EXPR, type,
10789 fold_convert_loc (loc, type,
10790 negate_expr (arg0)),
10791 fold_convert_loc (loc, type,
10792 TREE_OPERAND (arg1, 0)));
10794 if (! FLOAT_TYPE_P (type))
10796 /* Transform x * -1 into -x. Make sure to do the negation
10797 on the original operand with conversions not stripped
10798 because we can only strip non-sign-changing conversions. */
10799 if (integer_minus_onep (arg1))
10800 return fold_convert_loc (loc, type, negate_expr (op0));
10801 /* Transform x * -C into -x * C if x is easily negatable. */
10802 if (TREE_CODE (arg1) == INTEGER_CST
10803 && tree_int_cst_sgn (arg1) == -1
10804 && negate_expr_p (arg0)
10805 && (tem = negate_expr (arg1)) != arg1
10806 && !TREE_OVERFLOW (tem))
10807 return fold_build2_loc (loc, MULT_EXPR, type,
10808 fold_convert_loc (loc, type,
10809 negate_expr (arg0)),
10810 tem);
10812 /* (a * (1 << b)) is (a << b) */
10813 if (TREE_CODE (arg1) == LSHIFT_EXPR
10814 && integer_onep (TREE_OPERAND (arg1, 0)))
10815 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10816 TREE_OPERAND (arg1, 1));
10817 if (TREE_CODE (arg0) == LSHIFT_EXPR
10818 && integer_onep (TREE_OPERAND (arg0, 0)))
10819 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10820 TREE_OPERAND (arg0, 1));
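	 /* For instance, with hypothetical integer operands a and b:

	       a * (1 << b)   ==>   a << b
	       (1 << b) * a   ==>   a << b

	    since multiplying by 1 << b is the same as shifting left by b
	    for integral types.  */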
10822 /* (A + A) * C -> A * 2 * C */
10823 if (TREE_CODE (arg0) == PLUS_EXPR
10824 && TREE_CODE (arg1) == INTEGER_CST
10825 && operand_equal_p (TREE_OPERAND (arg0, 0),
10826 TREE_OPERAND (arg0, 1), 0))
10827 return fold_build2_loc (loc, MULT_EXPR, type,
10828 omit_one_operand_loc (loc, type,
10829 TREE_OPERAND (arg0, 0),
10830 TREE_OPERAND (arg0, 1)),
10831 fold_build2_loc (loc, MULT_EXPR, type,
10832 build_int_cst (type, 2), arg1));
10834 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10835 sign-changing only. */
10836 if (TREE_CODE (arg1) == INTEGER_CST
10837 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10838 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10839 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10841 strict_overflow_p = false;
10842 if (TREE_CODE (arg1) == INTEGER_CST
10843 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10844 &strict_overflow_p)))
10846 if (strict_overflow_p)
10847 fold_overflow_warning (("assuming signed overflow does not "
10848 "occur when simplifying "
10849 "multiplication"),
10850 WARN_STRICT_OVERFLOW_MISC);
10851 return fold_convert_loc (loc, type, tem);
10854 /* Optimize z * conj(z) for integer complex numbers. */
10855 if (TREE_CODE (arg0) == CONJ_EXPR
10856 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10857 return fold_mult_zconjz (loc, type, arg1);
10858 if (TREE_CODE (arg1) == CONJ_EXPR
10859 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10860 return fold_mult_zconjz (loc, type, arg0);
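	 /* Illustration with a hypothetical complex integer z == a + b*i:

	       z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b + 0*i

	    so the product is purely real, which is what fold_mult_zconjz
	    builds directly.  */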
10862 else
10864 /* Maybe fold x * 0 to 0. The expressions aren't the same
10865 when x is NaN, since x * 0 is also NaN. Nor are they the
10866 same in modes with signed zeros, since multiplying a
10867 negative value by 0 gives -0, not +0. */
10868 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10869 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10870 && real_zerop (arg1))
10871 return omit_one_operand_loc (loc, type, arg1, arg0);
10872 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10873 Likewise for complex arithmetic with signed zeros. */
10874 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10875 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10876 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10877 && real_onep (arg1))
10878 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10880 /* Transform x * -1.0 into -x. */
10881 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10882 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10883 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10884 && real_minus_onep (arg1))
10885 return fold_convert_loc (loc, type, negate_expr (arg0));
10887 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10888 the result for floating point types due to rounding, so it is applied
10889 only if -fassociative-math was specified. */
10890 if (flag_associative_math
10891 && TREE_CODE (arg0) == RDIV_EXPR
10892 && TREE_CODE (arg1) == REAL_CST
10893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10895 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10896 arg1);
10897 if (tem)
10898 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10899 TREE_OPERAND (arg0, 1));
10902 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10903 if (operand_equal_p (arg0, arg1, 0))
10905 tree tem = fold_strip_sign_ops (arg0);
10906 if (tem != NULL_TREE)
10908 tem = fold_convert_loc (loc, type, tem);
10909 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10913 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10914 This is not the same for NaNs or if signed zeros are
10915 involved. */
10916 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10917 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10918 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10919 && TREE_CODE (arg1) == COMPLEX_CST
10920 && real_zerop (TREE_REALPART (arg1)))
10922 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10923 if (real_onep (TREE_IMAGPART (arg1)))
10924 return
10925 fold_build2_loc (loc, COMPLEX_EXPR, type,
10926 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10927 rtype, arg0)),
10928 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10929 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10930 return
10931 fold_build2_loc (loc, COMPLEX_EXPR, type,
10932 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10933 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10934 rtype, arg0)));
10937 /* Optimize z * conj(z) for floating point complex numbers.
10938 Guarded by flag_unsafe_math_optimizations as non-finite
10939 imaginary components don't produce scalar results. */
10940 if (flag_unsafe_math_optimizations
10941 && TREE_CODE (arg0) == CONJ_EXPR
10942 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10943 return fold_mult_zconjz (loc, type, arg1);
10944 if (flag_unsafe_math_optimizations
10945 && TREE_CODE (arg1) == CONJ_EXPR
10946 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10947 return fold_mult_zconjz (loc, type, arg0);
10949 if (flag_unsafe_math_optimizations)
10951 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10952 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10954 /* Optimizations of root(...)*root(...). */
10955 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10957 tree rootfn, arg;
10958 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10959 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10961 /* Optimize sqrt(x)*sqrt(x) as x. */
10962 if (BUILTIN_SQRT_P (fcode0)
10963 && operand_equal_p (arg00, arg10, 0)
10964 && ! HONOR_SNANS (TYPE_MODE (type)))
10965 return arg00;
10967 /* Optimize root(x)*root(y) as root(x*y). */
10968 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10969 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10970 return build_call_expr_loc (loc, rootfn, 1, arg);
10973 /* Optimize expN(x)*expN(y) as expN(x+y). */
10974 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10976 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10977 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10978 CALL_EXPR_ARG (arg0, 0),
10979 CALL_EXPR_ARG (arg1, 0));
10980 return build_call_expr_loc (loc, expfn, 1, arg);
10983 /* Optimizations of pow(...)*pow(...). */
10984 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10985 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10986 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10988 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10989 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10990 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10991 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10993 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10994 if (operand_equal_p (arg01, arg11, 0))
10996 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10997 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10998 arg00, arg10);
10999 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11002 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11003 if (operand_equal_p (arg00, arg10, 0))
11005 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11006 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11007 arg01, arg11);
11008 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
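	 /* For example, with hypothetical arguments x, y and z:

	       pow (x, y) * pow (z, y)   ==>   pow (x * z, y)
	       pow (x, y) * pow (x, z)   ==>   pow (x, y + z)

	    both follow from the usual exponent laws; rounding may differ,
	    which is why these live under flag_unsafe_math_optimizations.  */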
11012 /* Optimize tan(x)*cos(x) as sin(x). */
11013 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11014 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11015 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11016 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11017 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11018 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11019 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11020 CALL_EXPR_ARG (arg1, 0), 0))
11022 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11024 if (sinfn != NULL_TREE)
11025 return build_call_expr_loc (loc, sinfn, 1,
11026 CALL_EXPR_ARG (arg0, 0));
11029 /* Optimize x*pow(x,c) as pow(x,c+1). */
11030 if (fcode1 == BUILT_IN_POW
11031 || fcode1 == BUILT_IN_POWF
11032 || fcode1 == BUILT_IN_POWL)
11034 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11035 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11036 if (TREE_CODE (arg11) == REAL_CST
11037 && !TREE_OVERFLOW (arg11)
11038 && operand_equal_p (arg0, arg10, 0))
11040 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11041 REAL_VALUE_TYPE c;
11042 tree arg;
11044 c = TREE_REAL_CST (arg11);
11045 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11046 arg = build_real (type, c);
11047 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11051 /* Optimize pow(x,c)*x as pow(x,c+1). */
11052 if (fcode0 == BUILT_IN_POW
11053 || fcode0 == BUILT_IN_POWF
11054 || fcode0 == BUILT_IN_POWL)
11056 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11057 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11058 if (TREE_CODE (arg01) == REAL_CST
11059 && !TREE_OVERFLOW (arg01)
11060 && operand_equal_p (arg1, arg00, 0))
11062 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11063 REAL_VALUE_TYPE c;
11064 tree arg;
11066 c = TREE_REAL_CST (arg01);
11067 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11068 arg = build_real (type, c);
11069 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11073 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11074 if (!in_gimple_form
11075 && optimize
11076 && operand_equal_p (arg0, arg1, 0))
11078 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11080 if (powfn)
11082 tree arg = build_real (type, dconst2);
11083 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11088 goto associate;
11090 case BIT_IOR_EXPR:
11091 bit_ior:
11092 if (operand_equal_p (arg0, arg1, 0))
11093 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11095 /* ~X | X is -1. */
11096 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11097 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11099 t1 = build_zero_cst (type);
11100 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11101 return omit_one_operand_loc (loc, type, t1, arg1);
11104 /* X | ~X is -1. */
11105 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11106 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11108 t1 = build_zero_cst (type);
11109 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11110 return omit_one_operand_loc (loc, type, t1, arg0);
11113 /* Canonicalize (X & C1) | C2. */
11114 if (TREE_CODE (arg0) == BIT_AND_EXPR
11115 && TREE_CODE (arg1) == INTEGER_CST
11116 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11118 int width = TYPE_PRECISION (type), w;
11119 wide_int c1 = TREE_OPERAND (arg0, 1);
11120 wide_int c2 = arg1;
11122 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11123 if ((c1 & c2) == c1)
11124 return omit_one_operand_loc (loc, type, arg1,
11125 TREE_OPERAND (arg0, 0));
11127 wide_int msk = wi::mask (width, false,
11128 TYPE_PRECISION (TREE_TYPE (arg1)));
11130 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11131 if (msk.and_not (c1 | c2) == 0)
11132 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11133 TREE_OPERAND (arg0, 0), arg1);
11135 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11136 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11137 mode which allows further optimizations. */
11138 c1 &= msk;
11139 c2 &= msk;
11140 wide_int c3 = c1.and_not (c2);
11141 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11143 wide_int mask = wi::mask (w, false,
11144 TYPE_PRECISION (type));
11145 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11147 c3 = mask;
11148 break;
11152 if (c3 != c1)
11153 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11154 fold_build2_loc (loc, BIT_AND_EXPR, type,
11155 TREE_OPERAND (arg0, 0),
11156 wide_int_to_tree (type,
11157 c3)),
11158 arg1);
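	 /* As an illustration, assume a hypothetical 8-bit unsigned
	    operand x:

	       (x & 0x0f) | 0xff   ==>   0xff        as (C1 & C2) == C1
	       (x & 0x0f) | 0xf0   ==>   x | 0xf0    as (C1 | C2) == ~0

	    and otherwise C1 shrinks to C1 & ~C2 when that does not
	    destroy a mode-sized mask.  */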
11161 /* (X & Y) | Y is (X, Y). */
11162 if (TREE_CODE (arg0) == BIT_AND_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11164 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11165 /* (X & Y) | X is (Y, X). */
11166 if (TREE_CODE (arg0) == BIT_AND_EXPR
11167 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11168 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11169 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11170 /* X | (X & Y) is (Y, X). */
11171 if (TREE_CODE (arg1) == BIT_AND_EXPR
11172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11173 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11174 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11175 /* X | (Y & X) is (Y, X). */
11176 if (TREE_CODE (arg1) == BIT_AND_EXPR
11177 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11178 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11179 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11181 /* (X & ~Y) | (~X & Y) is X ^ Y */
11182 if (TREE_CODE (arg0) == BIT_AND_EXPR
11183 && TREE_CODE (arg1) == BIT_AND_EXPR)
11185 tree a0, a1, l0, l1, n0, n1;
11187 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11188 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11190 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11191 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11193 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11194 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11196 if ((operand_equal_p (n0, a0, 0)
11197 && operand_equal_p (n1, a1, 0))
11198 || (operand_equal_p (n0, a1, 0)
11199 && operand_equal_p (n1, a0, 0)))
11200 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
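	 /* Per-bit truth-table check of the rewrite above:

	       X Y   (X & ~Y) | (~X & Y)   X ^ Y
	       0 0            0              0
	       0 1            1              1
	       1 0            1              1
	       1 1            0              0

	    so the IOR of the two disjoint AND terms is exactly XOR.  */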
11203 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11204 if (t1 != NULL_TREE)
11205 return t1;
11207 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11209 This results in more efficient code for machines without a NAND
11210 instruction. Combine will canonicalize to the first form
11211 which will allow use of NAND instructions provided by the
11212 backend if they exist. */
11213 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11214 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11216 return
11217 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11218 build2 (BIT_AND_EXPR, type,
11219 fold_convert_loc (loc, type,
11220 TREE_OPERAND (arg0, 0)),
11221 fold_convert_loc (loc, type,
11222 TREE_OPERAND (arg1, 0))));
11225 /* See if this can be simplified into a rotate first. If that
11226 is unsuccessful continue in the association code. */
11227 goto bit_rotate;
11229 case BIT_XOR_EXPR:
11230 if (integer_all_onesp (arg1))
11231 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11233 /* ~X ^ X is -1. */
11234 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11235 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11237 t1 = build_zero_cst (type);
11238 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11239 return omit_one_operand_loc (loc, type, t1, arg1);
11242 /* X ^ ~X is -1. */
11243 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11244 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11246 t1 = build_zero_cst (type);
11247 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11248 return omit_one_operand_loc (loc, type, t1, arg0);
11251 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11252 with a constant, and the two constants have no bits in common,
11253 we should treat this as a BIT_IOR_EXPR since this may produce more
11254 simplifications. */
11255 if (TREE_CODE (arg0) == BIT_AND_EXPR
11256 && TREE_CODE (arg1) == BIT_AND_EXPR
11257 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11258 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11259 && wi::bit_and (TREE_OPERAND (arg0, 1),
11260 TREE_OPERAND (arg1, 1)) == 0)
11262 code = BIT_IOR_EXPR;
11263 goto bit_ior;
11266 /* (X | Y) ^ X -> Y & ~X. */
11267 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11268 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11270 tree t2 = TREE_OPERAND (arg0, 1);
11271 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11272 arg1);
11273 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11274 fold_convert_loc (loc, type, t2),
11275 fold_convert_loc (loc, type, t1));
11276 return t1;
11279 /* (Y | X) ^ X -> Y & ~X. */
11280 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11281 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11283 tree t2 = TREE_OPERAND (arg0, 0);
11284 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11285 arg1);
11286 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11287 fold_convert_loc (loc, type, t2),
11288 fold_convert_loc (loc, type, t1));
11289 return t1;
11292 /* X ^ (X | Y) -> Y & ~X. */
11293 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11294 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11296 tree t2 = TREE_OPERAND (arg1, 1);
11297 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11298 arg0);
11299 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11300 fold_convert_loc (loc, type, t2),
11301 fold_convert_loc (loc, type, t1));
11302 return t1;
11305 /* X ^ (Y | X) -> Y & ~X. */
11306 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11307 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11309 tree t2 = TREE_OPERAND (arg1, 0);
11310 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11311 arg0);
11312 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11313 fold_convert_loc (loc, type, t2),
11314 fold_convert_loc (loc, type, t1));
11315 return t1;
11318 /* Convert ~X ^ ~Y to X ^ Y. */
11319 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11320 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11321 return fold_build2_loc (loc, code, type,
11322 fold_convert_loc (loc, type,
11323 TREE_OPERAND (arg0, 0)),
11324 fold_convert_loc (loc, type,
11325 TREE_OPERAND (arg1, 0)));
11327 /* Convert ~X ^ C to X ^ ~C. */
11328 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11329 && TREE_CODE (arg1) == INTEGER_CST)
11330 return fold_build2_loc (loc, code, type,
11331 fold_convert_loc (loc, type,
11332 TREE_OPERAND (arg0, 0)),
11333 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11335 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11336 if (TREE_CODE (arg0) == BIT_AND_EXPR
11337 && INTEGRAL_TYPE_P (type)
11338 && integer_onep (TREE_OPERAND (arg0, 1))
11339 && integer_onep (arg1))
11340 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11341 build_zero_cst (TREE_TYPE (arg0)));
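	 /* For instance, for a hypothetical int x:

	       (x & 1) ^ 1   ==>   (x & 1) == 0

	    Both expressions are 1 when x is even and 0 when x is odd; the
	    comparison form is easier for later passes to combine.  */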
11343 /* Fold (X & Y) ^ Y as ~X & Y. */
11344 if (TREE_CODE (arg0) == BIT_AND_EXPR
11345 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11347 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11348 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11349 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11350 fold_convert_loc (loc, type, arg1));
11352 /* Fold (X & Y) ^ X as ~Y & X. */
11353 if (TREE_CODE (arg0) == BIT_AND_EXPR
11354 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11355 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11357 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11358 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11359 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11360 fold_convert_loc (loc, type, arg1));
11362 /* Fold X ^ (X & Y) as X & ~Y. */
11363 if (TREE_CODE (arg1) == BIT_AND_EXPR
11364 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11366 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11367 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11368 fold_convert_loc (loc, type, arg0),
11369 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11371 /* Fold X ^ (Y & X) as ~Y & X. */
11372 if (TREE_CODE (arg1) == BIT_AND_EXPR
11373 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11374 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11376 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11377 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11378 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11379 fold_convert_loc (loc, type, arg0));
11382 /* See if this can be simplified into a rotate first. If that
11383 is unsuccessful continue in the association code. */
11384 goto bit_rotate;
11386 case BIT_AND_EXPR:
11387 if (integer_all_onesp (arg1))
11388 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11389 if (operand_equal_p (arg0, arg1, 0))
11390 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11392 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11393 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11394 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11395 || (TREE_CODE (arg0) == EQ_EXPR
11396 && integer_zerop (TREE_OPERAND (arg0, 1))))
11397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11398 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11400 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11401 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11402 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11403 || (TREE_CODE (arg1) == EQ_EXPR
11404 && integer_zerop (TREE_OPERAND (arg1, 1))))
11405 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11406 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11408 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11409 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11410 && TREE_CODE (arg1) == INTEGER_CST
11411 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11413 tree tmp1 = fold_convert_loc (loc, type, arg1);
11414 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11415 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11416 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11417 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11418 return
11419 fold_convert_loc (loc, type,
11420 fold_build2_loc (loc, BIT_IOR_EXPR,
11421 type, tmp2, tmp3));
11424 /* (X | Y) & Y is (X, Y). */
11425 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11426 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11427 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11428 /* (X | Y) & X is (Y, X). */
11429 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11430 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11431 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11432 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11433 /* X & (X | Y) is (Y, X). */
11434 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11435 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11436 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11437 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11438 /* X & (Y | X) is (Y, X). */
11439 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11440 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11441 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11442 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11444 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11445 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11446 && INTEGRAL_TYPE_P (type)
11447 && integer_onep (TREE_OPERAND (arg0, 1))
11448 && integer_onep (arg1))
11450 tree tem2;
11451 tem = TREE_OPERAND (arg0, 0);
11452 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11453 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11454 tem, tem2);
11455 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11456 build_zero_cst (TREE_TYPE (tem)));
11458 /* Fold ~X & 1 as (X & 1) == 0. */
11459 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11460 && INTEGRAL_TYPE_P (type)
11461 && integer_onep (arg1))
11463 tree tem2;
11464 tem = TREE_OPERAND (arg0, 0);
11465 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11466 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11467 tem, tem2);
11468 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11469 build_zero_cst (TREE_TYPE (tem)));
11471 /* Fold !X & 1 as X == 0. */
11472 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11473 && integer_onep (arg1))
11475 tem = TREE_OPERAND (arg0, 0);
11476 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11477 build_zero_cst (TREE_TYPE (tem)));
11480 /* Fold (X ^ Y) & Y as ~X & Y. */
11481 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11482 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11484 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11485 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11486 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11487 fold_convert_loc (loc, type, arg1));
11489 /* Fold (X ^ Y) & X as ~Y & X. */
11490 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11491 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11492 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11494 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11495 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11496 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11497 fold_convert_loc (loc, type, arg1));
11499 /* Fold X & (X ^ Y) as X & ~Y. */
11500 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11501 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11503 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11504 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11505 fold_convert_loc (loc, type, arg0),
11506 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11508 /* Fold X & (Y ^ X) as ~Y & X. */
11509 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11510 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11511 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11513 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11514 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11515 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11516 fold_convert_loc (loc, type, arg0));
11519 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11520 multiple of 1 << CST. */
11521 if (TREE_CODE (arg1) == INTEGER_CST)
11523 wide_int cst1 = arg1;
11524 wide_int ncst1 = -cst1;
11525 if ((cst1 & ncst1) == ncst1
11526 && multiple_of_p (type, arg0,
11527 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11528 return fold_convert_loc (loc, type, arg0);
11531 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11532 bits from CST2. */
11533 if (TREE_CODE (arg1) == INTEGER_CST
11534 && TREE_CODE (arg0) == MULT_EXPR
11535 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11537 wide_int warg1 = arg1;
11538 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11540 if (masked == 0)
11541 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11542 arg0, arg1);
11543 else if (masked != warg1)
11545 /* Avoid the transform if arg1 is a mask of some
11546 mode which allows further optimizations. */
11547 int pop = wi::popcount (warg1);
11548 if (!(pop >= BITS_PER_UNIT
11549 && exact_log2 (pop) != -1
11550 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11551 return fold_build2_loc (loc, code, type, op0,
11552 wide_int_to_tree (type, masked));
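	 /* Two sketches with hypothetical constants, where x * 4 has at
	    least two trailing zero bits:

	       (x * 4) & 3   ==>   0
	       (x * 4) & 5   ==>   (x * 4) & 4

	    i.e. bits of CST2 below the trailing zeros of CST1 are known
	    zero and are dropped from the mask.  */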
11556 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11557 ((A & N) + B) & M -> (A + B) & M
11558 Similarly if (N & M) == 0,
11559 ((A | N) + B) & M -> (A + B) & M
11560 and for - instead of + (or unary - instead of +)
11561 and/or ^ instead of |.
11562 If B is constant and (B & M) == 0, fold into A & M. */
11563 if (TREE_CODE (arg1) == INTEGER_CST)
11565 wide_int cst1 = arg1;
11566 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11567 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11568 && (TREE_CODE (arg0) == PLUS_EXPR
11569 || TREE_CODE (arg0) == MINUS_EXPR
11570 || TREE_CODE (arg0) == NEGATE_EXPR)
11571 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11572 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11574 tree pmop[2];
11575 int which = 0;
11576 wide_int cst0;
11578 /* Now we know that arg0 is (C + D) or (C - D) or
11579 -C and arg1 (M) is == (1LL << cst) - 1.
11580 Store C into PMOP[0] and D into PMOP[1]. */
11581 pmop[0] = TREE_OPERAND (arg0, 0);
11582 pmop[1] = NULL;
11583 if (TREE_CODE (arg0) != NEGATE_EXPR)
11585 pmop[1] = TREE_OPERAND (arg0, 1);
11586 which = 1;
11589 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11590 which = -1;
11592 for (; which >= 0; which--)
11593 switch (TREE_CODE (pmop[which]))
11595 case BIT_AND_EXPR:
11596 case BIT_IOR_EXPR:
11597 case BIT_XOR_EXPR:
11598 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11599 != INTEGER_CST)
11600 break;
11601 cst0 = TREE_OPERAND (pmop[which], 1);
11602 cst0 &= cst1;
11603 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11605 if (cst0 != cst1)
11606 break;
11608 else if (cst0 != 0)
11609 break;
11610 /* If C or D is of the form (A & N) where
11611 (N & M) == M, or of the form (A | N) or
11612 (A ^ N) where (N & M) == 0, replace it with A. */
11613 pmop[which] = TREE_OPERAND (pmop[which], 0);
11614 break;
11615 case INTEGER_CST:
11616 /* If C or D is a N where (N & M) == 0, it can be
11617 omitted (assumed 0). */
11618 if ((TREE_CODE (arg0) == PLUS_EXPR
11619 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11620 && (cst1 & pmop[which]) == 0)
11621 pmop[which] = NULL;
11622 break;
11623 default:
11624 break;
11627 /* Only build anything new if we optimized one or both arguments
11628 above. */
11629 if (pmop[0] != TREE_OPERAND (arg0, 0)
11630 || (TREE_CODE (arg0) != NEGATE_EXPR
11631 && pmop[1] != TREE_OPERAND (arg0, 1)))
11633 tree utype = TREE_TYPE (arg0);
11634 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11636 /* Perform the operations in a type that has defined
11637 overflow behavior. */
11638 utype = unsigned_type_for (TREE_TYPE (arg0));
11639 if (pmop[0] != NULL)
11640 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11641 if (pmop[1] != NULL)
11642 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11645 if (TREE_CODE (arg0) == NEGATE_EXPR)
11646 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11647 else if (TREE_CODE (arg0) == PLUS_EXPR)
11649 if (pmop[0] != NULL && pmop[1] != NULL)
11650 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11651 pmop[0], pmop[1]);
11652 else if (pmop[0] != NULL)
11653 tem = pmop[0];
11654 else if (pmop[1] != NULL)
11655 tem = pmop[1];
11656 else
11657 return build_int_cst (type, 0);
11659 else if (pmop[0] == NULL)
11660 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11661 else
11662 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11663 pmop[0], pmop[1]);
11664 /* TEM is now the new binary +, - or unary - replacement. */
11665 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11666 fold_convert_loc (loc, utype, arg1));
11667 return fold_convert_loc (loc, type, tem);
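	 /* Examples of the rewrite above, with the hypothetical mask
	    M == 0xff:

	       ((a & 0xfff) + b) & 0xff   ==>   (a + b) & 0xff   [(N & M) == M]
	       ((a | 0x100) + b) & 0xff   ==>   (a + b) & 0xff   [(N & M) == 0]

	    The low eight bits of the sum do not depend on the discarded
	    high bits of a.  */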
11672 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11673 if (t1 != NULL_TREE)
11674 return t1;
11675 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11676 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11677 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11679 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11681 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11682 if (mask == -1)
11683 return
11684 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11687 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11689 This results in more efficient code for machines without a NOR
11690 instruction. Combine will canonicalize to the first form
11691 which will allow use of NOR instructions provided by the
11692 backend if they exist. */
11693 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11694 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11696 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11697 build2 (BIT_IOR_EXPR, type,
11698 fold_convert_loc (loc, type,
11699 TREE_OPERAND (arg0, 0)),
11700 fold_convert_loc (loc, type,
11701 TREE_OPERAND (arg1, 0))));
11704 /* If arg0 is derived from the address of an object or function, we may
11705 be able to fold this expression using the object or function's
11706 alignment. */
11707 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11709 unsigned HOST_WIDE_INT modulus, residue;
11710 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11712 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11713 integer_onep (arg1));
11715 /* This works because modulus is a power of 2. If this weren't the
11716 case, we'd have to replace it by its greatest power-of-2
11717 divisor: modulus & -modulus. */
11718 if (low < modulus)
11719 return build_int_cst (type, residue & low);
11722 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11723 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11724 if the new mask might be further optimized. */
11725 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11726 || TREE_CODE (arg0) == RSHIFT_EXPR)
11727 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11728 && TREE_CODE (arg1) == INTEGER_CST
11729 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11730 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11731 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11732 < TYPE_PRECISION (TREE_TYPE (arg0))))
11734 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11735 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11736 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11737 tree shift_type = TREE_TYPE (arg0);
11739 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11740 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11741 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11742 && TYPE_PRECISION (TREE_TYPE (arg0))
11743 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11745 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11746 tree arg00 = TREE_OPERAND (arg0, 0);
11747 /* See if more bits can be proven as zero because of
11748 zero extension. */
11749 if (TREE_CODE (arg00) == NOP_EXPR
11750 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11752 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11753 if (TYPE_PRECISION (inner_type)
11754 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11755 && TYPE_PRECISION (inner_type) < prec)
11757 prec = TYPE_PRECISION (inner_type);
11758 /* See if we can shorten the right shift. */
11759 if (shiftc < prec)
11760 shift_type = inner_type;
11761 /* Otherwise X >> C1 is all zeros, so we'll optimize
11762 it into (X, 0) later on by making sure zerobits
11763 is all ones. */
11766 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11767 if (shiftc < prec)
11769 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11770 zerobits <<= prec - shiftc;
11772 /* For an arithmetic shift, if the sign bit could be set, zerobits
11773 may actually contain sign bits, so no transformation is
11774 possible unless MASK masks them all away. In that
11775 case the shift needs to be converted into a logical shift. */
11776 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11777 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11779 if ((mask & zerobits) == 0)
11780 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11781 else
11782 zerobits = 0;
11786 /* ((X << 16) & 0xff00) is (X, 0). */
11787 if ((mask & zerobits) == mask)
11788 return omit_one_operand_loc (loc, type,
11789 build_int_cst (type, 0), arg0);
11791 newmask = mask | zerobits;
11792 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11794 /* Only do the transformation if NEWMASK is some integer
11795 mode's mask. */
11796 for (prec = BITS_PER_UNIT;
11797 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11798 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11799 break;
11800 if (prec < HOST_BITS_PER_WIDE_INT
11801 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11803 tree newmaskt;
11805 if (shift_type != TREE_TYPE (arg0))
11807 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11808 fold_convert_loc (loc, shift_type,
11809 TREE_OPERAND (arg0, 0)),
11810 TREE_OPERAND (arg0, 1));
11811 tem = fold_convert_loc (loc, type, tem);
11813 else
11814 tem = op0;
11815 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11816 if (!tree_int_cst_equal (newmaskt, arg1))
11817 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11822 goto associate;
11824 case RDIV_EXPR:
11825 /* Don't touch a floating-point divide by zero unless the mode
11826 of the constant can represent infinity. */
11827 if (TREE_CODE (arg1) == REAL_CST
11828 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11829 && real_zerop (arg1))
11830 return NULL_TREE;
11832 /* Optimize A / A to 1.0 if we don't care about
11833 NaNs or Infinities. Skip the transformation
11834 for non-real operands. */
11835 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11836 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11837 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11838 && operand_equal_p (arg0, arg1, 0))
11840 tree r = build_real (TREE_TYPE (arg0), dconst1);
11842 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11845 /* The complex version of the above A / A optimization. */
11846 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11847 && operand_equal_p (arg0, arg1, 0))
11849 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11850 if (! HONOR_NANS (TYPE_MODE (elem_type))
11851 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11853 tree r = build_real (elem_type, dconst1);
11854 /* omit_two_operands will call fold_convert for us. */
11855 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11859 /* (-A) / (-B) -> A / B */
11860 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11861 return fold_build2_loc (loc, RDIV_EXPR, type,
11862 TREE_OPERAND (arg0, 0),
11863 negate_expr (arg1));
11864 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11865 return fold_build2_loc (loc, RDIV_EXPR, type,
11866 negate_expr (arg0),
11867 TREE_OPERAND (arg1, 0));
11869 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11870 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11871 && real_onep (arg1))
11872 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11874 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11875 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11876 && real_minus_onep (arg1))
11877 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11878 negate_expr (arg0)));
11880 /* If ARG1 is a constant, we can convert this to a multiply by the
11881 reciprocal. This does not have the same rounding properties,
11882 so only do this if -freciprocal-math. We can actually
11883 always safely do it if ARG1 is a power of two, but it's hard to
11884 tell if it is or not in a portable manner. */
11885 if (optimize
11886 && (TREE_CODE (arg1) == REAL_CST
11887 || (TREE_CODE (arg1) == COMPLEX_CST
11888 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11889 || (TREE_CODE (arg1) == VECTOR_CST
11890 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11892 if (flag_reciprocal_math
11893 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11894 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11895 /* Find the reciprocal if optimizing and the result is exact.
11896 TODO: Complex reciprocal not implemented. */
11897 if (TREE_CODE (arg1) != COMPLEX_CST)
11899 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11901 if (inverse)
11902 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
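	 /* For example, under -freciprocal-math:

	       x / 10.0   ==>   x * 0.1     (rounded reciprocal)

	    while x / 2.0 ==> x * 0.5 is done even without the flag, since
	    the reciprocal of a power of two is exact.  */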
11905 /* Convert A/B/C to A/(B*C). */
11906 if (flag_reciprocal_math
11907 && TREE_CODE (arg0) == RDIV_EXPR)
11908 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11909 fold_build2_loc (loc, MULT_EXPR, type,
11910 TREE_OPERAND (arg0, 1), arg1));
11912 /* Convert A/(B/C) to (A/B)*C. */
11913 if (flag_reciprocal_math
11914 && TREE_CODE (arg1) == RDIV_EXPR)
11915 return fold_build2_loc (loc, MULT_EXPR, type,
11916 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11917 TREE_OPERAND (arg1, 0)),
11918 TREE_OPERAND (arg1, 1));
11920 /* Convert C1/(X*C2) into (C1/C2)/X. */
11921 if (flag_reciprocal_math
11922 && TREE_CODE (arg1) == MULT_EXPR
11923 && TREE_CODE (arg0) == REAL_CST
11924 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11926 tree tem = const_binop (RDIV_EXPR, arg0,
11927 TREE_OPERAND (arg1, 1));
11928 if (tem)
11929 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11930 TREE_OPERAND (arg1, 0));
11933 if (flag_unsafe_math_optimizations)
11935 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11936 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11938 /* Optimize sin(x)/cos(x) as tan(x). */
11939 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11940 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11941 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11942 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11943 CALL_EXPR_ARG (arg1, 0), 0))
11945 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11947 if (tanfn != NULL_TREE)
11948 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11951 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11952 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11953 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11954 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11955 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11956 CALL_EXPR_ARG (arg1, 0), 0))
11958 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11960 if (tanfn != NULL_TREE)
11962 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11963 CALL_EXPR_ARG (arg0, 0));
11964 return fold_build2_loc (loc, RDIV_EXPR, type,
11965 build_real (type, dconst1), tmp);
11969 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11970 NaNs or Infinities. */
11971 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11972 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11973 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11975 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11976 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11978 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11979 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11980 && operand_equal_p (arg00, arg01, 0))
11982 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11984 if (cosfn != NULL_TREE)
11985 return build_call_expr_loc (loc, cosfn, 1, arg00);
11989 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11990 NaNs or Infinities. */
11991 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11992 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11993 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11995 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11996 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11998 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11999 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12000 && operand_equal_p (arg00, arg01, 0))
12002 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12004 if (cosfn != NULL_TREE)
12006 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12007 return fold_build2_loc (loc, RDIV_EXPR, type,
12008 build_real (type, dconst1),
12009 tmp);
12014 /* Optimize pow(x,c)/x as pow(x,c-1). */
12015 if (fcode0 == BUILT_IN_POW
12016 || fcode0 == BUILT_IN_POWF
12017 || fcode0 == BUILT_IN_POWL)
12019 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12020 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12021 if (TREE_CODE (arg01) == REAL_CST
12022 && !TREE_OVERFLOW (arg01)
12023 && operand_equal_p (arg1, arg00, 0))
12025 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12026 REAL_VALUE_TYPE c;
12027 tree arg;
12029 c = TREE_REAL_CST (arg01);
12030 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12031 arg = build_real (type, c);
12032 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12036 /* Optimize a/root(b/c) into a*root(c/b). */
12037 if (BUILTIN_ROOT_P (fcode1))
12039 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12041 if (TREE_CODE (rootarg) == RDIV_EXPR)
12043 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12044 tree b = TREE_OPERAND (rootarg, 0);
12045 tree c = TREE_OPERAND (rootarg, 1);
12047 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12049 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12050 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12054 /* Optimize x/expN(y) into x*expN(-y). */
12055 if (BUILTIN_EXPONENT_P (fcode1))
12057 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12058 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12059 arg1 = build_call_expr_loc (loc,
12060 expfn, 1,
12061 fold_convert_loc (loc, type, arg));
12062 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12065 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12066 if (fcode1 == BUILT_IN_POW
12067 || fcode1 == BUILT_IN_POWF
12068 || fcode1 == BUILT_IN_POWL)
12070 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12071 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12072 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12073 tree neg11 = fold_convert_loc (loc, type,
12074 negate_expr (arg11));
12075 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12076 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12079 return NULL_TREE;
12081 case TRUNC_DIV_EXPR:
12082 /* Optimize (X & (-A)) / A where A is a power of 2,
12083 to X >> log2(A) */
12084 if (TREE_CODE (arg0) == BIT_AND_EXPR
12085 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12086 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12088 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12089 arg1, TREE_OPERAND (arg0, 1));
12090 if (sum && integer_zerop (sum)) {
12091 tree pow2 = build_int_cst (integer_type_node,
12092 wi::exact_log2 (arg1));
12093 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12094 TREE_OPERAND (arg0, 0), pow2);
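	 /* For instance, with the hypothetical power of two A == 8:

	       (x & -8) / 8   ==>   x >> 3

	    Masking with -8 clears the low three bits, so the truncating
	    division is exact and equals the arithmetic shift.  */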
12098 /* Fall through */
12100 case FLOOR_DIV_EXPR:
12101 /* Simplify A / (B << N) where A and B are positive and B is
12102 a power of 2, to A >> (N + log2(B)). */
12103 strict_overflow_p = false;
12104 if (TREE_CODE (arg1) == LSHIFT_EXPR
12105 && (TYPE_UNSIGNED (type)
12106 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12108 tree sval = TREE_OPERAND (arg1, 0);
12109 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12111 tree sh_cnt = TREE_OPERAND (arg1, 1);
12112 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12113 wi::exact_log2 (sval));
12115 if (strict_overflow_p)
12116 fold_overflow_warning (("assuming signed overflow does not "
12117 "occur when simplifying A / (B << N)"),
12118 WARN_STRICT_OVERFLOW_MISC);
12120 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12121 sh_cnt, pow2);
12122 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12123 fold_convert_loc (loc, type, arg0), sh_cnt);
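	 /* A sketch with hypothetical unsigned operands and B == 4:

	       a / (4u << n)   ==>   a >> (n + 2)

	    since dividing by a power of two shifted left by n is a right
	    shift by the combined bit count.  */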
12127 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12128 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12129 if (INTEGRAL_TYPE_P (type)
12130 && TYPE_UNSIGNED (type)
12131 && code == FLOOR_DIV_EXPR)
12132 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12134 /* Fall through */
12136 case ROUND_DIV_EXPR:
12137 case CEIL_DIV_EXPR:
12138 case EXACT_DIV_EXPR:
12139 if (integer_zerop (arg1))
12140 return NULL_TREE;
12141 /* X / -1 is -X. */
12142 if (!TYPE_UNSIGNED (type)
12143 && TREE_CODE (arg1) == INTEGER_CST
12144 && wi::eq_p (arg1, -1))
12145 return fold_convert_loc (loc, type, negate_expr (arg0));
12147 /* Convert -A / -B to A / B when the type is signed and overflow is
12148 undefined. */
12149 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12150 && TREE_CODE (arg0) == NEGATE_EXPR
12151 && negate_expr_p (arg1))
12153 if (INTEGRAL_TYPE_P (type))
12154 fold_overflow_warning (("assuming signed overflow does not occur "
12155 "when distributing negation across "
12156 "division"),
12157 WARN_STRICT_OVERFLOW_MISC);
12158 return fold_build2_loc (loc, code, type,
12159 fold_convert_loc (loc, type,
12160 TREE_OPERAND (arg0, 0)),
12161 fold_convert_loc (loc, type,
12162 negate_expr (arg1)));
12164 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12165 && TREE_CODE (arg1) == NEGATE_EXPR
12166 && negate_expr_p (arg0))
12168 if (INTEGRAL_TYPE_P (type))
12169 fold_overflow_warning (("assuming signed overflow does not occur "
12170 "when distributing negation across "
12171 "division"),
12172 WARN_STRICT_OVERFLOW_MISC);
12173 return fold_build2_loc (loc, code, type,
12174 fold_convert_loc (loc, type,
12175 negate_expr (arg0)),
12176 fold_convert_loc (loc, type,
12177 TREE_OPERAND (arg1, 0)));
12180 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12181 operation, EXACT_DIV_EXPR.
12183 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12184 At one time others generated faster code; it's not clear whether they
12185 still do after the last round of changes to the DIV code in expmed.c. */
12186 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12187 && multiple_of_p (type, arg0, arg1))
12188 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12190 strict_overflow_p = false;
12191 if (TREE_CODE (arg1) == INTEGER_CST
12192 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12193 &strict_overflow_p)))
12195 if (strict_overflow_p)
12196 fold_overflow_warning (("assuming signed overflow does not occur "
12197 "when simplifying division"),
12198 WARN_STRICT_OVERFLOW_MISC);
12199 return fold_convert_loc (loc, type, tem);
12202 return NULL_TREE;
12204 case CEIL_MOD_EXPR:
12205 case FLOOR_MOD_EXPR:
12206 case ROUND_MOD_EXPR:
12207 case TRUNC_MOD_EXPR:
12208 /* X % -1 is zero. */
12209 if (!TYPE_UNSIGNED (type)
12210 && TREE_CODE (arg1) == INTEGER_CST
12211 && wi::eq_p (arg1, -1))
12212 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12214 /* X % -C is the same as X % C. */
12215 if (code == TRUNC_MOD_EXPR
12216 && TYPE_SIGN (type) == SIGNED
12217 && TREE_CODE (arg1) == INTEGER_CST
12218 && !TREE_OVERFLOW (arg1)
12219 && wi::neg_p (arg1)
12220 && !TYPE_OVERFLOW_TRAPS (type)
12221 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12222 && !sign_bit_p (arg1, arg1))
12223 return fold_build2_loc (loc, code, type,
12224 fold_convert_loc (loc, type, arg0),
12225 fold_convert_loc (loc, type,
12226 negate_expr (arg1)));
12228 /* X % -Y is the same as X % Y. */
12229 if (code == TRUNC_MOD_EXPR
12230 && !TYPE_UNSIGNED (type)
12231 && TREE_CODE (arg1) == NEGATE_EXPR
12232 && !TYPE_OVERFLOW_TRAPS (type))
12233 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12234 fold_convert_loc (loc, type,
12235 TREE_OPERAND (arg1, 0)));
12237 strict_overflow_p = false;
12238 if (TREE_CODE (arg1) == INTEGER_CST
12239 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12240 &strict_overflow_p)))
12242 if (strict_overflow_p)
12243 fold_overflow_warning (("assuming signed overflow does not occur "
12244 "when simplifying modulus"),
12245 WARN_STRICT_OVERFLOW_MISC);
12246 return fold_convert_loc (loc, type, tem);
12249 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12250 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
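      /* E.g. for unsigned X, X % 8 becomes X & 7; for X == 29 both
         compute 5.  */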
12251 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12252 && (TYPE_UNSIGNED (type)
12253 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12255 tree c = arg1;
12256 /* Also optimize A % (C << N) where C is a power of 2,
12257 to A & ((C << N) - 1). */
12258 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12259 c = TREE_OPERAND (arg1, 0);
12261 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12263 tree mask
12264 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12265 build_int_cst (TREE_TYPE (arg1), 1));
12266 if (strict_overflow_p)
12267 fold_overflow_warning (("assuming signed overflow does not "
12268 "occur when simplifying "
12269 "X % (power of two)"),
12270 WARN_STRICT_OVERFLOW_MISC);
12271 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12272 fold_convert_loc (loc, type, arg0),
12273 fold_convert_loc (loc, type, mask));
12277 return NULL_TREE;
12279 case LROTATE_EXPR:
12280 case RROTATE_EXPR:
12281 if (integer_all_onesp (arg0))
12282 return omit_one_operand_loc (loc, type, arg0, arg1);
12283 goto shift;
12285 case RSHIFT_EXPR:
12286 /* Optimize -1 >> x for arithmetic right shifts. */
12287 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12288 && tree_expr_nonnegative_p (arg1))
12289 return omit_one_operand_loc (loc, type, arg0, arg1);
12290 /* ... fall through ... */
12292 case LSHIFT_EXPR:
12293 shift:
12294 if (integer_zerop (arg1))
12295 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12296 if (integer_zerop (arg0))
12297 return omit_one_operand_loc (loc, type, arg0, arg1);
12299 /* Prefer vector1 << scalar to vector1 << vector2
12300 if vector2 is uniform. */
12301 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12302 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12303 return fold_build2_loc (loc, code, type, op0, tem);
12305 /* Since negative shift count is not well-defined,
12306 don't try to compute it in the compiler. */
12307 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12308 return NULL_TREE;
12310 prec = element_precision (type);
12312 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
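      /* E.g. (x >> 10) >> 10 in a 32-bit type becomes x >> 20, while
         (x >> 20) >> 20 has a combined count of 40 >= 32 and so, per
         the handling below, folds to 0 for unsigned or left shifts
         and to x >> 31 for signed right shifts.  */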
12313 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12314 && tree_to_uhwi (arg1) < prec
12315 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12316 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12318 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12319 + tree_to_uhwi (arg1));
12321 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12322 being well defined. */
12323 if (low >= prec)
12325 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12326 low = low % prec;
12327 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12328 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12329 TREE_OPERAND (arg0, 0));
12330 else
12331 low = prec - 1;
12334 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12335 build_int_cst (TREE_TYPE (arg1), low));
12338 /* Transform (x >> c) << c into x & (-1 << c), or transform (x << c) >> c
12339 into x & ((unsigned)-1 >> c) for unsigned types. */
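      /* E.g. with c == 4 in a 32-bit type, (x >> 4) << 4 becomes
         x & 0xfffffff0, and for unsigned x, (x << 4) >> 4 becomes
         x & 0x0fffffff.  */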
12340 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12341 || (TYPE_UNSIGNED (type)
12342 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12343 && tree_fits_uhwi_p (arg1)
12344 && tree_to_uhwi (arg1) < prec
12345 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12346 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12348 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12349 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12350 tree lshift;
12351 tree arg00;
12353 if (low0 == low1)
12355 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12357 lshift = build_minus_one_cst (type);
12358 lshift = const_binop (code, lshift, arg1);
12360 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12364 /* Rewrite an LROTATE_EXPR by a constant into an
12365 RROTATE_EXPR by a new constant. */
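      /* Rotation is cyclic in the precision, so e.g. an 8-bit rotate
         left by 3 equals a rotate right by 8 - 3 == 5.  */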
12366 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12368 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12369 tem = const_binop (MINUS_EXPR, tem, arg1);
12370 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12373 /* If we have a rotate of a bit operation with the rotate count and
12374 the second operand of the bit operation both constant,
12375 permute the two operations. */
12376 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12377 && (TREE_CODE (arg0) == BIT_AND_EXPR
12378 || TREE_CODE (arg0) == BIT_IOR_EXPR
12379 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12381 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12382 fold_build2_loc (loc, code, type,
12383 TREE_OPERAND (arg0, 0), arg1),
12384 fold_build2_loc (loc, code, type,
12385 TREE_OPERAND (arg0, 1), arg1));
12387 /* Two consecutive rotates adding up to some integer
12388 multiple of the precision of the type can be ignored. */
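      /* E.g. two 8-bit right rotates by 5 and by 3 add up to a full
         cycle of 8 bits and therefore cancel.  */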
12389 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12390 && TREE_CODE (arg0) == RROTATE_EXPR
12391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12392 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12393 prec) == 0)
12394 return TREE_OPERAND (arg0, 0);
12396 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12397 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12398 if the latter can be further optimized. */
12399 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12400 && TREE_CODE (arg0) == BIT_AND_EXPR
12401 && TREE_CODE (arg1) == INTEGER_CST
12402 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12404 tree mask = fold_build2_loc (loc, code, type,
12405 fold_convert_loc (loc, type,
12406 TREE_OPERAND (arg0, 1)),
12407 arg1);
12408 tree shift = fold_build2_loc (loc, code, type,
12409 fold_convert_loc (loc, type,
12410 TREE_OPERAND (arg0, 0)),
12411 arg1);
12412 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12413 if (tem)
12414 return tem;
12417 return NULL_TREE;
12419 case MIN_EXPR:
12420 if (operand_equal_p (arg0, arg1, 0))
12421 return omit_one_operand_loc (loc, type, arg0, arg1);
12422 if (INTEGRAL_TYPE_P (type)
12423 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12424 return omit_one_operand_loc (loc, type, arg1, arg0);
12425 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12426 if (tem)
12427 return tem;
12428 goto associate;
12430 case MAX_EXPR:
12431 if (operand_equal_p (arg0, arg1, 0))
12432 return omit_one_operand_loc (loc, type, arg0, arg1);
12433 if (INTEGRAL_TYPE_P (type)
12434 && TYPE_MAX_VALUE (type)
12435 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12436 return omit_one_operand_loc (loc, type, arg1, arg0);
12437 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12438 if (tem)
12439 return tem;
12440 goto associate;
12442 case TRUTH_ANDIF_EXPR:
12443 /* Note that the operands of this must be ints
12444 and their values must be 0 or 1.
12445 ("true" is a fixed value perhaps depending on the language.) */
12446 /* If first arg is constant zero, return it. */
12447 if (integer_zerop (arg0))
12448 return fold_convert_loc (loc, type, arg0);
12449 case TRUTH_AND_EXPR:
12450 /* If either arg is constant true, drop it. */
12451 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12453 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12454 /* Preserve sequence points. */
12455 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12456 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12457 /* If second arg is constant zero, result is zero, but first arg
12458 must be evaluated. */
12459 if (integer_zerop (arg1))
12460 return omit_one_operand_loc (loc, type, arg1, arg0);
12461 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12462 case will be handled here. */
12463 if (integer_zerop (arg0))
12464 return omit_one_operand_loc (loc, type, arg0, arg1);
12466 /* !X && X is always false. */
12467 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12468 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12469 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12470 /* X && !X is always false. */
12471 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12472 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12473 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12475 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12476 means A >= Y && A != MAX, but in this case we know that
12477 A < X <= MAX. */
12479 if (!TREE_SIDE_EFFECTS (arg0)
12480 && !TREE_SIDE_EFFECTS (arg1))
12482 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12483 if (tem && !operand_equal_p (tem, arg0, 0))
12484 return fold_build2_loc (loc, code, type, tem, arg1);
12486 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12487 if (tem && !operand_equal_p (tem, arg1, 0))
12488 return fold_build2_loc (loc, code, type, arg0, tem);
12491 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12492 != NULL_TREE)
12493 return tem;
12495 return NULL_TREE;
12497 case TRUTH_ORIF_EXPR:
12498 /* Note that the operands of this must be ints
12499 and their values must be 0 or true.
12500 ("true" is a fixed value perhaps depending on the language.) */
12501 /* If first arg is constant true, return it. */
12502 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12503 return fold_convert_loc (loc, type, arg0);
12504 case TRUTH_OR_EXPR:
12505 /* If either arg is constant zero, drop it. */
12506 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12507 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12508 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12509 /* Preserve sequence points. */
12510 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12511 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12512 /* If second arg is constant true, result is true, but we must
12513 evaluate first arg. */
12514 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12515 return omit_one_operand_loc (loc, type, arg1, arg0);
12516 /* Likewise for first arg, but note this only occurs here for
12517 TRUTH_OR_EXPR. */
12518 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12519 return omit_one_operand_loc (loc, type, arg0, arg1);
12521 /* !X || X is always true. */
12522 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12524 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12525 /* X || !X is always true. */
12526 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12527 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12528 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12530 /* (X && !Y) || (!X && Y) is X ^ Y.  */
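      /* For boolean operands the two forms agree on all four input
         combinations: each is true exactly when X and Y differ.  */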
12531 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12532 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12534 tree a0, a1, l0, l1, n0, n1;
12536 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12537 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12539 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12540 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12542 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12543 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12545 if ((operand_equal_p (n0, a0, 0)
12546 && operand_equal_p (n1, a1, 0))
12547 || (operand_equal_p (n0, a1, 0)
12548 && operand_equal_p (n1, a0, 0)))
12549 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12552 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12553 != NULL_TREE)
12554 return tem;
12556 return NULL_TREE;
12558 case TRUTH_XOR_EXPR:
12559 /* If the second arg is constant zero, drop it. */
12560 if (integer_zerop (arg1))
12561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12562 /* If the second arg is constant true, this is a logical inversion. */
12563 if (integer_onep (arg1))
12565 tem = invert_truthvalue_loc (loc, arg0);
12566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12568 /* Identical arguments cancel to zero. */
12569 if (operand_equal_p (arg0, arg1, 0))
12570 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12572 /* !X ^ X is always true. */
12573 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12574 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12575 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12577 /* X ^ !X is always true. */
12578 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12579 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12580 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12582 return NULL_TREE;
12584 case EQ_EXPR:
12585 case NE_EXPR:
12586 STRIP_NOPS (arg0);
12587 STRIP_NOPS (arg1);
12589 tem = fold_comparison (loc, code, type, op0, op1);
12590 if (tem != NULL_TREE)
12591 return tem;
12593 /* bool_var != 0 becomes bool_var. */
12594 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12595 && code == NE_EXPR)
12596 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12598 /* bool_var == 1 becomes bool_var. */
12599 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12600 && code == EQ_EXPR)
12601 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12603 /* bool_var != 1 becomes !bool_var. */
12604 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12605 && code == NE_EXPR)
12606 return fold_convert_loc (loc, type,
12607 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12608 TREE_TYPE (arg0), arg0));
12610 /* bool_var == 0 becomes !bool_var. */
12611 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12612 && code == EQ_EXPR)
12613 return fold_convert_loc (loc, type,
12614 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12615 TREE_TYPE (arg0), arg0));
12617 /* !exp != 0 becomes !exp.  */
12618 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12619 && code == NE_EXPR)
12620 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12622 /* If this is an equality comparison of the address of two non-weak,
12623 unaliased symbols neither of which is extern (since we do not
12624 have access to attributes for externs), then we know the result. */
12625 if (TREE_CODE (arg0) == ADDR_EXPR
12626 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12627 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12628 && ! lookup_attribute ("alias",
12629 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12630 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12631 && TREE_CODE (arg1) == ADDR_EXPR
12632 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12633 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12634 && ! lookup_attribute ("alias",
12635 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12636 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12638 /* We know that we're looking at the address of two
12639 non-weak, unaliased, static _DECL nodes.
12641 It is both wasteful and incorrect to call operand_equal_p
12642 to compare the two ADDR_EXPR nodes. It is wasteful in that
12643 all we need to do is test pointer equality for the arguments
12644 to the two ADDR_EXPR nodes. It is incorrect to use
12645 operand_equal_p as that function is NOT equivalent to a
12646 C equality test. It can in fact return false for two
12647 objects which would test as equal using the C equality
12648 operator. */
12649 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12650 return constant_boolean_node (equal
12651 ? code == EQ_EXPR : code != EQ_EXPR,
12652 type);
12655 /* Similarly for a NEGATE_EXPR. */
12656 if (TREE_CODE (arg0) == NEGATE_EXPR
12657 && TREE_CODE (arg1) == INTEGER_CST
12658 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12659 arg1)))
12660 && TREE_CODE (tem) == INTEGER_CST
12661 && !TREE_OVERFLOW (tem))
12662 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12664 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
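      /* XOR with a constant is an involution, so e.g. X ^ 5 == 12
         holds if and only if X == 12 ^ 5 == 9.  */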
12665 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12666 && TREE_CODE (arg1) == INTEGER_CST
12667 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12668 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12669 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12670 fold_convert_loc (loc,
12671 TREE_TYPE (arg0),
12672 arg1),
12673 TREE_OPERAND (arg0, 1)));
12675 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12676 if ((TREE_CODE (arg0) == PLUS_EXPR
12677 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12678 || TREE_CODE (arg0) == MINUS_EXPR)
12679 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12680 0)),
12681 arg1, 0)
12682 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12683 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12685 tree val = TREE_OPERAND (arg0, 1);
12686 return omit_two_operands_loc (loc, type,
12687 fold_build2_loc (loc, code, type,
12688 val,
12689 build_int_cst (TREE_TYPE (val),
12690 0)),
12691 TREE_OPERAND (arg0, 0), arg1);
12694 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
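      /* C - X == X would require C == 2 * X, which is even (also in
         modulo arithmetic), so for odd C, i.e. low bit 1 as tested
         below, the two sides can never be equal.  */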
12695 if (TREE_CODE (arg0) == MINUS_EXPR
12696 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12697 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12698 1)),
12699 arg1, 0)
12700 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12702 return omit_two_operands_loc (loc, type,
12703 code == NE_EXPR
12704 ? boolean_true_node : boolean_false_node,
12705 TREE_OPERAND (arg0, 1), arg1);
12708 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12709 if (TREE_CODE (arg0) == ABS_EXPR
12710 && (integer_zerop (arg1) || real_zerop (arg1)))
12711 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12713 /* If this is an EQ or NE comparison with zero and ARG0 is
12714 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12715 two operations, but the latter can be done in one less insn
12716 on machines that have only two-operand insns or on which a
12717 constant cannot be the first operand. */
12718 if (TREE_CODE (arg0) == BIT_AND_EXPR
12719 && integer_zerop (arg1))
12721 tree arg00 = TREE_OPERAND (arg0, 0);
12722 tree arg01 = TREE_OPERAND (arg0, 1);
12723 if (TREE_CODE (arg00) == LSHIFT_EXPR
12724 && integer_onep (TREE_OPERAND (arg00, 0)))
12726 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12727 arg01, TREE_OPERAND (arg00, 1));
12728 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12729 build_int_cst (TREE_TYPE (arg0), 1));
12730 return fold_build2_loc (loc, code, type,
12731 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12732 arg1);
12734 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12735 && integer_onep (TREE_OPERAND (arg01, 0)))
12737 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12738 arg00, TREE_OPERAND (arg01, 1));
12739 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12740 build_int_cst (TREE_TYPE (arg0), 1));
12741 return fold_build2_loc (loc, code, type,
12742 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12743 arg1);
12747 /* If this is an NE or EQ comparison of zero against the result of a
12748 signed MOD operation whose second operand is a power of 2, make
12749 the MOD operation unsigned since it is simpler and equivalent. */
12750 if (integer_zerop (arg1)
12751 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12752 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12753 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12754 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12755 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12756 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12758 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12759 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12760 fold_convert_loc (loc, newtype,
12761 TREE_OPERAND (arg0, 0)),
12762 fold_convert_loc (loc, newtype,
12763 TREE_OPERAND (arg0, 1)));
12765 return fold_build2_loc (loc, code, type, newmod,
12766 fold_convert_loc (loc, newtype, arg1));
12769 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12770 C1 is a valid shift constant, and C2 is a power of two, i.e.
12771 a single bit. */
12772 if (TREE_CODE (arg0) == BIT_AND_EXPR
12773 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12774 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12775 == INTEGER_CST
12776 && integer_pow2p (TREE_OPERAND (arg0, 1))
12777 && integer_zerop (arg1))
12779 tree itype = TREE_TYPE (arg0);
12780 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12781 prec = TYPE_PRECISION (itype);
12783 /* Check for a valid shift count. */
12784 if (wi::ltu_p (arg001, prec))
12786 tree arg01 = TREE_OPERAND (arg0, 1);
12787 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12788 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12789 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12790 can be rewritten as (X & (C2 << C1)) != 0. */
12791 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12793 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12794 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12795 return fold_build2_loc (loc, code, type, tem,
12796 fold_convert_loc (loc, itype, arg1));
12798 /* Otherwise, for signed (arithmetic) shifts,
12799 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12800 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12801 else if (!TYPE_UNSIGNED (itype))
12802 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12803 arg000, build_int_cst (itype, 0));
12804 /* Otherwise, for unsigned (logical) shifts,
12805 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12806 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12807 else
12808 return omit_one_operand_loc (loc, type,
12809 code == EQ_EXPR ? integer_one_node
12810 : integer_zero_node,
12811 arg000);
12815 /* If we have (A & C) == C where C is a power of 2, convert this into
12816 (A & C) != 0. Similarly for NE_EXPR. */
12817 if (TREE_CODE (arg0) == BIT_AND_EXPR
12818 && integer_pow2p (TREE_OPERAND (arg0, 1))
12819 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12820 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12821 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12822 integer_zero_node));
12824 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12825 bit, then fold the expression into A < 0 or A >= 0. */
12826 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12827 if (tem)
12828 return tem;
12830 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12831 Similarly for NE_EXPR. */
12832 if (TREE_CODE (arg0) == BIT_AND_EXPR
12833 && TREE_CODE (arg1) == INTEGER_CST
12834 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12836 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12837 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12838 TREE_OPERAND (arg0, 1));
12839 tree dandnotc
12840 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12841 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12842 notc);
12843 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12844 if (integer_nonzerop (dandnotc))
12845 return omit_one_operand_loc (loc, type, rslt, arg0);
12848 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12849 Similarly for NE_EXPR. */
12850 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12851 && TREE_CODE (arg1) == INTEGER_CST
12852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12854 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12855 tree candnotd
12856 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12857 TREE_OPERAND (arg0, 1),
12858 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12859 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12860 if (integer_nonzerop (candnotd))
12861 return omit_one_operand_loc (loc, type, rslt, arg0);
12864 /* If this is a comparison of a field, we may be able to simplify it. */
12865 if ((TREE_CODE (arg0) == COMPONENT_REF
12866 || TREE_CODE (arg0) == BIT_FIELD_REF)
12867 /* Handle the constant case even without -O
12868 to make sure the warnings are given. */
12869 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12871 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12872 if (t1)
12873 return t1;
12876 /* Optimize comparisons of strlen vs zero to a compare of the
12877 first character of the string vs zero. To wit,
12878 strlen(ptr) == 0 => *ptr == 0
12879 strlen(ptr) != 0 => *ptr != 0
12880 Other cases should reduce to one of these two (or a constant)
12881 due to the return value of strlen being unsigned. */
12882 if (TREE_CODE (arg0) == CALL_EXPR
12883 && integer_zerop (arg1))
12885 tree fndecl = get_callee_fndecl (arg0);
12887 if (fndecl
12888 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12889 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12890 && call_expr_nargs (arg0) == 1
12891 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12893 tree iref = build_fold_indirect_ref_loc (loc,
12894 CALL_EXPR_ARG (arg0, 0));
12895 return fold_build2_loc (loc, code, type, iref,
12896 build_int_cst (TREE_TYPE (iref), 0));
12900 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12901 of X. Similarly fold (X >> C) == 0 into X >= 0. */
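      /* E.g. for 32-bit X the shift count is 31, and X >> 31 is
         nonzero exactly when the sign bit of X is set; unsigned
         operands are converted to the signed type below so that the
         resulting comparison is a true sign test.  */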
12902 if (TREE_CODE (arg0) == RSHIFT_EXPR
12903 && integer_zerop (arg1)
12904 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12906 tree arg00 = TREE_OPERAND (arg0, 0);
12907 tree arg01 = TREE_OPERAND (arg0, 1);
12908 tree itype = TREE_TYPE (arg00);
12909 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12911 if (TYPE_UNSIGNED (itype))
12913 itype = signed_type_for (itype);
12914 arg00 = fold_convert_loc (loc, itype, arg00);
12916 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12917 type, arg00, build_zero_cst (itype));
12921 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12922 if (integer_zerop (arg1)
12923 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12924 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12925 TREE_OPERAND (arg0, 1));
12927 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12928 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12929 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12930 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12931 build_zero_cst (TREE_TYPE (arg0)));
12932 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12933 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12934 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12935 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12936 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12937 build_zero_cst (TREE_TYPE (arg0)));
12939 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12940 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12941 && TREE_CODE (arg1) == INTEGER_CST
12942 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12943 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12944 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12945 TREE_OPERAND (arg0, 1), arg1));
12947 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12948 (X & C) == 0 when C is a single bit. */
12949 if (TREE_CODE (arg0) == BIT_AND_EXPR
12950 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12951 && integer_zerop (arg1)
12952 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12954 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12955 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12956 TREE_OPERAND (arg0, 1));
12957 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12958 type, tem,
12959 fold_convert_loc (loc, TREE_TYPE (arg0),
12960 arg1));
12963 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12964 constant C is a power of two, i.e. a single bit. */
12965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12966 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12967 && integer_zerop (arg1)
12968 && integer_pow2p (TREE_OPERAND (arg0, 1))
12969 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12970 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12972 tree arg00 = TREE_OPERAND (arg0, 0);
12973 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12974 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12977 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12978 when C is a power of two, i.e. a single bit.  */
12979 if (TREE_CODE (arg0) == BIT_AND_EXPR
12980 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12981 && integer_zerop (arg1)
12982 && integer_pow2p (TREE_OPERAND (arg0, 1))
12983 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12984 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12986 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12987 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12988 arg000, TREE_OPERAND (arg0, 1));
12989 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12990 tem, build_int_cst (TREE_TYPE (tem), 0));
12993 if (integer_zerop (arg1)
12994 && tree_expr_nonzero_p (arg0))
12996 tree res = constant_boolean_node (code == NE_EXPR, type);
12997 return omit_one_operand_loc (loc, type, res, arg0);
13000 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13001 if (TREE_CODE (arg0) == NEGATE_EXPR
13002 && TREE_CODE (arg1) == NEGATE_EXPR)
13003 return fold_build2_loc (loc, code, type,
13004 TREE_OPERAND (arg0, 0),
13005 fold_convert_loc (loc, TREE_TYPE (arg0),
13006 TREE_OPERAND (arg1, 0)));
13008 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
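      /* X & C and Y & C are equal exactly when X and Y agree on every
         bit set in C, i.e. when (X ^ Y) & C == 0.  */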
13009 if (TREE_CODE (arg0) == BIT_AND_EXPR
13010 && TREE_CODE (arg1) == BIT_AND_EXPR)
13012 tree arg00 = TREE_OPERAND (arg0, 0);
13013 tree arg01 = TREE_OPERAND (arg0, 1);
13014 tree arg10 = TREE_OPERAND (arg1, 0);
13015 tree arg11 = TREE_OPERAND (arg1, 1);
13016 tree itype = TREE_TYPE (arg0);
13018 if (operand_equal_p (arg01, arg11, 0))
13019 return fold_build2_loc (loc, code, type,
13020 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13021 fold_build2_loc (loc,
13022 BIT_XOR_EXPR, itype,
13023 arg00, arg10),
13024 arg01),
13025 build_zero_cst (itype));
13027 if (operand_equal_p (arg01, arg10, 0))
13028 return fold_build2_loc (loc, code, type,
13029 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13030 fold_build2_loc (loc,
13031 BIT_XOR_EXPR, itype,
13032 arg00, arg11),
13033 arg01),
13034 build_zero_cst (itype));
13036 if (operand_equal_p (arg00, arg11, 0))
13037 return fold_build2_loc (loc, code, type,
13038 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13039 fold_build2_loc (loc,
13040 BIT_XOR_EXPR, itype,
13041 arg01, arg10),
13042 arg00),
13043 build_zero_cst (itype));
13045 if (operand_equal_p (arg00, arg10, 0))
13046 return fold_build2_loc (loc, code, type,
13047 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13048 fold_build2_loc (loc,
13049 BIT_XOR_EXPR, itype,
13050 arg01, arg11),
13051 arg00),
13052 build_zero_cst (itype));
13055 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13056 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13058 tree arg00 = TREE_OPERAND (arg0, 0);
13059 tree arg01 = TREE_OPERAND (arg0, 1);
13060 tree arg10 = TREE_OPERAND (arg1, 0);
13061 tree arg11 = TREE_OPERAND (arg1, 1);
13062 tree itype = TREE_TYPE (arg0);
13064 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13065 operand_equal_p guarantees no side-effects so we don't need
13066 to use omit_one_operand on Z. */
13067 if (operand_equal_p (arg01, arg11, 0))
13068 return fold_build2_loc (loc, code, type, arg00,
13069 fold_convert_loc (loc, TREE_TYPE (arg00),
13070 arg10));
13071 if (operand_equal_p (arg01, arg10, 0))
13072 return fold_build2_loc (loc, code, type, arg00,
13073 fold_convert_loc (loc, TREE_TYPE (arg00),
13074 arg11));
13075 if (operand_equal_p (arg00, arg11, 0))
13076 return fold_build2_loc (loc, code, type, arg01,
13077 fold_convert_loc (loc, TREE_TYPE (arg01),
13078 arg10));
13079 if (operand_equal_p (arg00, arg10, 0))
13080 return fold_build2_loc (loc, code, type, arg01,
13081 fold_convert_loc (loc, TREE_TYPE (arg01),
13082 arg11));
13084 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13085 if (TREE_CODE (arg01) == INTEGER_CST
13086 && TREE_CODE (arg11) == INTEGER_CST)
13088 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13089 fold_convert_loc (loc, itype, arg11));
13090 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13091 return fold_build2_loc (loc, code, type, tem,
13092 fold_convert_loc (loc, itype, arg10));
13096 /* Attempt to simplify equality/inequality comparisons of complex
13097 values. Only lower the comparison if the result is known or
13098 can be simplified to a single scalar comparison. */
13099 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13100 || TREE_CODE (arg0) == COMPLEX_CST)
13101 && (TREE_CODE (arg1) == COMPLEX_EXPR
13102 || TREE_CODE (arg1) == COMPLEX_CST))
13104 tree real0, imag0, real1, imag1;
13105 tree rcond, icond;
13107 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13109 real0 = TREE_OPERAND (arg0, 0);
13110 imag0 = TREE_OPERAND (arg0, 1);
13112 else
13114 real0 = TREE_REALPART (arg0);
13115 imag0 = TREE_IMAGPART (arg0);
13118 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13120 real1 = TREE_OPERAND (arg1, 0);
13121 imag1 = TREE_OPERAND (arg1, 1);
13123 else
13125 real1 = TREE_REALPART (arg1);
13126 imag1 = TREE_IMAGPART (arg1);
13129 rcond = fold_binary_loc (loc, code, type, real0, real1);
13130 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13132 if (integer_zerop (rcond))
13134 if (code == EQ_EXPR)
13135 return omit_two_operands_loc (loc, type, boolean_false_node,
13136 imag0, imag1);
13137 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13139 else
13141 if (code == NE_EXPR)
13142 return omit_two_operands_loc (loc, type, boolean_true_node,
13143 imag0, imag1);
13144 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13148 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13149 if (icond && TREE_CODE (icond) == INTEGER_CST)
13151 if (integer_zerop (icond))
13153 if (code == EQ_EXPR)
13154 return omit_two_operands_loc (loc, type, boolean_false_node,
13155 real0, real1);
13156 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13158 else
13160 if (code == NE_EXPR)
13161 return omit_two_operands_loc (loc, type, boolean_true_node,
13162 real0, real1);
13163 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13168 return NULL_TREE;
13170 case LT_EXPR:
13171 case GT_EXPR:
13172 case LE_EXPR:
13173 case GE_EXPR:
13174 tem = fold_comparison (loc, code, type, op0, op1);
13175 if (tem != NULL_TREE)
13176 return tem;
13178 /* Transform comparisons of the form X +- C CMP X. */
13179 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13180 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13181 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13182 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13183 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13184 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13186 tree arg01 = TREE_OPERAND (arg0, 1);
13187 enum tree_code code0 = TREE_CODE (arg0);
13188 int is_positive;
13190 if (TREE_CODE (arg01) == REAL_CST)
13191 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13192 else
13193 is_positive = tree_int_cst_sgn (arg01);
13195 /* (X - c) > X becomes false. */
13196 if (code == GT_EXPR
13197 && ((code0 == MINUS_EXPR && is_positive >= 0)
13198 || (code0 == PLUS_EXPR && is_positive <= 0)))
13200 if (TREE_CODE (arg01) == INTEGER_CST
13201 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13202 fold_overflow_warning (("assuming signed overflow does not "
13203 "occur when assuming that (X - c) > X "
13204 "is always false"),
13205 WARN_STRICT_OVERFLOW_ALL);
13206 return constant_boolean_node (0, type);
13209 /* Likewise (X + c) < X becomes false. */
13210 if (code == LT_EXPR
13211 && ((code0 == PLUS_EXPR && is_positive >= 0)
13212 || (code0 == MINUS_EXPR && is_positive <= 0)))
13214 if (TREE_CODE (arg01) == INTEGER_CST
13215 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13216 fold_overflow_warning (("assuming signed overflow does not "
13217 "occur when assuming that "
13218 "(X + c) < X is always false"),
13219 WARN_STRICT_OVERFLOW_ALL);
13220 return constant_boolean_node (0, type);
13223 /* Convert (X - c) <= X to true. */
13224 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13225 && code == LE_EXPR
13226 && ((code0 == MINUS_EXPR && is_positive >= 0)
13227 || (code0 == PLUS_EXPR && is_positive <= 0)))
13229 if (TREE_CODE (arg01) == INTEGER_CST
13230 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13231 fold_overflow_warning (("assuming signed overflow does not "
13232 "occur when assuming that "
13233 "(X - c) <= X is always true"),
13234 WARN_STRICT_OVERFLOW_ALL);
13235 return constant_boolean_node (1, type);
13238 /* Convert (X + c) >= X to true. */
13239 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13240 && code == GE_EXPR
13241 && ((code0 == PLUS_EXPR && is_positive >= 0)
13242 || (code0 == MINUS_EXPR && is_positive <= 0)))
13244 if (TREE_CODE (arg01) == INTEGER_CST
13245 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13246 fold_overflow_warning (("assuming signed overflow does not "
13247 "occur when assuming that "
13248 "(X + c) >= X is always true"),
13249 WARN_STRICT_OVERFLOW_ALL);
13250 return constant_boolean_node (1, type);
13253 if (TREE_CODE (arg01) == INTEGER_CST)
13255 /* Convert X + c > X and X - c < X to true for integers. */
13256 if (code == GT_EXPR
13257 && ((code0 == PLUS_EXPR && is_positive > 0)
13258 || (code0 == MINUS_EXPR && is_positive < 0)))
13260 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13261 fold_overflow_warning (("assuming signed overflow does "
13262 "not occur when assuming that "
13263 "(X + c) > X is always true"),
13264 WARN_STRICT_OVERFLOW_ALL);
13265 return constant_boolean_node (1, type);
13268 if (code == LT_EXPR
13269 && ((code0 == MINUS_EXPR && is_positive > 0)
13270 || (code0 == PLUS_EXPR && is_positive < 0)))
13272 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13273 fold_overflow_warning (("assuming signed overflow does "
13274 "not occur when assuming that "
13275 "(X - c) < X is always true"),
13276 WARN_STRICT_OVERFLOW_ALL);
13277 return constant_boolean_node (1, type);
13280 /* Convert X + c <= X and X - c >= X to false for integers. */
13281 if (code == LE_EXPR
13282 && ((code0 == PLUS_EXPR && is_positive > 0)
13283 || (code0 == MINUS_EXPR && is_positive < 0)))
13285 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13286 fold_overflow_warning (("assuming signed overflow does "
13287 "not occur when assuming that "
13288 "(X + c) <= X is always false"),
13289 WARN_STRICT_OVERFLOW_ALL);
13290 return constant_boolean_node (0, type);
13293 if (code == GE_EXPR
13294 && ((code0 == MINUS_EXPR && is_positive > 0)
13295 || (code0 == PLUS_EXPR && is_positive < 0)))
13297 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13298 fold_overflow_warning (("assuming signed overflow does "
13299 "not occur when assuming that "
13300 "(X - c) >= X is always false"),
13301 WARN_STRICT_OVERFLOW_ALL);
13302 return constant_boolean_node (0, type);
13307 /* Comparisons with the highest or lowest possible integer of
13308 the specified precision will have known values. */
13310 tree arg1_type = TREE_TYPE (arg1);
13311 unsigned int prec = TYPE_PRECISION (arg1_type);
13313 if (TREE_CODE (arg1) == INTEGER_CST
13314 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13316 wide_int max = wi::max_value (arg1_type);
13317 wide_int signed_max = wi::max_value (prec, SIGNED);
13318 wide_int min = wi::min_value (arg1_type);
13320 if (wi::eq_p (arg1, max))
13321 switch (code)
13323 case GT_EXPR:
13324 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13326 case GE_EXPR:
13327 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13329 case LE_EXPR:
13330 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13332 case LT_EXPR:
13333 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13335 /* The GE_EXPR and LT_EXPR cases above are not normally
13336 reached because of previous transformations. */
13338 default:
13339 break;
13341 else if (wi::eq_p (arg1, max - 1))
13342 switch (code)
13344 case GT_EXPR:
13345 arg1 = const_binop (PLUS_EXPR, arg1,
13346 build_int_cst (TREE_TYPE (arg1), 1));
13347 return fold_build2_loc (loc, EQ_EXPR, type,
13348 fold_convert_loc (loc,
13349 TREE_TYPE (arg1), arg0),
13350 arg1);
13351 case LE_EXPR:
13352 arg1 = const_binop (PLUS_EXPR, arg1,
13353 build_int_cst (TREE_TYPE (arg1), 1));
13354 return fold_build2_loc (loc, NE_EXPR, type,
13355 fold_convert_loc (loc, TREE_TYPE (arg1),
13356 arg0),
13357 arg1);
13358 default:
13359 break;
13361 else if (wi::eq_p (arg1, min))
13362 switch (code)
13364 case LT_EXPR:
13365 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13367 case LE_EXPR:
13368 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13370 case GE_EXPR:
13371 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13373 case GT_EXPR:
13374 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13376 default:
13377 break;
13379 else if (wi::eq_p (arg1, min + 1))
13380 switch (code)
13382 case GE_EXPR:
13383 arg1 = const_binop (MINUS_EXPR, arg1,
13384 build_int_cst (TREE_TYPE (arg1), 1));
13385 return fold_build2_loc (loc, NE_EXPR, type,
13386 fold_convert_loc (loc,
13387 TREE_TYPE (arg1), arg0),
13388 arg1);
13389 case LT_EXPR:
13390 arg1 = const_binop (MINUS_EXPR, arg1,
13391 build_int_cst (TREE_TYPE (arg1), 1));
13392 return fold_build2_loc (loc, EQ_EXPR, type,
13393 fold_convert_loc (loc, TREE_TYPE (arg1),
13394 arg0),
13395 arg1);
13396 default:
13397 break;
13400 else if (wi::eq_p (arg1, signed_max)
13401 && TYPE_UNSIGNED (arg1_type)
13402 /* We will flip the signedness of the comparison operator
13403 associated with the mode of arg1, so the sign bit is
13404 specified by this mode. Check that arg1 is the signed
13405 max associated with this sign bit. */
13406 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13407 /* signed_type does not work on pointer types. */
13408 && INTEGRAL_TYPE_P (arg1_type))
13410 /* The following case also applies to X < signed_max+1
13411 and X >= signed_max+1 because of previous transformations.  */
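      /* E.g. for 32-bit unsigned X, X <= 0x7fffffff holds exactly
         when the sign bit is clear, i.e. when (int) X >= 0.  */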
13412 if (code == LE_EXPR || code == GT_EXPR)
13414 tree st = signed_type_for (arg1_type);
13415 return fold_build2_loc (loc,
13416 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13417 type, fold_convert_loc (loc, st, arg0),
13418 build_int_cst (st, 0));
13424 /* If we are comparing an ABS_EXPR with a constant, we can
13425 convert all the cases into explicit comparisons, but they may
13426 well not be faster than doing the ABS and one comparison.
13427 But ABS (X) <= C is a range comparison, which becomes a subtraction
13428 and a comparison, and is probably faster. */
13429 if (code == LE_EXPR
13430 && TREE_CODE (arg1) == INTEGER_CST
13431 && TREE_CODE (arg0) == ABS_EXPR
13432 && ! TREE_SIDE_EFFECTS (arg0)
13433 && (0 != (tem = negate_expr (arg1)))
13434 && TREE_CODE (tem) == INTEGER_CST
13435 && !TREE_OVERFLOW (tem))
13436 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13437 build2 (GE_EXPR, type,
13438 TREE_OPERAND (arg0, 0), tem),
13439 build2 (LE_EXPR, type,
13440 TREE_OPERAND (arg0, 0), arg1));
13442 /* Convert ABS_EXPR<x> >= 0 to true. */
13443 strict_overflow_p = false;
13444 if (code == GE_EXPR
13445 && (integer_zerop (arg1)
13446 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13447 && real_zerop (arg1)))
13448 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13450 if (strict_overflow_p)
13451 fold_overflow_warning (("assuming signed overflow does not occur "
13452 "when simplifying comparison of "
13453 "absolute value and zero"),
13454 WARN_STRICT_OVERFLOW_CONDITIONAL);
13455 return omit_one_operand_loc (loc, type,
13456 constant_boolean_node (true, type),
13457 arg0);
13460 /* Convert ABS_EXPR<x> < 0 to false. */
13461 strict_overflow_p = false;
13462 if (code == LT_EXPR
13463 && (integer_zerop (arg1) || real_zerop (arg1))
13464 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13466 if (strict_overflow_p)
13467 fold_overflow_warning (("assuming signed overflow does not occur "
13468 "when simplifying comparison of "
13469 "absolute value and zero"),
13470 WARN_STRICT_OVERFLOW_CONDITIONAL);
13471 return omit_one_operand_loc (loc, type,
13472 constant_boolean_node (false, type),
13473 arg0);
13476 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13477 and similarly for >= into !=. */
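      /* E.g. for unsigned X, X < (1 << Y) says that no bit of X at
         position Y or above is set, which is exactly X >> Y == 0.  */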
13478 if ((code == LT_EXPR || code == GE_EXPR)
13479 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13480 && TREE_CODE (arg1) == LSHIFT_EXPR
13481 && integer_onep (TREE_OPERAND (arg1, 0)))
13482 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13483 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13484 TREE_OPERAND (arg1, 1)),
13485 build_zero_cst (TREE_TYPE (arg0)));
13487 /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
13488 otherwise Y might be >= # of bits in X's type and thus e.g.
13489 (unsigned char) (1 << Y) for Y == 15 might be 0.
13490 If the cast is widening, then 1 << Y should have unsigned type,
13491 otherwise if Y is the number of bits in the signed shift type minus 1,
13492 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
13493 Y == 31 might be 0xffffffff80000000.  */
13494 if ((code == LT_EXPR || code == GE_EXPR)
13495 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13496 && CONVERT_EXPR_P (arg1)
13497 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13498 && (TYPE_PRECISION (TREE_TYPE (arg1))
13499 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13500 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13501 || (TYPE_PRECISION (TREE_TYPE (arg1))
13502 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13503 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13505 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13506 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13507 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13508 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13509 build_zero_cst (TREE_TYPE (arg0)));
13512 return NULL_TREE;
13514 case UNORDERED_EXPR:
13515 case ORDERED_EXPR:
13516 case UNLT_EXPR:
13517 case UNLE_EXPR:
13518 case UNGT_EXPR:
13519 case UNGE_EXPR:
13520 case UNEQ_EXPR:
13521 case LTGT_EXPR:
13522 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13524 t1 = fold_relational_const (code, type, arg0, arg1);
13525 if (t1 != NULL_TREE)
13526 return t1;
13529 /* If the first operand is NaN, the result is constant. */
13530 if (TREE_CODE (arg0) == REAL_CST
13531 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13532 && (code != LTGT_EXPR || ! flag_trapping_math))
13534 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13535 ? integer_zero_node
13536 : integer_one_node;
13537 return omit_one_operand_loc (loc, type, t1, arg1);
13540 /* If the second operand is NaN, the result is constant. */
13541 if (TREE_CODE (arg1) == REAL_CST
13542 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13543 && (code != LTGT_EXPR || ! flag_trapping_math))
13545 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13546 ? integer_zero_node
13547 : integer_one_node;
13548 return omit_one_operand_loc (loc, type, t1, arg0);
13551 /* Simplify unordered comparison of something with itself. */
13552 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13553 && operand_equal_p (arg0, arg1, 0))
13554 return constant_boolean_node (1, type);
13556 if (code == LTGT_EXPR
13557 && !flag_trapping_math
13558 && operand_equal_p (arg0, arg1, 0))
13559 return constant_boolean_node (0, type);
13561 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13563 tree targ0 = strip_float_extensions (arg0);
13564 tree targ1 = strip_float_extensions (arg1);
13565 tree newtype = TREE_TYPE (targ0);
13567 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13568 newtype = TREE_TYPE (targ1);
13570 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13571 return fold_build2_loc (loc, code, type,
13572 fold_convert_loc (loc, newtype, targ0),
13573 fold_convert_loc (loc, newtype, targ1));
13576 return NULL_TREE;
13578 case COMPOUND_EXPR:
13579 /* When pedantic, a compound expression can be neither an lvalue
13580 nor an integer constant expression. */
13581 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13582 return NULL_TREE;
13583 /* Don't let (0, 0) be a null pointer constant.  */
13584 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13585 : fold_convert_loc (loc, type, arg1);
13586 return pedantic_non_lvalue_loc (loc, tem);
13588 case COMPLEX_EXPR:
13589 if ((TREE_CODE (arg0) == REAL_CST
13590 && TREE_CODE (arg1) == REAL_CST)
13591 || (TREE_CODE (arg0) == INTEGER_CST
13592 && TREE_CODE (arg1) == INTEGER_CST))
13593 return build_complex (type, arg0, arg1);
13594 if (TREE_CODE (arg0) == REALPART_EXPR
13595 && TREE_CODE (arg1) == IMAGPART_EXPR
13596 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13597 && operand_equal_p (TREE_OPERAND (arg0, 0),
13598 TREE_OPERAND (arg1, 0), 0))
13599 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13600 TREE_OPERAND (arg1, 0));
13601 return NULL_TREE;
13603 case ASSERT_EXPR:
13604 /* An ASSERT_EXPR should never be passed to fold_binary. */
13605 gcc_unreachable ();
13607 case VEC_PACK_TRUNC_EXPR:
13608 case VEC_PACK_FIX_TRUNC_EXPR:
13610 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13611 tree *elts;
13613 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13614 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13615 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13616 return NULL_TREE;
13618 elts = XALLOCAVEC (tree, nelts);
13619 if (!vec_cst_ctor_to_array (arg0, elts)
13620 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13621 return NULL_TREE;
13623 for (i = 0; i < nelts; i++)
13625 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13626 ? NOP_EXPR : FIX_TRUNC_EXPR,
13627 TREE_TYPE (type), elts[i]);
13628 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13629 return NULL_TREE;
13632 return build_vector (type, elts);
13635 case VEC_WIDEN_MULT_LO_EXPR:
13636 case VEC_WIDEN_MULT_HI_EXPR:
13637 case VEC_WIDEN_MULT_EVEN_EXPR:
13638 case VEC_WIDEN_MULT_ODD_EXPR:
13640 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13641 unsigned int out, ofs, scale;
13642 tree *elts;
13644 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13645 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13646 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13647 return NULL_TREE;
13649 elts = XALLOCAVEC (tree, nelts * 4);
13650 if (!vec_cst_ctor_to_array (arg0, elts)
13651 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13652 return NULL_TREE;
13654 if (code == VEC_WIDEN_MULT_LO_EXPR)
13655 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13656 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13657 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13658 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13659 scale = 1, ofs = 0;
13660 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13661 scale = 1, ofs = 1;
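      /* Thus output lane OUT reads lane (OUT << scale) + ofs from each
         input vector: the EVEN/ODD variants pick lanes 2*OUT and
         2*OUT + 1, while LO/HI pick a contiguous half starting at 0 or
         NELTS, chosen by endianness.  */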
13663 for (out = 0; out < nelts; out++)
13665 unsigned int in1 = (out << scale) + ofs;
13666 unsigned int in2 = in1 + nelts * 2;
13667 tree t1, t2;
13669 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13670 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13672 if (t1 == NULL_TREE || t2 == NULL_TREE)
13673 return NULL_TREE;
13674 elts[out] = const_binop (MULT_EXPR, t1, t2);
13675 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13676 return NULL_TREE;
13679 return build_vector (type, elts);
13682 default:
13683 return NULL_TREE;
13684 } /* switch (code) */
13687 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13688 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13689 of GOTO_EXPR. */
13691 static tree
13692 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13694 switch (TREE_CODE (*tp))
13696 case LABEL_EXPR:
13697 return *tp;
13699 case GOTO_EXPR:
13700 *walk_subtrees = 0;
13702 /* ... fall through ... */
13704 default:
13705 return NULL_TREE;
13709 /* Return whether the sub-tree ST contains a label which is accessible from
13710 outside the sub-tree. */
13712 static bool
13713 contains_label_p (tree st)
13715 return
13716 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13719 /* Fold a ternary expression of code CODE and type TYPE with operands
13720 OP0, OP1, and OP2. Return the folded expression if folding is
13721 successful. Otherwise, return NULL_TREE. */
13723 tree
13724 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13725 tree op0, tree op1, tree op2)
13727 tree tem;
13728 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13729 enum tree_code_class kind = TREE_CODE_CLASS (code);
13731 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13732 && TREE_CODE_LENGTH (code) == 3);
13734 /* If this is a commutative operation, and OP0 is a constant, move it
13735 to OP1 to reduce the number of tests below. */
13736 if (commutative_ternary_tree_code (code)
13737 && tree_swap_operands_p (op0, op1, true))
13738 return fold_build3_loc (loc, code, type, op1, op0, op2);
13740 tem = generic_simplify (loc, code, type, op0, op1, op2);
13741 if (tem)
13742 return tem;
13744 /* Strip any conversions that don't change the mode. This is safe
13745 for every expression, except for a comparison expression because
13746 its signedness is derived from its operands. So, in the latter
13747 case, only strip conversions that don't change the signedness.
13749 Note that this is done as an internal manipulation within the
13750 constant folder, in order to find the simplest representation of
13751 the arguments so that their form can be studied.  In any case,
13752 the appropriate type conversions should be put back in the tree
13753 that will get out of the constant folder. */
13754 if (op0)
13756 arg0 = op0;
13757 STRIP_NOPS (arg0);
13760 if (op1)
13762 arg1 = op1;
13763 STRIP_NOPS (arg1);
13766 if (op2)
13768 arg2 = op2;
13769 STRIP_NOPS (arg2);
13772 switch (code)
13774 case COMPONENT_REF:
13775 if (TREE_CODE (arg0) == CONSTRUCTOR
13776 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13778 unsigned HOST_WIDE_INT idx;
13779 tree field, value;
13780 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13781 if (field == arg1)
13782 return value;
13784 return NULL_TREE;
13786 case COND_EXPR:
13787 case VEC_COND_EXPR:
13788 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13789 so all simple results must be passed through pedantic_non_lvalue. */
13790 if (TREE_CODE (arg0) == INTEGER_CST)
13792 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13793 tem = integer_zerop (arg0) ? op2 : op1;
13794 /* Only optimize constant conditions when the selected branch
13795 has the same type as the COND_EXPR. This avoids optimizing
13796 away "c ? x : throw", where the throw has a void type.
13797 Avoid throwing away the operand which contains a label.  */
13798 if ((!TREE_SIDE_EFFECTS (unused_op)
13799 || !contains_label_p (unused_op))
13800 && (! VOID_TYPE_P (TREE_TYPE (tem))
13801 || VOID_TYPE_P (type)))
13802 return pedantic_non_lvalue_loc (loc, tem);
13803 return NULL_TREE;
13805 else if (TREE_CODE (arg0) == VECTOR_CST)
13807 if (integer_all_onesp (arg0))
13808 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13809 if (integer_zerop (arg0))
13810 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13812 if ((TREE_CODE (arg1) == VECTOR_CST
13813 || TREE_CODE (arg1) == CONSTRUCTOR)
13814 && (TREE_CODE (arg2) == VECTOR_CST
13815 || TREE_CODE (arg2) == CONSTRUCTOR))
13817 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13818 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13819 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13820 for (i = 0; i < nelts; i++)
13822 tree val = VECTOR_CST_ELT (arg0, i);
13823 if (integer_all_onesp (val))
13824 sel[i] = i;
13825 else if (integer_zerop (val))
13826 sel[i] = nelts + i;
13827 else /* Currently unreachable. */
13828 return NULL_TREE;
13830 tree t = fold_vec_perm (type, arg1, arg2, sel);
13831 if (t != NULL_TREE)
13832 return t;
13836 if (operand_equal_p (arg1, op2, 0))
13837 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13839 /* If we have A op B ? A : C, we may be able to convert this to a
13840 simpler expression, depending on the operation and the values
13841 of B and C. Signed zeros prevent all of these transformations,
13842 for reasons given above each one.
13844 Also try swapping the arguments and inverting the conditional. */
13845 if (COMPARISON_CLASS_P (arg0)
13846 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13847 arg1, TREE_OPERAND (arg0, 1))
13848 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13850 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13851 if (tem)
13852 return tem;
13855 if (COMPARISON_CLASS_P (arg0)
13856 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13857 op2,
13858 TREE_OPERAND (arg0, 1))
13859 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13861 location_t loc0 = expr_location_or (arg0, loc);
13862 tem = fold_invert_truthvalue (loc0, arg0);
13863 if (tem && COMPARISON_CLASS_P (tem))
13865 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13866 if (tem)
13867 return tem;
13871 /* If the second operand is simpler than the third, swap them
13872 since that produces better jump optimization results. */
13873 if (truth_value_p (TREE_CODE (arg0))
13874 && tree_swap_operands_p (op1, op2, false))
13876 location_t loc0 = expr_location_or (arg0, loc);
13877 /* See if this can be inverted. If it can't, possibly because
13878 it was a floating-point inequality comparison, don't do
13879 anything. */
13880 tem = fold_invert_truthvalue (loc0, arg0);
13881 if (tem)
13882 return fold_build3_loc (loc, code, type, tem, op2, op1);
13885 /* Convert A ? 1 : 0 to simply A. */
13886 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13887 : (integer_onep (op1)
13888 && !VECTOR_TYPE_P (type)))
13889 && integer_zerop (op2)
13890 /* If we try to convert OP0 to our type, the
13891 call to fold will try to move the conversion inside
13892 a COND, which will recurse. In that case, the COND_EXPR
13893 is probably the best choice, so leave it alone. */
13894 && type == TREE_TYPE (arg0))
13895 return pedantic_non_lvalue_loc (loc, arg0);
13897 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13898 over COND_EXPR in cases such as floating point comparisons. */
13899 if (integer_zerop (op1)
13900 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13901 : (integer_onep (op2)
13902 && !VECTOR_TYPE_P (type)))
13903 && truth_value_p (TREE_CODE (arg0)))
13904 return pedantic_non_lvalue_loc (loc,
13905 fold_convert_loc (loc, type,
13906 invert_truthvalue_loc (loc,
13907 arg0)));
13909 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13910 if (TREE_CODE (arg0) == LT_EXPR
13911 && integer_zerop (TREE_OPERAND (arg0, 1))
13912 && integer_zerop (op2)
13913 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13915 /* sign_bit_p looks through both zero and sign extensions,
13916 but for this optimization only sign extensions are
13917 usable. */
13918 tree tem2 = TREE_OPERAND (arg0, 0);
13919 while (tem != tem2)
13921 if (TREE_CODE (tem2) != NOP_EXPR
13922 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13924 tem = NULL_TREE;
13925 break;
13927 tem2 = TREE_OPERAND (tem2, 0);
13929 /* sign_bit_p only checks ARG1 bits within A's precision.
13930 If <sign bit of A> has wider type than A, bits outside
13931 of A's precision in <sign bit of A> need to be checked.
13932 If they are all 0, this optimization needs to be done
13933 in the unsigned variant of A's type; if they are all 1, in the
13934 signed variant; otherwise this can't be done. */
13935 if (tem
13936 && TYPE_PRECISION (TREE_TYPE (tem))
13937 < TYPE_PRECISION (TREE_TYPE (arg1))
13938 && TYPE_PRECISION (TREE_TYPE (tem))
13939 < TYPE_PRECISION (type))
13941 int inner_width, outer_width;
13942 tree tem_type;
13944 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13945 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13946 if (outer_width > TYPE_PRECISION (type))
13947 outer_width = TYPE_PRECISION (type);
13949 wide_int mask = wi::shifted_mask
13950 (inner_width, outer_width - inner_width, false,
13951 TYPE_PRECISION (TREE_TYPE (arg1)));
13953 wide_int common = mask & arg1;
13954 if (common == mask)
13956 tem_type = signed_type_for (TREE_TYPE (tem));
13957 tem = fold_convert_loc (loc, tem_type, tem);
13959 else if (common == 0)
13961 tem_type = unsigned_type_for (TREE_TYPE (tem));
13962 tem = fold_convert_loc (loc, tem_type, tem);
13964 else
13965 tem = NULL;
13968 if (tem)
13969 return
13970 fold_convert_loc (loc, type,
13971 fold_build2_loc (loc, BIT_AND_EXPR,
13972 TREE_TYPE (tem), tem,
13973 fold_convert_loc (loc,
13974 TREE_TYPE (tem),
13975 arg1)));
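/* Editorial illustration, assuming a 32-bit signed A: the transformation
   above rewrites  A < 0 ? 0x80000000 : 0  as  A & 0x80000000, a single
   mask of the sign bit instead of a conditional.  */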
13978 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13979 already handled above. */
13980 if (TREE_CODE (arg0) == BIT_AND_EXPR
13981 && integer_onep (TREE_OPERAND (arg0, 1))
13982 && integer_zerop (op2)
13983 && integer_pow2p (arg1))
13985 tree tem = TREE_OPERAND (arg0, 0);
13986 STRIP_NOPS (tem);
13987 if (TREE_CODE (tem) == RSHIFT_EXPR
13988 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13989 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13990 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13991 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13992 TREE_OPERAND (tem, 0), arg1);
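/* Editorial illustration, with N == 3:  ((a >> 3) & 1) ? 8 : 0  matches
   the pattern above (8 == 1 << 3) and becomes  a & 8.  */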
13995 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13996 is probably obsolete because the first operand should be a
13997 truth value (that's why we have the two cases above), but let's
13998 leave it in until we can confirm this for all front-ends. */
13999 if (integer_zerop (op2)
14000 && TREE_CODE (arg0) == NE_EXPR
14001 && integer_zerop (TREE_OPERAND (arg0, 1))
14002 && integer_pow2p (arg1)
14003 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14004 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14005 arg1, OEP_ONLY_CONST))
14006 return pedantic_non_lvalue_loc (loc,
14007 fold_convert_loc (loc, type,
14008 TREE_OPERAND (arg0, 0)));
14010 /* Disable the transformations below for vectors, since
14011 fold_binary_op_with_conditional_arg may undo them immediately,
14012 yielding an infinite loop. */
14013 if (code == VEC_COND_EXPR)
14014 return NULL_TREE;
14016 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14017 if (integer_zerop (op2)
14018 && truth_value_p (TREE_CODE (arg0))
14019 && truth_value_p (TREE_CODE (arg1))
14020 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14021 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14022 : TRUTH_ANDIF_EXPR,
14023 type, fold_convert_loc (loc, type, arg0), arg1);
14025 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14026 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14027 && truth_value_p (TREE_CODE (arg0))
14028 && truth_value_p (TREE_CODE (arg1))
14029 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14031 location_t loc0 = expr_location_or (arg0, loc);
14032 /* Only perform transformation if ARG0 is easily inverted. */
14033 tem = fold_invert_truthvalue (loc0, arg0);
14034 if (tem)
14035 return fold_build2_loc (loc, code == VEC_COND_EXPR
14036 ? BIT_IOR_EXPR
14037 : TRUTH_ORIF_EXPR,
14038 type, fold_convert_loc (loc, type, tem),
14039 arg1);
14042 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14043 if (integer_zerop (arg1)
14044 && truth_value_p (TREE_CODE (arg0))
14045 && truth_value_p (TREE_CODE (op2))
14046 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14048 location_t loc0 = expr_location_or (arg0, loc);
14049 /* Only perform transformation if ARG0 is easily inverted. */
14050 tem = fold_invert_truthvalue (loc0, arg0);
14051 if (tem)
14052 return fold_build2_loc (loc, code == VEC_COND_EXPR
14053 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14054 type, fold_convert_loc (loc, type, tem),
14055 op2);
14058 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14059 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14060 && truth_value_p (TREE_CODE (arg0))
14061 && truth_value_p (TREE_CODE (op2))
14062 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14063 return fold_build2_loc (loc, code == VEC_COND_EXPR
14064 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14065 type, fold_convert_loc (loc, type, arg0), op2);
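/* Editorial summary of the four truth-value rewrites above, for
   truth-valued a and b:  a ? b : 0 => a && b,  a ? b : 1 => !a || b,
   a ? 0 : b => !a && b,  a ? 1 : b => a || b.  For VEC_COND_EXPR the
   bitwise BIT_AND_EXPR / BIT_IOR_EXPR forms are produced instead.  */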
14067 return NULL_TREE;
14069 case CALL_EXPR:
14070 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14071 of fold_ternary on them. */
14072 gcc_unreachable ();
14074 case BIT_FIELD_REF:
14075 if ((TREE_CODE (arg0) == VECTOR_CST
14076 || (TREE_CODE (arg0) == CONSTRUCTOR
14077 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14078 && (type == TREE_TYPE (TREE_TYPE (arg0))
14079 || (TREE_CODE (type) == VECTOR_TYPE
14080 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14082 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14083 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14084 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14085 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14087 if (n != 0
14088 && (idx % width) == 0
14089 && (n % width) == 0
14090 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14092 idx = idx / width;
14093 n = n / width;
14095 if (TREE_CODE (arg0) == VECTOR_CST)
14097 if (n == 1)
14098 return VECTOR_CST_ELT (arg0, idx);
14100 tree *vals = XALLOCAVEC (tree, n);
14101 for (unsigned i = 0; i < n; ++i)
14102 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14103 return build_vector (type, vals);
14106 /* Constructor elements can be subvectors. */
14107 unsigned HOST_WIDE_INT k = 1;
14108 if (CONSTRUCTOR_NELTS (arg0) != 0)
14110 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14111 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14112 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14115 /* We keep an exact subset of the constructor elements. */
14116 if ((idx % k) == 0 && (n % k) == 0)
14118 if (CONSTRUCTOR_NELTS (arg0) == 0)
14119 return build_constructor (type, NULL);
14120 idx /= k;
14121 n /= k;
14122 if (n == 1)
14124 if (idx < CONSTRUCTOR_NELTS (arg0))
14125 return CONSTRUCTOR_ELT (arg0, idx)->value;
14126 return build_zero_cst (type);
14129 vec<constructor_elt, va_gc> *vals;
14130 vec_alloc (vals, n);
14131 for (unsigned i = 0;
14132 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14133 ++i)
14134 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14135 CONSTRUCTOR_ELT
14136 (arg0, idx + i)->value);
14137 return build_constructor (type, vals);
14139 /* The bitfield references a single constructor element. */
14140 else if (idx + n <= (idx / k + 1) * k)
14142 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14143 return build_zero_cst (type);
14144 else if (n == k)
14145 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14146 else
14147 return fold_build3_loc (loc, code, type,
14148 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14149 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
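/* Editorial illustration (hypothetical operands): with ARG0 the V4SI
   VECTOR_CST { 0, 1, 2, 3 }, OP1 = 64 bits and OP2 = 64 bits, WIDTH
   is 32, so IDX = 2 and N = 2, and the reference folds to the
   two-element vector { 2, 3 }.  */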
14154 /* A bit-field-ref that referenced the full argument can be stripped. */
14155 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14156 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14157 && integer_zerop (op2))
14158 return fold_convert_loc (loc, type, arg0);
14160 /* On constants we can use native encode/interpret to constant
14161 fold (nearly) all BIT_FIELD_REFs. */
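/* Editorial illustration: a hypothetical BIT_FIELD_REF of 8 bits at
   bit position 8 of a 32-bit INTEGER_CST is handled below by
   serializing the constant into the byte buffer B with
   native_encode_expr and re-reading one byte at offset 1 with
   native_interpret_expr.  */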
14162 if (CONSTANT_CLASS_P (arg0)
14163 && can_native_interpret_type_p (type)
14164 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14165 /* This limitation should not be necessary; we just need to
14166 round this up to mode size. */
14167 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14168 /* Need bit-shifting of the buffer to relax the following. */
14169 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14171 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14172 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14173 unsigned HOST_WIDE_INT clen;
14174 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14175 /* ??? We cannot tell native_encode_expr to start at
14176 some random byte only. So limit us to a reasonable amount
14177 of work. */
14178 if (clen <= 4096)
14180 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14181 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14182 if (len > 0
14183 && len * BITS_PER_UNIT >= bitpos + bitsize)
14185 tree v = native_interpret_expr (type,
14186 b + bitpos / BITS_PER_UNIT,
14187 bitsize / BITS_PER_UNIT);
14188 if (v)
14189 return v;
14194 return NULL_TREE;
14196 case FMA_EXPR:
14197 /* For integers we can decompose the FMA if possible. */
14198 if (TREE_CODE (arg0) == INTEGER_CST
14199 && TREE_CODE (arg1) == INTEGER_CST)
14200 return fold_build2_loc (loc, PLUS_EXPR, type,
14201 const_binop (MULT_EXPR, arg0, arg1), arg2);
14202 if (integer_zerop (arg2))
14203 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14205 return fold_fma (loc, type, arg0, arg1, arg2);
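/* Editorial illustration: FMA_EXPR <2, 3, 4> with INTEGER_CST operands
   is decomposed above into 2 * 3 + 4 and folds to 10.  */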
14207 case VEC_PERM_EXPR:
14208 if (TREE_CODE (arg2) == VECTOR_CST)
14210 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14211 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14212 bool need_mask_canon = false;
14213 bool all_in_vec0 = true;
14214 bool all_in_vec1 = true;
14215 bool maybe_identity = true;
14216 bool single_arg = (op0 == op1);
14217 bool changed = false;
14219 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14220 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14221 for (i = 0; i < nelts; i++)
14223 tree val = VECTOR_CST_ELT (arg2, i);
14224 if (TREE_CODE (val) != INTEGER_CST)
14225 return NULL_TREE;
14227 /* Make sure that the perm value is in an acceptable
14228 range. */
14229 wide_int t = val;
14230 if (wi::gtu_p (t, mask))
14232 need_mask_canon = true;
14233 sel[i] = t.to_uhwi () & mask;
14235 else
14236 sel[i] = t.to_uhwi ();
14238 if (sel[i] < nelts)
14239 all_in_vec1 = false;
14240 else
14241 all_in_vec0 = false;
14243 if ((sel[i] & (nelts-1)) != i)
14244 maybe_identity = false;
14247 if (maybe_identity)
14249 if (all_in_vec0)
14250 return op0;
14251 if (all_in_vec1)
14252 return op1;
14255 if (all_in_vec0)
14256 op1 = op0;
14257 else if (all_in_vec1)
14259 op0 = op1;
14260 for (i = 0; i < nelts; i++)
14261 sel[i] -= nelts;
14262 need_mask_canon = true;
14265 if ((TREE_CODE (op0) == VECTOR_CST
14266 || TREE_CODE (op0) == CONSTRUCTOR)
14267 && (TREE_CODE (op1) == VECTOR_CST
14268 || TREE_CODE (op1) == CONSTRUCTOR))
14270 tree t = fold_vec_perm (type, op0, op1, sel);
14271 if (t != NULL_TREE)
14272 return t;
14275 if (op0 == op1 && !single_arg)
14276 changed = true;
14278 if (need_mask_canon && arg2 == op2)
14280 tree *tsel = XALLOCAVEC (tree, nelts);
14281 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14282 for (i = 0; i < nelts; i++)
14283 tsel[i] = build_int_cst (eltype, sel[i]);
14284 op2 = build_vector (TREE_TYPE (arg2), tsel);
14285 changed = true;
14288 if (changed)
14289 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14291 return NULL_TREE;
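/* Editorial illustration (hypothetical mask): for a V4SI VEC_PERM_EXPR
   with SEL = { 4, 5, 6, 7 }, every index selects from the second
   vector and (sel[i] & 3) == i, so ALL_IN_VEC1 and MAYBE_IDENTITY both
   hold and the whole expression folds to OP1.  */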
14293 default:
14294 return NULL_TREE;
14295 } /* switch (code) */
14298 /* Perform constant folding and related simplification of EXPR.
14299 The related simplifications include x*1 => x, x*0 => 0, etc.,
14300 and application of the associative law.
14301 NOP_EXPR conversions may be removed freely (as long as we
14302 are careful not to change the type of the overall expression).
14303 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14304 but we can constant-fold them if they have constant operands. */
14306 #ifdef ENABLE_FOLD_CHECKING
14307 # define fold(x) fold_1 (x)
14308 static tree fold_1 (tree);
14309 static
14310 #endif
14311 tree
14312 fold (tree expr)
14314 const tree t = expr;
14315 enum tree_code code = TREE_CODE (t);
14316 enum tree_code_class kind = TREE_CODE_CLASS (code);
14317 tree tem;
14318 location_t loc = EXPR_LOCATION (expr);
14320 /* Return right away if a constant. */
14321 if (kind == tcc_constant)
14322 return t;
14324 /* CALL_EXPR-like objects with variable numbers of operands are
14325 treated specially. */
14326 if (kind == tcc_vl_exp)
14328 if (code == CALL_EXPR)
14330 tem = fold_call_expr (loc, expr, false);
14331 return tem ? tem : expr;
14333 return expr;
14336 if (IS_EXPR_CODE_CLASS (kind))
14338 tree type = TREE_TYPE (t);
14339 tree op0, op1, op2;
14341 switch (TREE_CODE_LENGTH (code))
14343 case 1:
14344 op0 = TREE_OPERAND (t, 0);
14345 tem = fold_unary_loc (loc, code, type, op0);
14346 return tem ? tem : expr;
14347 case 2:
14348 op0 = TREE_OPERAND (t, 0);
14349 op1 = TREE_OPERAND (t, 1);
14350 tem = fold_binary_loc (loc, code, type, op0, op1);
14351 return tem ? tem : expr;
14352 case 3:
14353 op0 = TREE_OPERAND (t, 0);
14354 op1 = TREE_OPERAND (t, 1);
14355 op2 = TREE_OPERAND (t, 2);
14356 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14357 return tem ? tem : expr;
14358 default:
14359 break;
14363 switch (code)
14365 case ARRAY_REF:
14367 tree op0 = TREE_OPERAND (t, 0);
14368 tree op1 = TREE_OPERAND (t, 1);
14370 if (TREE_CODE (op1) == INTEGER_CST
14371 && TREE_CODE (op0) == CONSTRUCTOR
14372 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14374 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14375 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14376 unsigned HOST_WIDE_INT begin = 0;
14378 /* Find a matching index by means of a binary search. */
14379 while (begin != end)
14381 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14382 tree index = (*elts)[middle].index;
14384 if (TREE_CODE (index) == INTEGER_CST
14385 && tree_int_cst_lt (index, op1))
14386 begin = middle + 1;
14387 else if (TREE_CODE (index) == INTEGER_CST
14388 && tree_int_cst_lt (op1, index))
14389 end = middle;
14390 else if (TREE_CODE (index) == RANGE_EXPR
14391 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14392 begin = middle + 1;
14393 else if (TREE_CODE (index) == RANGE_EXPR
14394 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14395 end = middle;
14396 else
14397 return (*elts)[middle].value;
14401 return t;
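/* Editorial illustration: folding an ARRAY_REF of a constant
   CONSTRUCTOR { 10, 20, 30 } at index 1 binary-searches the sorted
   element indexes above and yields 20.  */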
14404 /* Return a VECTOR_CST if possible. */
14405 case CONSTRUCTOR:
14407 tree type = TREE_TYPE (t);
14408 if (TREE_CODE (type) != VECTOR_TYPE)
14409 return t;
14411 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14412 unsigned HOST_WIDE_INT idx, pos = 0;
14413 tree value;
14415 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14417 if (!CONSTANT_CLASS_P (value))
14418 return t;
14419 if (TREE_CODE (value) == VECTOR_CST)
14421 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14422 vec[pos++] = VECTOR_CST_ELT (value, i);
14424 else
14425 vec[pos++] = value;
14427 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14428 vec[pos] = build_zero_cst (TREE_TYPE (type));
14430 return build_vector (type, vec);
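/* Editorial illustration: a V4SI CONSTRUCTOR holding the constants
   { 1, 2, 3 } becomes the VECTOR_CST { 1, 2, 3, 0 }; elements not
   covered by the CONSTRUCTOR are zero-filled above.  */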
14433 case CONST_DECL:
14434 return fold (DECL_INITIAL (t));
14436 default:
14437 return t;
14438 } /* switch (code) */
14441 #ifdef ENABLE_FOLD_CHECKING
14442 #undef fold
14444 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14445 hash_table<pointer_hash<const tree_node> > *);
14446 static void fold_check_failed (const_tree, const_tree);
14447 void print_fold_checksum (const_tree);
14449 /* When --enable-checking=fold is in effect, compute a digest of EXPR
14450 before and after the actual fold call to verify that fold did not
14451 accidentally change the original expression. */
14453 tree
14454 fold (tree expr)
14456 tree ret;
14457 struct md5_ctx ctx;
14458 unsigned char checksum_before[16], checksum_after[16];
14459 hash_table<pointer_hash<const tree_node> > ht (32);
14461 md5_init_ctx (&ctx);
14462 fold_checksum_tree (expr, &ctx, &ht);
14463 md5_finish_ctx (&ctx, checksum_before);
14464 ht.empty ();
14466 ret = fold_1 (expr);
14468 md5_init_ctx (&ctx);
14469 fold_checksum_tree (expr, &ctx, &ht);
14470 md5_finish_ctx (&ctx, checksum_after);
14472 if (memcmp (checksum_before, checksum_after, 16))
14473 fold_check_failed (expr, ret);
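/* Editorial note: the digests cover the whole tree reachable from EXPR
   (see fold_checksum_tree), so a mismatch here catches in-place
   mutation of any shared subtree, not just of EXPR itself.  */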
14475 return ret;
14478 void
14479 print_fold_checksum (const_tree expr)
14481 struct md5_ctx ctx;
14482 unsigned char checksum[16], cnt;
14483 hash_table<pointer_hash<const tree_node> > ht (32);
14485 md5_init_ctx (&ctx);
14486 fold_checksum_tree (expr, &ctx, &ht);
14487 md5_finish_ctx (&ctx, checksum);
14488 for (cnt = 0; cnt < 16; ++cnt)
14489 fprintf (stderr, "%02x", checksum[cnt]);
14490 putc ('\n', stderr);
14493 static void
14494 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14496 internal_error ("fold check: original tree changed by fold");
14499 static void
14500 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14501 hash_table<pointer_hash <const tree_node> > *ht)
14503 const tree_node **slot;
14504 enum tree_code code;
14505 union tree_node buf;
14506 int i, len;
14508 recursive_label:
14509 if (expr == NULL)
14510 return;
14511 slot = ht->find_slot (expr, INSERT);
14512 if (*slot != NULL)
14513 return;
14514 *slot = expr;
14515 code = TREE_CODE (expr);
14516 if (TREE_CODE_CLASS (code) == tcc_declaration
14517 && DECL_ASSEMBLER_NAME_SET_P (expr))
14519 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14520 memcpy ((char *) &buf, expr, tree_size (expr));
14521 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14522 expr = (tree) &buf;
14524 else if (TREE_CODE_CLASS (code) == tcc_type
14525 && (TYPE_POINTER_TO (expr)
14526 || TYPE_REFERENCE_TO (expr)
14527 || TYPE_CACHED_VALUES_P (expr)
14528 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14529 || TYPE_NEXT_VARIANT (expr)))
14531 /* Allow these fields to be modified. */
14532 tree tmp;
14533 memcpy ((char *) &buf, expr, tree_size (expr));
14534 expr = tmp = (tree) &buf;
14535 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14536 TYPE_POINTER_TO (tmp) = NULL;
14537 TYPE_REFERENCE_TO (tmp) = NULL;
14538 TYPE_NEXT_VARIANT (tmp) = NULL;
14539 if (TYPE_CACHED_VALUES_P (tmp))
14541 TYPE_CACHED_VALUES_P (tmp) = 0;
14542 TYPE_CACHED_VALUES (tmp) = NULL;
14545 md5_process_bytes (expr, tree_size (expr), ctx);
14546 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14547 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14548 if (TREE_CODE_CLASS (code) != tcc_type
14549 && TREE_CODE_CLASS (code) != tcc_declaration
14550 && code != TREE_LIST
14551 && code != SSA_NAME
14552 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14553 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14554 switch (TREE_CODE_CLASS (code))
14556 case tcc_constant:
14557 switch (code)
14559 case STRING_CST:
14560 md5_process_bytes (TREE_STRING_POINTER (expr),
14561 TREE_STRING_LENGTH (expr), ctx);
14562 break;
14563 case COMPLEX_CST:
14564 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14565 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14566 break;
14567 case VECTOR_CST:
14568 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14569 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14570 break;
14571 default:
14572 break;
14574 break;
14575 case tcc_exceptional:
14576 switch (code)
14578 case TREE_LIST:
14579 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14580 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14581 expr = TREE_CHAIN (expr);
14582 goto recursive_label;
14583 break;
14584 case TREE_VEC:
14585 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14586 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14587 break;
14588 default:
14589 break;
14591 break;
14592 case tcc_expression:
14593 case tcc_reference:
14594 case tcc_comparison:
14595 case tcc_unary:
14596 case tcc_binary:
14597 case tcc_statement:
14598 case tcc_vl_exp:
14599 len = TREE_OPERAND_LENGTH (expr);
14600 for (i = 0; i < len; ++i)
14601 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14602 break;
14603 case tcc_declaration:
14604 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14605 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14606 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14608 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14609 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14610 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14611 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14612 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14615 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14617 if (TREE_CODE (expr) == FUNCTION_DECL)
14619 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14620 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14622 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14624 break;
14625 case tcc_type:
14626 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14627 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14628 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14629 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14630 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14631 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14632 if (INTEGRAL_TYPE_P (expr)
14633 || SCALAR_FLOAT_TYPE_P (expr))
14635 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14636 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14638 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14639 if (TREE_CODE (expr) == RECORD_TYPE
14640 || TREE_CODE (expr) == UNION_TYPE
14641 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14642 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14643 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14644 break;
14645 default:
14646 break;
14650 /* Helper function for outputting the checksum of a tree T. When
14651 debugging with gdb, you can "define mynext" to be "next" followed
14652 by "call debug_fold_checksum (op0)", then just trace down till the
14653 outputs differ. */
14655 DEBUG_FUNCTION void
14656 debug_fold_checksum (const_tree t)
14658 int i;
14659 unsigned char checksum[16];
14660 struct md5_ctx ctx;
14661 hash_table<pointer_hash<const tree_node> > ht (32);
14663 md5_init_ctx (&ctx);
14664 fold_checksum_tree (t, &ctx, &ht);
14665 md5_finish_ctx (&ctx, checksum);
14666 ht.empty ();
14668 for (i = 0; i < 16; i++)
14669 fprintf (stderr, "%d ", checksum[i]);
14671 fprintf (stderr, "\n");
14674 #endif
14676 /* Fold a unary tree expression with code CODE of type TYPE with an
14677 operand OP0. LOC is the location of the resulting expression.
14678 Return a folded expression if successful. Otherwise, return a tree
14679 expression with code CODE of type TYPE with an operand OP0. */
14681 tree
14682 fold_build1_stat_loc (location_t loc,
14683 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14685 tree tem;
14686 #ifdef ENABLE_FOLD_CHECKING
14687 unsigned char checksum_before[16], checksum_after[16];
14688 struct md5_ctx ctx;
14689 hash_table<pointer_hash<const tree_node> > ht (32);
14691 md5_init_ctx (&ctx);
14692 fold_checksum_tree (op0, &ctx, &ht);
14693 md5_finish_ctx (&ctx, checksum_before);
14694 ht.empty ();
14695 #endif
14697 tem = fold_unary_loc (loc, code, type, op0);
14698 if (!tem)
14699 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14701 #ifdef ENABLE_FOLD_CHECKING
14702 md5_init_ctx (&ctx);
14703 fold_checksum_tree (op0, &ctx, &ht);
14704 md5_finish_ctx (&ctx, checksum_after);
14706 if (memcmp (checksum_before, checksum_after, 16))
14707 fold_check_failed (op0, tem);
14708 #endif
14709 return tem;
14712 /* Fold a binary tree expression with code CODE of type TYPE with
14713 operands OP0 and OP1. LOC is the location of the resulting
14714 expression. Return a folded expression if successful. Otherwise,
14715 return a tree expression with code CODE of type TYPE with operands
14716 OP0 and OP1. */
14718 tree
14719 fold_build2_stat_loc (location_t loc,
14720 enum tree_code code, tree type, tree op0, tree op1
14721 MEM_STAT_DECL)
14723 tree tem;
14724 #ifdef ENABLE_FOLD_CHECKING
14725 unsigned char checksum_before_op0[16],
14726 checksum_before_op1[16],
14727 checksum_after_op0[16],
14728 checksum_after_op1[16];
14729 struct md5_ctx ctx;
14730 hash_table<pointer_hash<const tree_node> > ht (32);
14732 md5_init_ctx (&ctx);
14733 fold_checksum_tree (op0, &ctx, &ht);
14734 md5_finish_ctx (&ctx, checksum_before_op0);
14735 ht.empty ();
14737 md5_init_ctx (&ctx);
14738 fold_checksum_tree (op1, &ctx, &ht);
14739 md5_finish_ctx (&ctx, checksum_before_op1);
14740 ht.empty ();
14741 #endif
14743 tem = fold_binary_loc (loc, code, type, op0, op1);
14744 if (!tem)
14745 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14747 #ifdef ENABLE_FOLD_CHECKING
14748 md5_init_ctx (&ctx);
14749 fold_checksum_tree (op0, &ctx, &ht);
14750 md5_finish_ctx (&ctx, checksum_after_op0);
14751 ht.empty ();
14753 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14754 fold_check_failed (op0, tem);
14756 md5_init_ctx (&ctx);
14757 fold_checksum_tree (op1, &ctx, &ht);
14758 md5_finish_ctx (&ctx, checksum_after_op1);
14760 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14761 fold_check_failed (op1, tem);
14762 #endif
14763 return tem;
14766 /* Fold a ternary tree expression with code CODE of type TYPE with
14767 operands OP0, OP1, and OP2. Return a folded expression if
14768 successful. Otherwise, return a tree expression with code CODE of
14769 type TYPE with operands OP0, OP1, and OP2. */
14771 tree
14772 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14773 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14775 tree tem;
14776 #ifdef ENABLE_FOLD_CHECKING
14777 unsigned char checksum_before_op0[16],
14778 checksum_before_op1[16],
14779 checksum_before_op2[16],
14780 checksum_after_op0[16],
14781 checksum_after_op1[16],
14782 checksum_after_op2[16];
14783 struct md5_ctx ctx;
14784 hash_table<pointer_hash<const tree_node> > ht (32);
14786 md5_init_ctx (&ctx);
14787 fold_checksum_tree (op0, &ctx, &ht);
14788 md5_finish_ctx (&ctx, checksum_before_op0);
14789 ht.empty ();
14791 md5_init_ctx (&ctx);
14792 fold_checksum_tree (op1, &ctx, &ht);
14793 md5_finish_ctx (&ctx, checksum_before_op1);
14794 ht.empty ();
14796 md5_init_ctx (&ctx);
14797 fold_checksum_tree (op2, &ctx, &ht);
14798 md5_finish_ctx (&ctx, checksum_before_op2);
14799 ht.empty ();
14800 #endif
14802 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14803 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14804 if (!tem)
14805 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14807 #ifdef ENABLE_FOLD_CHECKING
14808 md5_init_ctx (&ctx);
14809 fold_checksum_tree (op0, &ctx, &ht);
14810 md5_finish_ctx (&ctx, checksum_after_op0);
14811 ht.empty ();
14813 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14814 fold_check_failed (op0, tem);
14816 md5_init_ctx (&ctx);
14817 fold_checksum_tree (op1, &ctx, &ht);
14818 md5_finish_ctx (&ctx, checksum_after_op1);
14819 ht.empty ();
14821 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14822 fold_check_failed (op1, tem);
14824 md5_init_ctx (&ctx);
14825 fold_checksum_tree (op2, &ctx, &ht);
14826 md5_finish_ctx (&ctx, checksum_after_op2);
14828 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14829 fold_check_failed (op2, tem);
14830 #endif
14831 return tem;
14834 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14835 arguments in ARGARRAY, and a null static chain.
14836 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14837 of type TYPE from the given operands as constructed by build_call_array. */
14839 tree
14840 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14841 int nargs, tree *argarray)
14843 tree tem;
14844 #ifdef ENABLE_FOLD_CHECKING
14845 unsigned char checksum_before_fn[16],
14846 checksum_before_arglist[16],
14847 checksum_after_fn[16],
14848 checksum_after_arglist[16];
14849 struct md5_ctx ctx;
14850 hash_table<pointer_hash<const tree_node> > ht (32);
14851 int i;
14853 md5_init_ctx (&ctx);
14854 fold_checksum_tree (fn, &ctx, &ht);
14855 md5_finish_ctx (&ctx, checksum_before_fn);
14856 ht.empty ();
14858 md5_init_ctx (&ctx);
14859 for (i = 0; i < nargs; i++)
14860 fold_checksum_tree (argarray[i], &ctx, &ht);
14861 md5_finish_ctx (&ctx, checksum_before_arglist);
14862 ht.empty ();
14863 #endif
14865 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14867 #ifdef ENABLE_FOLD_CHECKING
14868 md5_init_ctx (&ctx);
14869 fold_checksum_tree (fn, &ctx, &ht);
14870 md5_finish_ctx (&ctx, checksum_after_fn);
14871 ht.empty ();
14873 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14874 fold_check_failed (fn, tem);
14876 md5_init_ctx (&ctx);
14877 for (i = 0; i < nargs; i++)
14878 fold_checksum_tree (argarray[i], &ctx, &ht);
14879 md5_finish_ctx (&ctx, checksum_after_arglist);
14881 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14882 fold_check_failed (NULL_TREE, tem);
14883 #endif
14884 return tem;
14887 /* Perform constant folding and related simplification of initializer
14888 expression EXPR. These behave identically to "fold_buildN" but ignore
14889 potential run-time traps and exceptions that fold must preserve. */
14891 #define START_FOLD_INIT \
14892 int saved_signaling_nans = flag_signaling_nans;\
14893 int saved_trapping_math = flag_trapping_math;\
14894 int saved_rounding_math = flag_rounding_math;\
14895 int saved_trapv = flag_trapv;\
14896 int saved_folding_initializer = folding_initializer;\
14897 flag_signaling_nans = 0;\
14898 flag_trapping_math = 0;\
14899 flag_rounding_math = 0;\
14900 flag_trapv = 0;\
14901 folding_initializer = 1;
14903 #define END_FOLD_INIT \
14904 flag_signaling_nans = saved_signaling_nans;\
14905 flag_trapping_math = saved_trapping_math;\
14906 flag_rounding_math = saved_rounding_math;\
14907 flag_trapv = saved_trapv;\
14908 folding_initializer = saved_folding_initializer;
14910 tree
14911 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14912 tree type, tree op)
14914 tree result;
14915 START_FOLD_INIT;
14917 result = fold_build1_loc (loc, code, type, op);
14919 END_FOLD_INIT;
14920 return result;
14923 tree
14924 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14925 tree type, tree op0, tree op1)
14927 tree result;
14928 START_FOLD_INIT;
14930 result = fold_build2_loc (loc, code, type, op0, op1);
14932 END_FOLD_INIT;
14933 return result;
14936 tree
14937 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14938 int nargs, tree *argarray)
14940 tree result;
14941 START_FOLD_INIT;
14943 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14945 END_FOLD_INIT;
14946 return result;
14949 #undef START_FOLD_INIT
14950 #undef END_FOLD_INIT
14952 /* Determine if first argument is a multiple of second argument. Return 0 if
14953 it is not, or we cannot easily determine that it is.
14955 An example of the sort of thing we care about (at this point; this routine
14956 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14957 fold cases do now) is discovering that
14959 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14961 is a multiple of
14963 SAVE_EXPR (J * 8)
14965 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14967 This code also handles discovering that
14969 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14971 is a multiple of 8 so we don't have to worry about dealing with a
14972 possible remainder.
14974 Note that we *look* inside a SAVE_EXPR only to determine how it was
14975 calculated; it is not safe for fold to do much of anything else with the
14976 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14977 at run time. For example, the latter example above *cannot* be implemented
14978 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14979 evaluation time of the original SAVE_EXPR is not necessarily the same at
14980 the time the new expression is evaluated. The only optimization of this
14981 sort that would be valid is changing
14983 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14985 divided by 8 to
14987 SAVE_EXPR (I) * SAVE_EXPR (J)
14989 (where the same SAVE_EXPR (J) is used in the original and the
14990 transformed version). */
14992 int
14993 multiple_of_p (tree type, const_tree top, const_tree bottom)
14995 if (operand_equal_p (top, bottom, 0))
14996 return 1;
14998 if (TREE_CODE (type) != INTEGER_TYPE)
14999 return 0;
15001 switch (TREE_CODE (top))
15003 case BIT_AND_EXPR:
15004 /* Bitwise and provides a power of two multiple. If the mask is
15005 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15006 if (!integer_pow2p (bottom))
15007 return 0;
15008 /* FALLTHRU */
15010 case MULT_EXPR:
15011 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15012 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15014 case PLUS_EXPR:
15015 case MINUS_EXPR:
15016 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15017 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15019 case LSHIFT_EXPR:
15020 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15022 tree op1, t1;
15024 op1 = TREE_OPERAND (top, 1);
15025 /* const_binop may not detect overflow correctly,
15026 so check for it explicitly here. */
15027 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15028 && 0 != (t1 = fold_convert (type,
15029 const_binop (LSHIFT_EXPR,
15030 size_one_node,
15031 op1)))
15032 && !TREE_OVERFLOW (t1))
15033 return multiple_of_p (type, t1, bottom);
15035 return 0;
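/* Editorial illustration: TOP = x << 3 is recognized above as a
   multiple of BOTTOM = 8, because 1 << 3 folds to 8 without overflow
   and 8 is trivially a multiple of 8.  */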
15037 case NOP_EXPR:
15038 /* Can't handle conversions from non-integral or wider integral type. */
15039 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15040 || (TYPE_PRECISION (type)
15041 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15042 return 0;
15044 /* .. fall through ... */
15046 case SAVE_EXPR:
15047 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15049 case COND_EXPR:
15050 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15051 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15053 case INTEGER_CST:
15054 if (TREE_CODE (bottom) != INTEGER_CST
15055 || integer_zerop (bottom)
15056 || (TYPE_UNSIGNED (type)
15057 && (tree_int_cst_sgn (top) < 0
15058 || tree_int_cst_sgn (bottom) < 0)))
15059 return 0;
15060 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15061 SIGNED);
15063 default:
15064 return 0;
15068 /* Return true if an expression with code CODE and type TYPE is known to be non-negative. */
15070 static bool
15071 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15073 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15074 && truth_value_p (code))
15075 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15076 have a signed:1 type (where the values are -1 and 0). */
15077 return true;
15078 return false;
15081 /* Return true if (CODE OP0) is known to be non-negative. If the return
15082 value is based on the assumption that signed overflow is undefined,
15083 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15084 *STRICT_OVERFLOW_P. */
15086 bool
15087 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15088 bool *strict_overflow_p)
15090 if (TYPE_UNSIGNED (type))
15091 return true;
15093 switch (code)
15095 case ABS_EXPR:
15096 /* We can't return 1 if flag_wrapv is set because
15097 ABS_EXPR<INT_MIN> = INT_MIN. */
15098 if (!INTEGRAL_TYPE_P (type))
15099 return true;
15100 if (TYPE_OVERFLOW_UNDEFINED (type))
15102 *strict_overflow_p = true;
15103 return true;
15105 break;
15107 case NON_LVALUE_EXPR:
15108 case FLOAT_EXPR:
15109 case FIX_TRUNC_EXPR:
15110 return tree_expr_nonnegative_warnv_p (op0,
15111 strict_overflow_p);
15113 case NOP_EXPR:
15115 tree inner_type = TREE_TYPE (op0);
15116 tree outer_type = type;
15118 if (TREE_CODE (outer_type) == REAL_TYPE)
15120 if (TREE_CODE (inner_type) == REAL_TYPE)
15121 return tree_expr_nonnegative_warnv_p (op0,
15122 strict_overflow_p);
15123 if (INTEGRAL_TYPE_P (inner_type))
15125 if (TYPE_UNSIGNED (inner_type))
15126 return true;
15127 return tree_expr_nonnegative_warnv_p (op0,
15128 strict_overflow_p);
15131 else if (INTEGRAL_TYPE_P (outer_type))
15133 if (TREE_CODE (inner_type) == REAL_TYPE)
15134 return tree_expr_nonnegative_warnv_p (op0,
15135 strict_overflow_p);
15136 if (INTEGRAL_TYPE_P (inner_type))
15137 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15138 && TYPE_UNSIGNED (inner_type);
15141 break;
15143 default:
15144 return tree_simple_nonnegative_warnv_p (code, type);
15147 /* We don't know the sign of `t', so be conservative and return false. */
15148 return false;
15151 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15152 value is based on the assumption that signed overflow is undefined,
15153 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15154 *STRICT_OVERFLOW_P. */
15156 bool
15157 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15158 tree op1, bool *strict_overflow_p)
15160 if (TYPE_UNSIGNED (type))
15161 return true;
15163 switch (code)
15165 case POINTER_PLUS_EXPR:
15166 case PLUS_EXPR:
15167 if (FLOAT_TYPE_P (type))
15168 return (tree_expr_nonnegative_warnv_p (op0,
15169 strict_overflow_p)
15170 && tree_expr_nonnegative_warnv_p (op1,
15171 strict_overflow_p));
15173 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15174 both unsigned and at least 2 bits shorter than the result. */
15175 if (TREE_CODE (type) == INTEGER_TYPE
15176 && TREE_CODE (op0) == NOP_EXPR
15177 && TREE_CODE (op1) == NOP_EXPR)
15179 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15180 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15181 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15182 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15184 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15185 TYPE_PRECISION (inner2)) + 1;
15186 return prec < TYPE_PRECISION (type);
15189 break;
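/* Editorial illustration: adding two values zero-extended from 8 bits
   in a 32-bit type yields at most 255 + 255 = 510, which fits in
   9 bits; PREC is 9 < 32, so the sum is provably nonnegative.  */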
15191 case MULT_EXPR:
15192 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15194 /* x * x is always non-negative for floating point x
15195 or without overflow. */
15196 if (operand_equal_p (op0, op1, 0)
15197 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15198 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15200 if (TYPE_OVERFLOW_UNDEFINED (type))
15201 *strict_overflow_p = true;
15202 return true;
15206 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15207 both unsigned and their total width is smaller than that of the result. */
15208 if (TREE_CODE (type) == INTEGER_TYPE
15209 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15210 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15212 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15213 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15214 : TREE_TYPE (op0);
15215 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15216 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15217 : TREE_TYPE (op1);
15219 bool unsigned0 = TYPE_UNSIGNED (inner0);
15220 bool unsigned1 = TYPE_UNSIGNED (inner1);
15222 if (TREE_CODE (op0) == INTEGER_CST)
15223 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15225 if (TREE_CODE (op1) == INTEGER_CST)
15226 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15228 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15229 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15231 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15232 ? tree_int_cst_min_precision (op0, UNSIGNED)
15233 : TYPE_PRECISION (inner0);
15235 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15236 ? tree_int_cst_min_precision (op1, UNSIGNED)
15237 : TYPE_PRECISION (inner1);
15239 return precision0 + precision1 < TYPE_PRECISION (type);
15242 return false;
15244 case BIT_AND_EXPR:
15245 case MAX_EXPR:
15246 return (tree_expr_nonnegative_warnv_p (op0,
15247 strict_overflow_p)
15248 || tree_expr_nonnegative_warnv_p (op1,
15249 strict_overflow_p));
15251 case BIT_IOR_EXPR:
15252 case BIT_XOR_EXPR:
15253 case MIN_EXPR:
15254 case RDIV_EXPR:
15255 case TRUNC_DIV_EXPR:
15256 case CEIL_DIV_EXPR:
15257 case FLOOR_DIV_EXPR:
15258 case ROUND_DIV_EXPR:
15259 return (tree_expr_nonnegative_warnv_p (op0,
15260 strict_overflow_p)
15261 && tree_expr_nonnegative_warnv_p (op1,
15262 strict_overflow_p));
15264 case TRUNC_MOD_EXPR:
15265 case CEIL_MOD_EXPR:
15266 case FLOOR_MOD_EXPR:
15267 case ROUND_MOD_EXPR:
15268 return tree_expr_nonnegative_warnv_p (op0,
15269 strict_overflow_p);
15270 default:
15271 return tree_simple_nonnegative_warnv_p (code, type);
15274 /* We don't know the sign of `t', so be conservative and return false. */
15275 return false;
15278 /* Return true if T is known to be non-negative. If the return
15279 value is based on the assumption that signed overflow is undefined,
15280 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15281 *STRICT_OVERFLOW_P. */
15283 bool
15284 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15286 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15287 return true;
15289 switch (TREE_CODE (t))
15291 case INTEGER_CST:
15292 return tree_int_cst_sgn (t) >= 0;
15294 case REAL_CST:
15295 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15297 case FIXED_CST:
15298 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15300 case COND_EXPR:
15301 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15302 strict_overflow_p)
15303 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15304 strict_overflow_p));
15305 default:
15306 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15307 TREE_TYPE (t));
15309 /* We don't know the sign of `t', so be conservative and return false. */
15310 return false;
15313 /* Return true if T is known to be non-negative. If the return
15314 value is based on the assumption that signed overflow is undefined,
15315 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15316 *STRICT_OVERFLOW_P. */
15318 bool
15319 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15320 tree arg0, tree arg1, bool *strict_overflow_p)
15322 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15323 switch (DECL_FUNCTION_CODE (fndecl))
15325 CASE_FLT_FN (BUILT_IN_ACOS):
15326 CASE_FLT_FN (BUILT_IN_ACOSH):
15327 CASE_FLT_FN (BUILT_IN_CABS):
15328 CASE_FLT_FN (BUILT_IN_COSH):
15329 CASE_FLT_FN (BUILT_IN_ERFC):
15330 CASE_FLT_FN (BUILT_IN_EXP):
15331 CASE_FLT_FN (BUILT_IN_EXP10):
15332 CASE_FLT_FN (BUILT_IN_EXP2):
15333 CASE_FLT_FN (BUILT_IN_FABS):
15334 CASE_FLT_FN (BUILT_IN_FDIM):
15335 CASE_FLT_FN (BUILT_IN_HYPOT):
15336 CASE_FLT_FN (BUILT_IN_POW10):
15337 CASE_INT_FN (BUILT_IN_FFS):
15338 CASE_INT_FN (BUILT_IN_PARITY):
15339 CASE_INT_FN (BUILT_IN_POPCOUNT):
15340 CASE_INT_FN (BUILT_IN_CLZ):
15341 CASE_INT_FN (BUILT_IN_CLRSB):
15342 case BUILT_IN_BSWAP32:
15343 case BUILT_IN_BSWAP64:
15344 /* Always true. */
15345 return true;
15347 CASE_FLT_FN (BUILT_IN_SQRT):
15348 /* sqrt(-0.0) is -0.0. */
15349 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15350 return true;
15351 return tree_expr_nonnegative_warnv_p (arg0,
15352 strict_overflow_p);
15354 CASE_FLT_FN (BUILT_IN_ASINH):
15355 CASE_FLT_FN (BUILT_IN_ATAN):
15356 CASE_FLT_FN (BUILT_IN_ATANH):
15357 CASE_FLT_FN (BUILT_IN_CBRT):
15358 CASE_FLT_FN (BUILT_IN_CEIL):
15359 CASE_FLT_FN (BUILT_IN_ERF):
15360 CASE_FLT_FN (BUILT_IN_EXPM1):
15361 CASE_FLT_FN (BUILT_IN_FLOOR):
15362 CASE_FLT_FN (BUILT_IN_FMOD):
15363 CASE_FLT_FN (BUILT_IN_FREXP):
15364 CASE_FLT_FN (BUILT_IN_ICEIL):
15365 CASE_FLT_FN (BUILT_IN_IFLOOR):
15366 CASE_FLT_FN (BUILT_IN_IRINT):
15367 CASE_FLT_FN (BUILT_IN_IROUND):
15368 CASE_FLT_FN (BUILT_IN_LCEIL):
15369 CASE_FLT_FN (BUILT_IN_LDEXP):
15370 CASE_FLT_FN (BUILT_IN_LFLOOR):
15371 CASE_FLT_FN (BUILT_IN_LLCEIL):
15372 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15373 CASE_FLT_FN (BUILT_IN_LLRINT):
15374 CASE_FLT_FN (BUILT_IN_LLROUND):
15375 CASE_FLT_FN (BUILT_IN_LRINT):
15376 CASE_FLT_FN (BUILT_IN_LROUND):
15377 CASE_FLT_FN (BUILT_IN_MODF):
15378 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15379 CASE_FLT_FN (BUILT_IN_RINT):
15380 CASE_FLT_FN (BUILT_IN_ROUND):
15381 CASE_FLT_FN (BUILT_IN_SCALB):
15382 CASE_FLT_FN (BUILT_IN_SCALBLN):
15383 CASE_FLT_FN (BUILT_IN_SCALBN):
15384 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15385 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15386 CASE_FLT_FN (BUILT_IN_SINH):
15387 CASE_FLT_FN (BUILT_IN_TANH):
15388 CASE_FLT_FN (BUILT_IN_TRUNC):
15389 /* True if the 1st argument is nonnegative. */
15390 return tree_expr_nonnegative_warnv_p (arg0,
15391 strict_overflow_p);
15393 CASE_FLT_FN (BUILT_IN_FMAX):
15394 /* True if the 1st OR 2nd arguments are nonnegative. */
15395 return (tree_expr_nonnegative_warnv_p (arg0,
15396 strict_overflow_p)
15397 || (tree_expr_nonnegative_warnv_p (arg1,
15398 strict_overflow_p)));
15400 CASE_FLT_FN (BUILT_IN_FMIN):
15401 /* True if the 1st AND 2nd arguments are nonnegative. */
15402 return (tree_expr_nonnegative_warnv_p (arg0,
15403 strict_overflow_p)
15404 && (tree_expr_nonnegative_warnv_p (arg1,
15405 strict_overflow_p)));
15407 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15408 /* True if the 2nd argument is nonnegative. */
15409 return tree_expr_nonnegative_warnv_p (arg1,
15410 strict_overflow_p);
15412 CASE_FLT_FN (BUILT_IN_POWI):
15413 /* True if the 1st argument is nonnegative or the second
15414 argument is an even integer. */
15415 if (TREE_CODE (arg1) == INTEGER_CST
15416 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15417 return true;
15418 return tree_expr_nonnegative_warnv_p (arg0,
15419 strict_overflow_p);
15421 CASE_FLT_FN (BUILT_IN_POW):
15422 /* True if the 1st argument is nonnegative or the second
15423 argument is an even integer-valued real. */
15424 if (TREE_CODE (arg1) == REAL_CST)
15426 REAL_VALUE_TYPE c;
15427 HOST_WIDE_INT n;
15429 c = TREE_REAL_CST (arg1);
15430 n = real_to_integer (&c);
15431 if ((n & 1) == 0)
15433 REAL_VALUE_TYPE cint;
15434 real_from_integer (&cint, VOIDmode, n, SIGNED);
15435 if (real_identical (&c, &cint))
15436 return true;
15439 return tree_expr_nonnegative_warnv_p (arg0,
15440 strict_overflow_p);
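/* Editorial illustration: pow (x, 2.0) is treated as nonnegative
   regardless of the sign of x, because the exponent is an even
   integer-valued REAL_CST; the check above verifies evenness and
   integrality via real_to_integer and real_identical.  */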
15442 default:
15443 break;
15445 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15446 type);
15449 /* Return true if T is known to be non-negative. If the return
15450 value is based on the assumption that signed overflow is undefined,
15451 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15452 *STRICT_OVERFLOW_P. */
15454 static bool
15455 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15457 enum tree_code code = TREE_CODE (t);
15458 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15459 return true;
15461 switch (code)
15463 case TARGET_EXPR:
15465 tree temp = TARGET_EXPR_SLOT (t);
15466 t = TARGET_EXPR_INITIAL (t);
15468 /* If the initializer is non-void, then it's a normal expression
15469 that will be assigned to the slot. */
15470 if (!VOID_TYPE_P (t))
15471 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15473 /* Otherwise, the initializer sets the slot in some way. One common
15474 way is an assignment statement at the end of the initializer. */
15475 while (1)
15477 if (TREE_CODE (t) == BIND_EXPR)
15478 t = expr_last (BIND_EXPR_BODY (t));
15479 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15480 || TREE_CODE (t) == TRY_CATCH_EXPR)
15481 t = expr_last (TREE_OPERAND (t, 0));
15482 else if (TREE_CODE (t) == STATEMENT_LIST)
15483 t = expr_last (t);
15484 else
15485 break;
15487 if (TREE_CODE (t) == MODIFY_EXPR
15488 && TREE_OPERAND (t, 0) == temp)
15489 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15490 strict_overflow_p);
15492 return false;
15495 case CALL_EXPR:
15497 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15498 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15500 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15501 get_callee_fndecl (t),
15502 arg0,
15503 arg1,
15504 strict_overflow_p);
15506 case COMPOUND_EXPR:
15507 case MODIFY_EXPR:
15508 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15509 strict_overflow_p);
15510 case BIND_EXPR:
15511 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15512 strict_overflow_p);
15513 case SAVE_EXPR:
15514 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15515 strict_overflow_p);
15517 default:
15518 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15519 TREE_TYPE (t));
15522 /* We don't know the sign of `t', so be conservative and return false. */
15523 return false;
15526 /* Return true if T is known to be non-negative. If the return
15527 value is based on the assumption that signed overflow is undefined,
15528 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15529 *STRICT_OVERFLOW_P. */
15531 bool
15532 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15534 enum tree_code code;
15535 if (t == error_mark_node)
15536 return false;
15538 code = TREE_CODE (t);
15539 switch (TREE_CODE_CLASS (code))
15541 case tcc_binary:
15542 case tcc_comparison:
15543 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15544 TREE_TYPE (t),
15545 TREE_OPERAND (t, 0),
15546 TREE_OPERAND (t, 1),
15547 strict_overflow_p);
15549 case tcc_unary:
15550 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15551 TREE_TYPE (t),
15552 TREE_OPERAND (t, 0),
15553 strict_overflow_p);
15555 case tcc_constant:
15556 case tcc_declaration:
15557 case tcc_reference:
15558 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15560 default:
15561 break;
15564 switch (code)
15566 case TRUTH_AND_EXPR:
15567 case TRUTH_OR_EXPR:
15568 case TRUTH_XOR_EXPR:
15569 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15570 TREE_TYPE (t),
15571 TREE_OPERAND (t, 0),
15572 TREE_OPERAND (t, 1),
15573 strict_overflow_p);
15574 case TRUTH_NOT_EXPR:
15575 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15576 TREE_TYPE (t),
15577 TREE_OPERAND (t, 0),
15578 strict_overflow_p);
15580 case COND_EXPR:
15581 case CONSTRUCTOR:
15582 case OBJ_TYPE_REF:
15583 case ASSERT_EXPR:
15584 case ADDR_EXPR:
15585 case WITH_SIZE_EXPR:
15586 case SSA_NAME:
15587 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15589 default:
15590 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15594 /* Return true if `t' is known to be non-negative. Handle warnings
15595 about undefined signed overflow. */
15597 bool
15598 tree_expr_nonnegative_p (tree t)
15600 bool ret, strict_overflow_p;
15602 strict_overflow_p = false;
15603 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15604 if (strict_overflow_p)
15605 fold_overflow_warning (("assuming signed overflow does not occur when "
15606 "determining that expression is always "
15607 "non-negative"),
15608 WARN_STRICT_OVERFLOW_MISC);
15609 return ret;
15613 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15614 For floating point we further ensure that T is not denormal.
15615 Similar logic is present in nonzero_address in rtlanal.c.
15617 If the return value is based on the assumption that signed overflow
15618 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15619 change *STRICT_OVERFLOW_P. */
15621 bool
15622 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15623 bool *strict_overflow_p)
15625 switch (code)
15627 case ABS_EXPR:
15628 return tree_expr_nonzero_warnv_p (op0,
15629 strict_overflow_p);
15631 case NOP_EXPR:
15633 tree inner_type = TREE_TYPE (op0);
15634 tree outer_type = type;
15636 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15637 && tree_expr_nonzero_warnv_p (op0,
15638 strict_overflow_p));
15640 break;
15642 case NON_LVALUE_EXPR:
15643 return tree_expr_nonzero_warnv_p (op0,
15644 strict_overflow_p);
15646 default:
15647 break;
15650 return false;
15653 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15654 For floating point we further ensure that T is not denormal.
15655 Similar logic is present in nonzero_address in rtlanal.c.
15657 If the return value is based on the assumption that signed overflow
15658 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15659 change *STRICT_OVERFLOW_P. */
15661 bool
15662 tree_binary_nonzero_warnv_p (enum tree_code code,
15663 tree type,
15664 tree op0,
15665 tree op1, bool *strict_overflow_p)
15667 bool sub_strict_overflow_p;
15668 switch (code)
15670 case POINTER_PLUS_EXPR:
15671 case PLUS_EXPR:
15672 if (TYPE_OVERFLOW_UNDEFINED (type))
15674 /* In the presence of negative values it is hard
15675 to say anything. */
15676 sub_strict_overflow_p = false;
15677 if (!tree_expr_nonnegative_warnv_p (op0,
15678 &sub_strict_overflow_p)
15679 || !tree_expr_nonnegative_warnv_p (op1,
15680 &sub_strict_overflow_p))
15681 return false;
15682 /* One of the operands must be positive and the other non-negative. */
15683 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15684 overflows, on a twos-complement machine the sum of two
15685 nonnegative numbers can never be zero. */
15686 return (tree_expr_nonzero_warnv_p (op0,
15687 strict_overflow_p)
15688 || tree_expr_nonzero_warnv_p (op1,
15689 strict_overflow_p));
15691 break;
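/* Editorial illustration of the PLUS_EXPR reasoning above, with
   hypothetical 8-bit operands: 100 + 100 wraps to -56, which is still
   nonzero; the largest possible sum of two nonnegative values,
   127 + 127 = 254, stays below 256, so zero can only arise as 0 + 0.  */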
15693 case MULT_EXPR:
15694 if (TYPE_OVERFLOW_UNDEFINED (type))
15696 if (tree_expr_nonzero_warnv_p (op0,
15697 strict_overflow_p)
15698 && tree_expr_nonzero_warnv_p (op1,
15699 strict_overflow_p))
15701 *strict_overflow_p = true;
15702 return true;
15705 break;
15707 case MIN_EXPR:
15708 sub_strict_overflow_p = false;
15709 if (tree_expr_nonzero_warnv_p (op0,
15710 &sub_strict_overflow_p)
15711 && tree_expr_nonzero_warnv_p (op1,
15712 &sub_strict_overflow_p))
15714 if (sub_strict_overflow_p)
15715 *strict_overflow_p = true;
15717 break;
15719 case MAX_EXPR:
15720 sub_strict_overflow_p = false;
15721 if (tree_expr_nonzero_warnv_p (op0,
15722 &sub_strict_overflow_p))
15724 if (sub_strict_overflow_p)
15725 *strict_overflow_p = true;
15727 /* When both operands are nonzero, MAX must be too. */
15728 if (tree_expr_nonzero_warnv_p (op1,
15729 strict_overflow_p))
15730 return true;
15732 /* MAX where operand 0 is positive is positive. */
15733 return tree_expr_nonnegative_warnv_p (op0,
15734 strict_overflow_p);
15736 /* MAX where operand 1 is positive is positive. */
15737 else if (tree_expr_nonzero_warnv_p (op1,
15738 &sub_strict_overflow_p)
15739 && tree_expr_nonnegative_warnv_p (op1,
15740 &sub_strict_overflow_p))
15742 if (sub_strict_overflow_p)
15743 *strict_overflow_p = true;
15744 return true;
15746 break;
15748 case BIT_IOR_EXPR:
15749 return (tree_expr_nonzero_warnv_p (op1,
15750 strict_overflow_p)
15751 || tree_expr_nonzero_warnv_p (op0,
15752 strict_overflow_p));
15754 default:
15755 break;
15758 return false;
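/* Worked example for the PLUS_EXPR case above (editorial sketch, not
   from the original sources): in 8-bit two's complement the largest
   sum of two non-negative values is 127 + 127 == 254, which wraps to
   -2 but can never wrap all the way around to zero; that would need a
   sum of exactly 256.  So "non-negative + non-negative, at least one
   nonzero" is provably nonzero without touching *STRICT_OVERFLOW_P.  */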
15761 /* Return true when T is an address and is known to be nonzero.
15762 For floating point we further ensure that T is not denormal.
15763 Similar logic is present in nonzero_address in rtlanal.h.
15765 If the return value is based on the assumption that signed overflow
15766 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15767 change *STRICT_OVERFLOW_P. */
15769 bool
15770 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15772 bool sub_strict_overflow_p;
15773 switch (TREE_CODE (t))
15775 case INTEGER_CST:
15776 return !integer_zerop (t);
15778 case ADDR_EXPR:
15780 tree base = TREE_OPERAND (t, 0);
15782 if (!DECL_P (base))
15783 base = get_base_address (base);
15785 if (!base)
15786 return false;
15788 /* For objects in the symbol table, check whether we know they are non-zero.
15789 Don't do anything for variables and functions before the symtab is built;
15790 it is quite possible that they will be declared weak later. */
15791 if (DECL_P (base) && decl_in_symtab_p (base))
15793 struct symtab_node *symbol;
15795 symbol = symtab_node::get_create (base);
15796 if (symbol)
15797 return symbol->nonzero_address ();
15798 else
15799 return false;
15802 /* Function local objects are never NULL. */
15803 if (DECL_P (base)
15804 && (DECL_CONTEXT (base)
15805 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15806 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15807 return true;
15809 /* Constants are never weak. */
15810 if (CONSTANT_CLASS_P (base))
15811 return true;
15813 return false;
15816 case COND_EXPR:
15817 sub_strict_overflow_p = false;
15818 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15819 &sub_strict_overflow_p)
15820 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15821 &sub_strict_overflow_p))
15823 if (sub_strict_overflow_p)
15824 *strict_overflow_p = true;
15825 return true;
15827 break;
15829 default:
15830 break;
15832 return false;
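/* Illustrative cases (editorial note, not part of the original
   sources): &local_var for an auto variable of the current function
   folds to "nonzero" via the DECL_CONTEXT check above, whereas
   &weak_symbol must go through symtab_node::nonzero_address (), since
   a weak symbol may legitimately resolve to address zero.  */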
15835 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15836 attempt to fold the expression to a constant without modifying TYPE,
15837 OP0 or OP1.
15839 If the expression could be simplified to a constant, then return
15840 the constant. If the expression would not be simplified to a
15841 constant, then return NULL_TREE. */
15843 tree
15844 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15846 tree tem = fold_binary (code, type, op0, op1);
15847 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15850 /* Given the components of a unary expression CODE, TYPE and OP0,
15851 attempt to fold the expression to a constant without modifying
15852 TYPE or OP0.
15854 If the expression could be simplified to a constant, then return
15855 the constant. If the expression would not be simplified to a
15856 constant, then return NULL_TREE. */
15858 tree
15859 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15861 tree tem = fold_unary (code, type, op0);
15862 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
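/* Hypothetical usage sketch (editorial addition, not part of GCC):
   how a caller might chain the two helpers above, falling back when
   folding does not produce a constant.  */
#if 0
static tree
example_fold_to_constant (tree a, tree b)
{
  /* Try A + B first; fold_binary_to_constant returns NULL_TREE unless
     the result is a genuine constant.  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (a), a, b);
  if (sum)
    return sum;
  /* Otherwise try negating A alone.  */
  return fold_unary_to_constant (NEGATE_EXPR, TREE_TYPE (a), a);
}
#endif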
15865 /* If EXP represents referencing an element in a constant string
15866 (either via pointer arithmetic or array indexing), return the
15867 tree representing the value accessed, otherwise return NULL. */
15869 tree
15870 fold_read_from_constant_string (tree exp)
15872 if ((TREE_CODE (exp) == INDIRECT_REF
15873 || TREE_CODE (exp) == ARRAY_REF)
15874 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15876 tree exp1 = TREE_OPERAND (exp, 0);
15877 tree index;
15878 tree string;
15879 location_t loc = EXPR_LOCATION (exp);
15881 if (TREE_CODE (exp) == INDIRECT_REF)
15882 string = string_constant (exp1, &index);
15883 else
15885 tree low_bound = array_ref_low_bound (exp);
15886 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15888 /* Optimize the special case of a zero lower bound.
15890 We convert the low_bound to sizetype to avoid some problems
15891 with constant folding. (E.g. suppose the lower bound is 1,
15892 and its mode is QI. Without the conversion, (ARRAY
15893 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15894 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15895 if (! integer_zerop (low_bound))
15896 index = size_diffop_loc (loc, index,
15897 fold_convert_loc (loc, sizetype, low_bound));
15899 string = exp1;
15902 if (string
15903 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15904 && TREE_CODE (string) == STRING_CST
15905 && TREE_CODE (index) == INTEGER_CST
15906 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15907 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15908 == MODE_INT)
15909 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15910 return build_int_cst_type (TREE_TYPE (exp),
15911 (TREE_STRING_POINTER (string)
15912 [TREE_INT_CST_LOW (index)]));
15914 return NULL;
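/* Worked example (editorial note, not part of the original sources):
   for the C expression "abc"[1], STRING is the STRING_CST "abc" and
   INDEX folds to 1, so the function returns the character constant
   'b', i.e. 98 on an ASCII target, with the element type of EXP.  */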
15917 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15918 an integer constant, real, or fixed-point constant.
15920 TYPE is the type of the result. */
15922 static tree
15923 fold_negate_const (tree arg0, tree type)
15925 tree t = NULL_TREE;
15927 switch (TREE_CODE (arg0))
15929 case INTEGER_CST:
15931 bool overflow;
15932 wide_int val = wi::neg (arg0, &overflow);
15933 t = force_fit_type (type, val, 1,
15934 (overflow | TREE_OVERFLOW (arg0))
15935 && !TYPE_UNSIGNED (type));
15936 break;
15939 case REAL_CST:
15940 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15941 break;
15943 case FIXED_CST:
15945 FIXED_VALUE_TYPE f;
15946 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15947 &(TREE_FIXED_CST (arg0)), NULL,
15948 TYPE_SATURATING (type));
15949 t = build_fixed (type, f);
15950 /* Propagate overflow flags. */
15951 if (overflow_p | TREE_OVERFLOW (arg0))
15952 TREE_OVERFLOW (t) = 1;
15953 break;
15956 default:
15957 gcc_unreachable ();
15960 return t;
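/* Worked example (editorial note, not part of the original sources):
   negating the 32-bit signed INTEGER_CST INT_MIN (-2147483648) has no
   representable result; wi::neg reports overflow, the value wraps
   back to INT_MIN, and force_fit_type marks the returned node with
   TREE_OVERFLOW instead of silently accepting the wraparound.  */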
15963 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15964 an integer constant or real constant.
15966 TYPE is the type of the result. */
15968 tree
15969 fold_abs_const (tree arg0, tree type)
15971 tree t = NULL_TREE;
15973 switch (TREE_CODE (arg0))
15975 case INTEGER_CST:
15977 /* If the value is unsigned or non-negative, then the absolute value
15978 is the same as the ordinary value. */
15979 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15980 t = arg0;
15982 /* If the value is negative, then the absolute value is
15983 its negation. */
15984 else
15986 bool overflow;
15987 wide_int val = wi::neg (arg0, &overflow);
15988 t = force_fit_type (type, val, -1,
15989 overflow | TREE_OVERFLOW (arg0));
15992 break;
15994 case REAL_CST:
15995 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15996 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15997 else
15998 t = arg0;
15999 break;
16001 default:
16002 gcc_unreachable ();
16005 return t;
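/* Worked example (editorial note, not part of the original sources):
   for the REAL_CST -0.0 the REAL_VALUE_NEGATIVE test sees the sign
   bit and the result is the negated constant +0.0; for the
   INTEGER_CST INT_MIN the negation overflows, and the overflow is
   propagated through force_fit_type just as in fold_negate_const.  */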
16008 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16009 constant. TYPE is the type of the result. */
16011 static tree
16012 fold_not_const (const_tree arg0, tree type)
16014 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16016 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
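/* Worked example (editorial note, not part of the original sources):
   with an 8-bit unsigned type, fold_not_const turns the constant 0x0f
   into 0xf0, i.e. ~00001111 == 11110000.  */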
16019 /* Given CODE, a relational operator, the target type, TYPE and two
16020 constant operands OP0 and OP1, return the result of the
16021 relational operation. If the result is not a compile time
16022 constant, then return NULL_TREE. */
16024 static tree
16025 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16027 int result, invert;
16029 /* From here on, the only cases we handle are when the result is
16030 known to be a constant. */
16032 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16034 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16035 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16037 /* Handle the cases where either operand is a NaN. */
16038 if (real_isnan (c0) || real_isnan (c1))
16040 switch (code)
16042 case EQ_EXPR:
16043 case ORDERED_EXPR:
16044 result = 0;
16045 break;
16047 case NE_EXPR:
16048 case UNORDERED_EXPR:
16049 case UNLT_EXPR:
16050 case UNLE_EXPR:
16051 case UNGT_EXPR:
16052 case UNGE_EXPR:
16053 case UNEQ_EXPR:
16054 result = 1;
16055 break;
16057 case LT_EXPR:
16058 case LE_EXPR:
16059 case GT_EXPR:
16060 case GE_EXPR:
16061 case LTGT_EXPR:
16062 if (flag_trapping_math)
16063 return NULL_TREE;
16064 result = 0;
16065 break;
16067 default:
16068 gcc_unreachable ();
16071 return constant_boolean_node (result, type);
16074 return constant_boolean_node (real_compare (code, c0, c1), type);
16077 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16079 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16080 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16081 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16084 /* Handle equality/inequality of complex constants. */
16085 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16087 tree rcond = fold_relational_const (code, type,
16088 TREE_REALPART (op0),
16089 TREE_REALPART (op1));
16090 tree icond = fold_relational_const (code, type,
16091 TREE_IMAGPART (op0),
16092 TREE_IMAGPART (op1));
16093 if (code == EQ_EXPR)
16094 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16095 else if (code == NE_EXPR)
16096 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16097 else
16098 return NULL_TREE;
16101 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16103 unsigned count = VECTOR_CST_NELTS (op0);
16104 tree *elts = XALLOCAVEC (tree, count);
16105 gcc_assert (VECTOR_CST_NELTS (op1) == count
16106 && TYPE_VECTOR_SUBPARTS (type) == count);
16108 for (unsigned i = 0; i < count; i++)
16110 tree elem_type = TREE_TYPE (type);
16111 tree elem0 = VECTOR_CST_ELT (op0, i);
16112 tree elem1 = VECTOR_CST_ELT (op1, i);
16114 tree tem = fold_relational_const (code, elem_type,
16115 elem0, elem1);
16117 if (tem == NULL_TREE)
16118 return NULL_TREE;
16120 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16123 return build_vector (type, elts);
16126 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16128 To compute GT, swap the arguments and do LT.
16129 To compute GE, do LT and invert the result.
16130 To compute LE, swap the arguments, do LT and invert the result.
16131 To compute NE, do EQ and invert the result.
16133 Therefore, the code below must handle only EQ and LT. */
16135 if (code == LE_EXPR || code == GT_EXPR)
16137 tree tem = op0;
16138 op0 = op1;
16139 op1 = tem;
16140 code = swap_tree_comparison (code);
16143 /* Note that it is safe to invert for real values here because we
16144 have already handled the one case where it matters (NaN operands). */
16146 invert = 0;
16147 if (code == NE_EXPR || code == GE_EXPR)
16149 invert = 1;
16150 code = invert_tree_comparison (code, false);
16153 /* Compute a result for LT or EQ if the arguments permit;
16154 otherwise return NULL_TREE. */
16155 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16157 if (code == EQ_EXPR)
16158 result = tree_int_cst_equal (op0, op1);
16159 else
16160 result = tree_int_cst_lt (op0, op1);
16162 else
16163 return NULL_TREE;
16165 if (invert)
16166 result ^= 1;
16167 return constant_boolean_node (result, type);
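/* Worked example (editorial note, not part of the original sources):
   to fold 5 >= 3, the code above rewrites GE as "not LT" (INVERT
   becomes 1), computes tree_int_cst_lt (5, 3) == 0, flips the bit,
   and returns the boolean constant 1.  */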
16170 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16171 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16172 itself. */
16174 tree
16175 fold_build_cleanup_point_expr (tree type, tree expr)
16177 /* If the expression does not have side effects then we don't have to wrap
16178 it with a cleanup point expression. */
16179 if (!TREE_SIDE_EFFECTS (expr))
16180 return expr;
16182 /* If the expression is a return, check whether the expression inside the
16183 return, or the right-hand side of a MODIFY_EXPR inside the return, has
16184 side effects. If either one has none, we don't need to wrap the
16185 expression in a cleanup point expression. Note we don't check the
16186 left-hand side of the modify because it should always be the return decl. */
16187 if (TREE_CODE (expr) == RETURN_EXPR)
16189 tree op = TREE_OPERAND (expr, 0);
16190 if (!op || !TREE_SIDE_EFFECTS (op))
16191 return expr;
16192 op = TREE_OPERAND (op, 1);
16193 if (!TREE_SIDE_EFFECTS (op))
16194 return expr;
16197 return build1 (CLEANUP_POINT_EXPR, type, expr);
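/* Illustrative example (editorial note, not part of the original
   sources): "return x;" needs no CLEANUP_POINT_EXPR because neither
   the return operand nor the right-hand side of its MODIFY_EXPR has
   side effects, whereas "return f ();" is wrapped so that temporaries
   created while evaluating f () are cleaned up before returning.  */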
16200 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16201 of an indirection through OP0, or NULL_TREE if no simplification is
16202 possible. */
16204 tree
16205 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16207 tree sub = op0;
16208 tree subtype;
16210 STRIP_NOPS (sub);
16211 subtype = TREE_TYPE (sub);
16212 if (!POINTER_TYPE_P (subtype))
16213 return NULL_TREE;
16215 if (TREE_CODE (sub) == ADDR_EXPR)
16217 tree op = TREE_OPERAND (sub, 0);
16218 tree optype = TREE_TYPE (op);
16219 /* *&CONST_DECL -> the value of the CONST_DECL. */
16220 if (TREE_CODE (op) == CONST_DECL)
16221 return DECL_INITIAL (op);
16222 /* *&p => p; make sure to handle *&"str"[cst] here. */
16223 if (type == optype)
16225 tree fop = fold_read_from_constant_string (op);
16226 if (fop)
16227 return fop;
16228 else
16229 return op;
16231 /* *(foo *)&fooarray => fooarray[0] */
16232 else if (TREE_CODE (optype) == ARRAY_TYPE
16233 && type == TREE_TYPE (optype)
16234 && (!in_gimple_form
16235 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16237 tree type_domain = TYPE_DOMAIN (optype);
16238 tree min_val = size_zero_node;
16239 if (type_domain && TYPE_MIN_VALUE (type_domain))
16240 min_val = TYPE_MIN_VALUE (type_domain);
16241 if (in_gimple_form
16242 && TREE_CODE (min_val) != INTEGER_CST)
16243 return NULL_TREE;
16244 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16245 NULL_TREE, NULL_TREE);
16247 /* *(foo *)&complexfoo => __real__ complexfoo */
16248 else if (TREE_CODE (optype) == COMPLEX_TYPE
16249 && type == TREE_TYPE (optype))
16250 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16251 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16252 else if (TREE_CODE (optype) == VECTOR_TYPE
16253 && type == TREE_TYPE (optype))
16255 tree part_width = TYPE_SIZE (type);
16256 tree index = bitsize_int (0);
16257 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16261 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16262 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16264 tree op00 = TREE_OPERAND (sub, 0);
16265 tree op01 = TREE_OPERAND (sub, 1);
16267 STRIP_NOPS (op00);
16268 if (TREE_CODE (op00) == ADDR_EXPR)
16270 tree op00type;
16271 op00 = TREE_OPERAND (op00, 0);
16272 op00type = TREE_TYPE (op00);
16274 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16275 if (TREE_CODE (op00type) == VECTOR_TYPE
16276 && type == TREE_TYPE (op00type))
16278 HOST_WIDE_INT offset = tree_to_shwi (op01);
16279 tree part_width = TYPE_SIZE (type);
16280 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16281 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16282 tree index = bitsize_int (indexi);
16284 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16285 return fold_build3_loc (loc,
16286 BIT_FIELD_REF, type, op00,
16287 part_width, index);
16290 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16291 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16292 && type == TREE_TYPE (op00type))
16294 tree size = TYPE_SIZE_UNIT (type);
16295 if (tree_int_cst_equal (size, op01))
16296 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16298 /* ((foo *)&fooarray)[1] => fooarray[1] */
16299 else if (TREE_CODE (op00type) == ARRAY_TYPE
16300 && type == TREE_TYPE (op00type))
16302 tree type_domain = TYPE_DOMAIN (op00type);
16303 tree min_val = size_zero_node;
16304 if (type_domain && TYPE_MIN_VALUE (type_domain))
16305 min_val = TYPE_MIN_VALUE (type_domain);
16306 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16307 TYPE_SIZE_UNIT (type));
16308 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16309 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16310 NULL_TREE, NULL_TREE);
16315 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16316 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16317 && type == TREE_TYPE (TREE_TYPE (subtype))
16318 && (!in_gimple_form
16319 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16321 tree type_domain;
16322 tree min_val = size_zero_node;
16323 sub = build_fold_indirect_ref_loc (loc, sub);
16324 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16325 if (type_domain && TYPE_MIN_VALUE (type_domain))
16326 min_val = TYPE_MIN_VALUE (type_domain);
16327 if (in_gimple_form
16328 && TREE_CODE (min_val) != INTEGER_CST)
16329 return NULL_TREE;
16330 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16331 NULL_TREE);
16334 return NULL_TREE;
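/* Worked example for the vector case above (editorial note, not part
   of the original sources): for a V4SF vector V and the access
   *((float *)&v + 1), OP01 is the byte offset 4 and PART_WIDTH is 32
   bits, so INDEXI == 4 * 8 == 32 and the result is
   BIT_FIELD_REF <v, 32, 32>, i.e. element 1 of the vector.  */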
16337 /* Builds an expression for an indirection through T, simplifying some
16338 cases. */
16340 tree
16341 build_fold_indirect_ref_loc (location_t loc, tree t)
16343 tree type = TREE_TYPE (TREE_TYPE (t));
16344 tree sub = fold_indirect_ref_1 (loc, type, t);
16346 if (sub)
16347 return sub;
16349 return build1_loc (loc, INDIRECT_REF, type, t);
16352 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16354 tree
16355 fold_indirect_ref_loc (location_t loc, tree t)
16357 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16359 if (sub)
16360 return sub;
16361 else
16362 return t;
16365 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16366 whose result is ignored. The type of the returned tree need not be
16367 the same as that of the original expression. */
16369 tree
16370 fold_ignored_result (tree t)
16372 if (!TREE_SIDE_EFFECTS (t))
16373 return integer_zero_node;
16375 for (;;)
16376 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16378 case tcc_unary:
16379 t = TREE_OPERAND (t, 0);
16380 break;
16382 case tcc_binary:
16383 case tcc_comparison:
16384 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16385 t = TREE_OPERAND (t, 0);
16386 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16387 t = TREE_OPERAND (t, 1);
16388 else
16389 return t;
16390 break;
16392 case tcc_expression:
16393 switch (TREE_CODE (t))
16395 case COMPOUND_EXPR:
16396 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16397 return t;
16398 t = TREE_OPERAND (t, 0);
16399 break;
16401 case COND_EXPR:
16402 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16403 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16404 return t;
16405 t = TREE_OPERAND (t, 0);
16406 break;
16408 default:
16409 return t;
16411 break;
16413 default:
16414 return t;
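/* Illustrative example (editorial note, not part of the original
   sources): for the ignored expression (f () + 3) * 4, the tcc_binary
   arm above peels off the side-effect-free constant operands step by
   step until only the side-effecting call f () remains.  */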
16418 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16420 tree
16421 round_up_loc (location_t loc, tree value, unsigned int divisor)
16423 tree div = NULL_TREE;
16425 if (divisor == 1)
16426 return value;
16428 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16429 have to do anything. Only do this check when VALUE is not a
16430 constant, because for a constant it is more expensive than just
16431 doing the rounding directly. */
16432 if (TREE_CODE (value) != INTEGER_CST)
16434 div = build_int_cst (TREE_TYPE (value), divisor);
16436 if (multiple_of_p (TREE_TYPE (value), value, div))
16437 return value;
16440 /* If divisor is a power of two, simplify this to bit manipulation. */
16441 if (divisor == (divisor & -divisor))
16443 if (TREE_CODE (value) == INTEGER_CST)
16445 wide_int val = value;
16446 bool overflow_p;
16448 if ((val & (divisor - 1)) == 0)
16449 return value;
16451 overflow_p = TREE_OVERFLOW (value);
16452 val &= ~(divisor - 1);
16453 val += divisor;
16454 if (val == 0)
16455 overflow_p = true;
16457 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16459 else
16461 tree t;
16463 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16464 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16465 t = build_int_cst (TREE_TYPE (value), -divisor);
16466 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16469 else
16471 if (!div)
16472 div = build_int_cst (TREE_TYPE (value), divisor);
16473 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16474 value = size_binop_loc (loc, MULT_EXPR, value, div);
16477 return value;
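/* Worked example (editorial note, not part of the original sources):
   rounding the constant 37 up to a multiple of 8 takes the
   power-of-two path: 37 & ~7 == 32, then + 8 == 40.  For a
   non-power-of-two divisor such as 12, the CEIL_DIV_EXPR/MULT_EXPR
   path yields ceil (37 / 12) * 12 == 4 * 12 == 48.  */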
16480 /* Likewise, but round down. */
16482 tree
16483 round_down_loc (location_t loc, tree value, int divisor)
16485 tree div = NULL_TREE;
16487 gcc_assert (divisor > 0);
16488 if (divisor == 1)
16489 return value;
16491 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16492 have to do anything. Only do this check when VALUE is not a
16493 constant, because for a constant it is more expensive than just
16494 doing the rounding directly. */
16495 if (TREE_CODE (value) != INTEGER_CST)
16497 div = build_int_cst (TREE_TYPE (value), divisor);
16499 if (multiple_of_p (TREE_TYPE (value), value, div))
16500 return value;
16503 /* If divisor is a power of two, simplify this to bit manipulation. */
16504 if (divisor == (divisor & -divisor))
16506 tree t;
16508 t = build_int_cst (TREE_TYPE (value), -divisor);
16509 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16511 else
16513 if (!div)
16514 div = build_int_cst (TREE_TYPE (value), divisor);
16515 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16516 value = size_binop_loc (loc, MULT_EXPR, value, div);
16519 return value;
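/* Worked example (editorial note, not part of the original sources):
   rounding 37 down to a multiple of 8 is just 37 & -8 == 32; for
   DIVISOR == 12 the FLOOR_DIV_EXPR/MULT_EXPR path gives
   (37 / 12) * 12 == 36.  */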
16522 /* Returns the pointer to the base of the object addressed by EXP and
16523 extracts the information about the offset of the access, storing it
16524 in *PBITPOS and *POFFSET. */
16526 static tree
16527 split_address_to_core_and_offset (tree exp,
16528 HOST_WIDE_INT *pbitpos, tree *poffset)
16530 tree core;
16531 machine_mode mode;
16532 int unsignedp, volatilep;
16533 HOST_WIDE_INT bitsize;
16534 location_t loc = EXPR_LOCATION (exp);
16536 if (TREE_CODE (exp) == ADDR_EXPR)
16538 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16539 poffset, &mode, &unsignedp, &volatilep,
16540 false);
16541 core = build_fold_addr_expr_loc (loc, core);
16543 else
16545 core = exp;
16546 *pbitpos = 0;
16547 *poffset = NULL_TREE;
16550 return core;
16553 /* Returns true if addresses of E1 and E2 differ by a constant, false
16554 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16556 bool
16557 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16559 tree core1, core2;
16560 HOST_WIDE_INT bitpos1, bitpos2;
16561 tree toffset1, toffset2, tdiff, type;
16563 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16564 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16566 if (bitpos1 % BITS_PER_UNIT != 0
16567 || bitpos2 % BITS_PER_UNIT != 0
16568 || !operand_equal_p (core1, core2, 0))
16569 return false;
16571 if (toffset1 && toffset2)
16573 type = TREE_TYPE (toffset1);
16574 if (type != TREE_TYPE (toffset2))
16575 toffset2 = fold_convert (type, toffset2);
16577 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16578 if (!cst_and_fits_in_hwi (tdiff))
16579 return false;
16581 *diff = int_cst_value (tdiff);
16583 else if (toffset1 || toffset2)
16585 /* If only one of the two addresses has a variable offset part, the
16586 difference cannot be a constant. */
16587 return false;
16589 else
16590 *diff = 0;
16592 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16593 return true;
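/* Worked example (editorial note, not part of the original sources):
   for int a[20], the addresses &a[10] and &a[4] share the core &a
   with bit positions 320 and 128; both are byte-aligned, so the
   function stores (320 - 128) / 8 == 24 in *DIFF and returns true.  */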
16596 /* Simplify the floating point expression EXP when the sign of the
16597 result is not significant. Return NULL_TREE if no simplification
16598 is possible. */
16600 tree
16601 fold_strip_sign_ops (tree exp)
16603 tree arg0, arg1;
16604 location_t loc = EXPR_LOCATION (exp);
16606 switch (TREE_CODE (exp))
16608 case ABS_EXPR:
16609 case NEGATE_EXPR:
16610 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16611 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16613 case MULT_EXPR:
16614 case RDIV_EXPR:
16615 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16616 return NULL_TREE;
16617 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16618 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16619 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16620 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16621 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16622 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16623 break;
16625 case COMPOUND_EXPR:
16626 arg0 = TREE_OPERAND (exp, 0);
16627 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16628 if (arg1)
16629 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16630 break;
16632 case COND_EXPR:
16633 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16634 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16635 if (arg0 || arg1)
16636 return fold_build3_loc (loc,
16637 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16638 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16639 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16640 break;
16642 case CALL_EXPR:
16644 const enum built_in_function fcode = builtin_mathfn_code (exp);
16645 switch (fcode)
16647 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16648 /* Strip the copysign call; return its first argument, preserving
any side effects of the second. */
16649 arg0 = CALL_EXPR_ARG (exp, 0);
16650 arg1 = CALL_EXPR_ARG (exp, 1);
16651 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16653 default:
16654 /* Strip sign ops from the argument of "odd" math functions. */
16655 if (negate_mathfn_p (fcode))
16657 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16658 if (arg0)
16659 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16661 break;
16664 break;
16666 default:
16667 break;
16669 return NULL_TREE;
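/* Illustrative example (editorial note, not part of the original
   sources): when only the magnitude of the result matters, e.g.
   inside fabs (), fold_strip_sign_ops rewrites -x * y as x * y and
   strips copysign (a, b) down to its first argument, returning
   NULL_TREE whenever no sign operation could be removed.  */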