/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
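/* A minimal illustrative sketch of these entry points (not code from
   this file; the variable names are hypothetical):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, two, three));
     // SUM is now the INTEGER_CST 5, not a PLUS_EXPR node.

     tree sz = size_binop (MULT_EXPR, size_int (4), size_int (8));
     // SZ is the sizetype constant 32.  */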
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
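/* To illustrate the bit encoding (an explanatory note, not code from
   this file): the three low bits stand for "less", "equal" and
   "greater", and the fourth for "unordered", so

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                // 3 == 1 | 2
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)  // 13

   which is why ORing or ANDing two comparisons of the same operands
   reduces to ORing or ANDing their comparison codes.  */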
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    machine_mode *, int *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
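/* For instance (an illustrative sketch, not code from this file),
   given INTEGER_CST operands the function behaves like so:

     tree q = div_if_zero_remainder (build_int_cst (sizetype, 12),
				     build_int_cst (sizetype, 4));
     // Q is the sizetype constant 3.
     tree r = div_if_zero_remainder (build_int_cst (sizetype, 13),
				     build_int_cst (sizetype, 4));
     // R is NULL_TREE, since 13 is not a multiple of 4.  */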
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
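/* A hypothetical caller-side sketch of the deferral protocol (not code
   from this file): code such as the loop-iteration estimator brackets
   its folding with

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     ... decide whether RES is actually used ...
     fold_undefer_overflow_warnings (res_was_used, some_stmt, 0);

   so a -Wstrict-overflow warning is only emitted when the folded
   result is kept.  RES_WAS_USED and SOME_STMT are placeholders.  */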
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
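/* Illustration (not code from this file): in 32-bit int, negating
   INT_MIN = -2147483648 overflows, because +2147483648 is not
   representable; INT_MIN is exactly the value whose only set bit is
   the sign bit, which is what wi::only_sign_bit_p detects.  Every
   other signed value, e.g. -2147483647, negates safely.  */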
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
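/* A worked illustration (not code from this file): for signed int
   with undefined overflow, negate_expr_p answers

     -(x + 5)         -> true   (fold as (-5) - x)
     -(x - y)         -> true   (fold as y - x)
     -(x * 7)         -> true   (fold as x * -7)
     -((int) x >> 31) -> true   (sign-bit extraction; see RSHIFT_EXPR)
     -(-x)            -> true   (a NEGATE_EXPR just drops)

   whereas for unsigned types or modes honoring signed zeros most of
   these answer false.  */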
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
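/* Behavior sketch (illustrative, not code from this file):
   negate_expr is total where fold_negate_expr is partial:

     tree five = build_int_cst (integer_type_node, 5);
     tree n1 = negate_expr (five);      // INTEGER_CST -5, fully folded
     tree n2 = negate_expr (some_var);  // fresh NEGATE_EXPR <some_var>
     tree n3 = negate_expr (NULL_TREE); // NULL_TREE is passed through

   SOME_VAR stands for an arbitrary variable reference and is
   hypothetical.  */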
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
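/* Worked example (illustrative, not code from this file): splitting
   IN = (x - 7) with CODE = PLUS_EXPR and NEGATE_P = 0 yields

     return value (variable part)  = x
     *litp                         = 0
     *minus_litp                   = 7     (the subtracted literal)
     *conp                         = 0

   so the caller can re-associate x with other variable parts and
   combine 7 with other literals, finally rebuilding the expression
   with associate_trees below.  */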
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
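/* Illustration of the rounding variants (not code from this file):
   combining -7 and 2 in a signed type gives

     TRUNC_DIV_EXPR  -> -3   TRUNC_MOD_EXPR  -> -1
     FLOOR_DIV_EXPR  -> -4   FLOOR_MOD_EXPR  ->  1
     CEIL_DIV_EXPR   -> -3   CEIL_MOD_EXPR   -> -1
     ROUND_DIV_EXPR  -> -4   ROUND_MOD_EXPR  ->  1

   and each div/mod pair satisfies div * 2 + mod == -7.  A zero
   divisor makes every variant return NULL_TREE rather than fold.  */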
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;

	  /* The direction of VEC_RSHIFT_EXPR is endian dependent.
	     For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
	     vector element, but last element if BYTES_BIG_ENDIAN.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = -offset;

	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
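/* Worked example of the "straight" complex division above
   (illustrative, not code from this file): with flag_complex_method
   == 0, folding (3 + 4i) / (1 + 2i) computes

     t  = 1*1 + 2*2        = 5
     tr = (3*1 + 4*2) / t  = 11/5
     ti = (4*1 - 3*2) / t  = -2/5

   i.e. 2.2 - 0.4i.  The "wide" variant reaches the same value but
   divides through by the larger-magnitude component of the divisor
   first, so intermediate squares like br*br are less likely to
   overflow.  */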
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
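/* Example (illustrative, not code from this file): with sizetype
   operands 4 and 12,

     size_diffop_loc (loc, size_int (4), size_int (12))

   cannot just fold 4 - 12 in unsigned sizetype, so it computes
   12 - 4 = 8 in sizetype, converts to ssizetype, and subtracts
   from zero, yielding the ssizetype constant -8 with no spurious
   overflow.  */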
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
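/* Saturation example (illustrative, not code from this file):
   converting REAL_CSTs to 32-bit signed int with FIX_TRUNC_EXPR,

     3.9      ->  3            (truncate toward zero)
     NaN      ->  0            with TREE_OVERFLOW set
     1.0e100  ->  2147483647   (INT_MAX) with TREE_OVERFLOW set
     -1.0e100 -> -2147483648   (INT_MIN) with TREE_OVERFLOW set  */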
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to TEMP whenever the truncated fractional
     bits are not zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
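/* Dispatch sketch (illustrative, not code from this file):

     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, <REAL_CST 3.7>)
       -> fold_convert_const_int_from_real -> INTEGER_CST 3
     fold_convert_const (NOP_EXPR, long_integer_type_node, <INTEGER_CST 3>)
       -> fold_convert_const_int_from_int -> INTEGER_CST 3 of long type
     fold_convert_const (NOP_EXPR, type, <VAR_DECL>)
       -> NULL_TREE, since only constants fold here.  */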
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
2079 tem = fold_ignored_result (arg);
2080 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2082 default:
2083 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2084 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2085 gcc_unreachable ();
2087 fold_convert_exit:
2088 protected_set_expr_location_unshare (tem, loc);
2089 return tem;
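/* For example (illustrative): converting a COMPLEX_TYPE value to a
   REAL_TYPE above folds to a conversion of its REALPART_EXPR, and
   converting a COMPLEX_EXPR to another complex type converts the two
   parts separately, avoiding a save_expr of the whole operand.  */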
2092 /* Return false if expr can be assumed not to be an lvalue, true
2093 otherwise. */
2095 static bool
2096 maybe_lvalue_p (const_tree x)
2098 /* We only need to wrap lvalue tree codes. */
2099 switch (TREE_CODE (x))
2101 case VAR_DECL:
2102 case PARM_DECL:
2103 case RESULT_DECL:
2104 case LABEL_DECL:
2105 case FUNCTION_DECL:
2106 case SSA_NAME:
2108 case COMPONENT_REF:
2109 case MEM_REF:
2110 case INDIRECT_REF:
2111 case ARRAY_REF:
2112 case ARRAY_RANGE_REF:
2113 case BIT_FIELD_REF:
2114 case OBJ_TYPE_REF:
2116 case REALPART_EXPR:
2117 case IMAGPART_EXPR:
2118 case PREINCREMENT_EXPR:
2119 case PREDECREMENT_EXPR:
2120 case SAVE_EXPR:
2121 case TRY_CATCH_EXPR:
2122 case WITH_CLEANUP_EXPR:
2123 case COMPOUND_EXPR:
2124 case MODIFY_EXPR:
2125 case TARGET_EXPR:
2126 case COND_EXPR:
2127 case BIND_EXPR:
2128 break;
2130 default:
2131 /* Assume the worst for front-end tree codes. */
2132 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2133 break;
2134 return false;
2137 return true;
2140 /* Return an expr equal to X but certainly not valid as an lvalue. */
2142 tree
2143 non_lvalue_loc (location_t loc, tree x)
2145 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2146 us. */
2147 if (in_gimple_form)
2148 return x;
2150 if (! maybe_lvalue_p (x))
2151 return x;
2152 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2155 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2156 Zero means allow extended lvalues. */
2158 int pedantic_lvalues;
2160 /* When pedantic, return an expr equal to X but certainly not valid as a
2161 pedantic lvalue. Otherwise, return X. */
2163 static tree
2164 pedantic_non_lvalue_loc (location_t loc, tree x)
2166 if (pedantic_lvalues)
2167 return non_lvalue_loc (loc, x);
2169 return protected_set_expr_location_unshare (x, loc);
2172 /* Given a tree comparison code, return the code that is the logical inverse.
2173 It is generally not safe to do this for floating-point comparisons, except
2174 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2175 ERROR_MARK in this case. */
2177 enum tree_code
2178 invert_tree_comparison (enum tree_code code, bool honor_nans)
2180 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2181 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2182 return ERROR_MARK;
2184 switch (code)
2186 case EQ_EXPR:
2187 return NE_EXPR;
2188 case NE_EXPR:
2189 return EQ_EXPR;
2190 case GT_EXPR:
2191 return honor_nans ? UNLE_EXPR : LE_EXPR;
2192 case GE_EXPR:
2193 return honor_nans ? UNLT_EXPR : LT_EXPR;
2194 case LT_EXPR:
2195 return honor_nans ? UNGE_EXPR : GE_EXPR;
2196 case LE_EXPR:
2197 return honor_nans ? UNGT_EXPR : GT_EXPR;
2198 case LTGT_EXPR:
2199 return UNEQ_EXPR;
2200 case UNEQ_EXPR:
2201 return LTGT_EXPR;
2202 case UNGT_EXPR:
2203 return LE_EXPR;
2204 case UNGE_EXPR:
2205 return LT_EXPR;
2206 case UNLT_EXPR:
2207 return GE_EXPR;
2208 case UNLE_EXPR:
2209 return GT_EXPR;
2210 case ORDERED_EXPR:
2211 return UNORDERED_EXPR;
2212 case UNORDERED_EXPR:
2213 return ORDERED_EXPR;
2214 default:
2215 gcc_unreachable ();
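/* Examples of the above (illustrative):
     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR
   and when both NaNs are honored and flag_trapping_math is set,
   inverting LT_EXPR yields ERROR_MARK, since UNGE_EXPR would lose the
   trap on NaN operands.  */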
2219 /* Similar, but return the comparison that results if the operands are
2220 swapped. This is safe for floating-point. */
2222 enum tree_code
2223 swap_tree_comparison (enum tree_code code)
2225 switch (code)
2227 case EQ_EXPR:
2228 case NE_EXPR:
2229 case ORDERED_EXPR:
2230 case UNORDERED_EXPR:
2231 case LTGT_EXPR:
2232 case UNEQ_EXPR:
2233 return code;
2234 case GT_EXPR:
2235 return LT_EXPR;
2236 case GE_EXPR:
2237 return LE_EXPR;
2238 case LT_EXPR:
2239 return GT_EXPR;
2240 case LE_EXPR:
2241 return GE_EXPR;
2242 case UNGT_EXPR:
2243 return UNLT_EXPR;
2244 case UNGE_EXPR:
2245 return UNLE_EXPR;
2246 case UNLT_EXPR:
2247 return UNGT_EXPR;
2248 case UNLE_EXPR:
2249 return UNGE_EXPR;
2250 default:
2251 gcc_unreachable ();
2256 /* Convert a comparison tree code from an enum tree_code representation
2257 into a compcode bit-based encoding. This function is the inverse of
2258 compcode_to_comparison. */
2260 static enum comparison_code
2261 comparison_to_compcode (enum tree_code code)
2263 switch (code)
2265 case LT_EXPR:
2266 return COMPCODE_LT;
2267 case EQ_EXPR:
2268 return COMPCODE_EQ;
2269 case LE_EXPR:
2270 return COMPCODE_LE;
2271 case GT_EXPR:
2272 return COMPCODE_GT;
2273 case NE_EXPR:
2274 return COMPCODE_NE;
2275 case GE_EXPR:
2276 return COMPCODE_GE;
2277 case ORDERED_EXPR:
2278 return COMPCODE_ORD;
2279 case UNORDERED_EXPR:
2280 return COMPCODE_UNORD;
2281 case UNLT_EXPR:
2282 return COMPCODE_UNLT;
2283 case UNEQ_EXPR:
2284 return COMPCODE_UNEQ;
2285 case UNLE_EXPR:
2286 return COMPCODE_UNLE;
2287 case UNGT_EXPR:
2288 return COMPCODE_UNGT;
2289 case LTGT_EXPR:
2290 return COMPCODE_LTGT;
2291 case UNGE_EXPR:
2292 return COMPCODE_UNGE;
2293 default:
2294 gcc_unreachable ();
2298 /* Convert a compcode bit-based encoding of a comparison operator back
2299 to GCC's enum tree_code representation. This function is the
2300 inverse of comparison_to_compcode. */
2302 static enum tree_code
2303 compcode_to_comparison (enum comparison_code code)
2305 switch (code)
2307 case COMPCODE_LT:
2308 return LT_EXPR;
2309 case COMPCODE_EQ:
2310 return EQ_EXPR;
2311 case COMPCODE_LE:
2312 return LE_EXPR;
2313 case COMPCODE_GT:
2314 return GT_EXPR;
2315 case COMPCODE_NE:
2316 return NE_EXPR;
2317 case COMPCODE_GE:
2318 return GE_EXPR;
2319 case COMPCODE_ORD:
2320 return ORDERED_EXPR;
2321 case COMPCODE_UNORD:
2322 return UNORDERED_EXPR;
2323 case COMPCODE_UNLT:
2324 return UNLT_EXPR;
2325 case COMPCODE_UNEQ:
2326 return UNEQ_EXPR;
2327 case COMPCODE_UNLE:
2328 return UNLE_EXPR;
2329 case COMPCODE_UNGT:
2330 return UNGT_EXPR;
2331 case COMPCODE_LTGT:
2332 return LTGT_EXPR;
2333 case COMPCODE_UNGE:
2334 return UNGE_EXPR;
2335 default:
2336 gcc_unreachable ();
2340 /* Return a tree for the comparison which is the combination of
2341 doing the AND or OR (depending on CODE) of the two operations LCODE
2342 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2343 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2344 if this makes the transformation invalid. */
2346 tree
2347 combine_comparisons (location_t loc,
2348 enum tree_code code, enum tree_code lcode,
2349 enum tree_code rcode, tree truth_type,
2350 tree ll_arg, tree lr_arg)
2352 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2353 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2354 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2355 int compcode;
2357 switch (code)
2359 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2360 compcode = lcompcode & rcompcode;
2361 break;
2363 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2364 compcode = lcompcode | rcompcode;
2365 break;
2367 default:
2368 return NULL_TREE;
2371 if (!honor_nans)
2373 /* Eliminate unordered comparisons, as well as LTGT and ORD
2374 which are not used unless the mode has NaNs. */
2375 compcode &= ~COMPCODE_UNORD;
2376 if (compcode == COMPCODE_LTGT)
2377 compcode = COMPCODE_NE;
2378 else if (compcode == COMPCODE_ORD)
2379 compcode = COMPCODE_TRUE;
2381 else if (flag_trapping_math)
2383 /* Check that the original operation and the optimized ones will trap
2384 under the same condition. */
2385 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2386 && (lcompcode != COMPCODE_EQ)
2387 && (lcompcode != COMPCODE_ORD);
2388 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2389 && (rcompcode != COMPCODE_EQ)
2390 && (rcompcode != COMPCODE_ORD);
2391 bool trap = (compcode & COMPCODE_UNORD) == 0
2392 && (compcode != COMPCODE_EQ)
2393 && (compcode != COMPCODE_ORD);
2395 /* In a short-circuited boolean expression the LHS might be
2396 such that the RHS, if evaluated, will never trap. For
2397 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2398 if neither x nor y is NaN. (This is a mixed blessing: for
2399 example, the expression above will never trap, hence
2400 optimizing it to x < y would be invalid). */
2401 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2402 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2403 rtrap = false;
2405 /* If the comparison was short-circuited, and only the RHS
2406 trapped, we may now generate a spurious trap. */
2407 if (rtrap && !ltrap
2408 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2409 return NULL_TREE;
2411 /* If we changed the conditions that cause a trap, we lose. */
2412 if ((ltrap || rtrap) != trap)
2413 return NULL_TREE;
2416 if (compcode == COMPCODE_TRUE)
2417 return constant_boolean_node (true, truth_type);
2418 else if (compcode == COMPCODE_FALSE)
2419 return constant_boolean_node (false, truth_type);
2420 else
2422 enum tree_code tcode;
2424 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2425 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
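/* For example, combining (a < b) || (a == b) with TRUTH_ORIF_EXPR gives
   compcode = COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the result is
   a <= b, unless the NaN and trap checks above reject the fold.  */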
2429 /* Return nonzero if two operands (typically of the same tree node)
2430 are necessarily equal. If either argument has side-effects this
2431 function returns zero. FLAGS modifies behavior as follows:
2433 If OEP_ONLY_CONST is set, only return nonzero for constants.
2434 This function tests whether the operands are indistinguishable;
2435 it does not test whether they are equal using C's == operation.
2436 The distinction is important for IEEE floating point, because
2437 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2438 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2440 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2441 even though it may hold multiple values during a function.
2442 This is because a GCC tree node guarantees that nothing else is
2443 executed between the evaluation of its "operands" (which may often
2444 be evaluated in arbitrary order). Hence if the operands themselves
2445 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2446 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2447 unset means assuming isochronic (or instantaneous) tree equivalence.
2448 Unless comparing arbitrary expression trees, such as from different
2449 statements, this flag can usually be left unset.
2451 If OEP_PURE_SAME is set, then pure functions with identical arguments
2452 are considered the same. It is used when the caller has other ways
2453 to ensure that global memory is unchanged in between. */
2455 int
2456 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2458 /* If either is ERROR_MARK, they aren't equal. */
2459 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2460 || TREE_TYPE (arg0) == error_mark_node
2461 || TREE_TYPE (arg1) == error_mark_node)
2462 return 0;
2464 /* Similar, if either does not have a type (like a released SSA name),
2465 they aren't equal. */
2466 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2467 return 0;
2469 /* Check equality of integer constants before bailing out due to
2470 precision differences. */
2471 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2472 return tree_int_cst_equal (arg0, arg1);
2474 /* If the two types don't have the same signedness, then we can't consider
2475 them equal. We must check this before the STRIP_NOPS calls
2476 because they may change the signedness of the arguments. As pointers
2477 strictly don't have a signedness, require either two pointers or
2478 two non-pointers as well. */
2479 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2480 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2481 return 0;
2483 /* We cannot consider pointers to different address spaces equal. */
2484 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2485 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2486 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2487 return 0;
2489 /* If the two types don't have the same precision, then it is not safe
2490 to strip NOPs. */
2491 if (element_precision (TREE_TYPE (arg0))
2492 != element_precision (TREE_TYPE (arg1)))
2493 return 0;
2495 STRIP_NOPS (arg0);
2496 STRIP_NOPS (arg1);
2498 /* In case both args are comparisons but with different comparison
2499 code, try to swap the comparison operands of one arg to produce
2500 a match and compare that variant. */
2501 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2502 && COMPARISON_CLASS_P (arg0)
2503 && COMPARISON_CLASS_P (arg1))
2505 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2507 if (TREE_CODE (arg0) == swap_code)
2508 return operand_equal_p (TREE_OPERAND (arg0, 0),
2509 TREE_OPERAND (arg1, 1), flags)
2510 && operand_equal_p (TREE_OPERAND (arg0, 1),
2511 TREE_OPERAND (arg1, 0), flags);
2514 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2515 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2516 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2517 return 0;
2519 /* This is needed for conversions and for COMPONENT_REF.
2520 Might as well play it safe and always test this. */
2521 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2522 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2523 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2524 return 0;
2526 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2527 We don't care about side effects in that case because the SAVE_EXPR
2528 takes care of that for us. In all other cases, two expressions are
2529 equal if they have no side effects. If we have two identical
2530 expressions with side effects that should be treated the same due
2531 to the only side effects being identical SAVE_EXPR's, that will
2532 be detected in the recursive calls below.
2533 If we are taking an invariant address of two identical objects
2534 they are necessarily equal as well. */
2535 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2536 && (TREE_CODE (arg0) == SAVE_EXPR
2537 || (flags & OEP_CONSTANT_ADDRESS_OF)
2538 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2539 return 1;
2541 /* Next handle constant cases, those for which we can return 1 even
2542 if ONLY_CONST is set. */
2543 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2544 switch (TREE_CODE (arg0))
2546 case INTEGER_CST:
2547 return tree_int_cst_equal (arg0, arg1);
2549 case FIXED_CST:
2550 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2551 TREE_FIXED_CST (arg1));
2553 case REAL_CST:
2554 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2555 TREE_REAL_CST (arg1)))
2556 return 1;
2559 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2561 /* If we do not distinguish between signed and unsigned zero,
2562 consider them equal. */
2563 if (real_zerop (arg0) && real_zerop (arg1))
2564 return 1;
2566 return 0;
2568 case VECTOR_CST:
2570 unsigned i;
2572 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2573 return 0;
2575 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2577 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2578 VECTOR_CST_ELT (arg1, i), flags))
2579 return 0;
2581 return 1;
2584 case COMPLEX_CST:
2585 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2586 flags)
2587 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2588 flags));
2590 case STRING_CST:
2591 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2592 && ! memcmp (TREE_STRING_POINTER (arg0),
2593 TREE_STRING_POINTER (arg1),
2594 TREE_STRING_LENGTH (arg0)));
2596 case ADDR_EXPR:
2597 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2598 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2599 ? OEP_CONSTANT_ADDRESS_OF : 0);
2600 default:
2601 break;
2604 if (flags & OEP_ONLY_CONST)
2605 return 0;
2607 /* Define macros to test an operand from arg0 and arg1 for equality and a
2608 variant that allows null and views null as being different from any
2609 non-null value. In the latter case, if either is null, then both
2610 must be; otherwise, do the normal comparison. */
2611 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2612 TREE_OPERAND (arg1, N), flags)
2614 #define OP_SAME_WITH_NULL(N) \
2615 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2616 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2618 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2620 case tcc_unary:
2621 /* Two conversions are equal only if signedness and modes match. */
2622 switch (TREE_CODE (arg0))
2624 CASE_CONVERT:
2625 case FIX_TRUNC_EXPR:
2626 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2627 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2628 return 0;
2629 break;
2630 default:
2631 break;
2634 return OP_SAME (0);
2637 case tcc_comparison:
2638 case tcc_binary:
2639 if (OP_SAME (0) && OP_SAME (1))
2640 return 1;
2642 /* For commutative ops, allow the other order. */
2643 return (commutative_tree_code (TREE_CODE (arg0))
2644 && operand_equal_p (TREE_OPERAND (arg0, 0),
2645 TREE_OPERAND (arg1, 1), flags)
2646 && operand_equal_p (TREE_OPERAND (arg0, 1),
2647 TREE_OPERAND (arg1, 0), flags));
2649 case tcc_reference:
2650 /* If either of the pointer (or reference) expressions we are
2651 dereferencing contain a side effect, these cannot be equal,
2652 but their addresses can be. */
2653 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2654 && (TREE_SIDE_EFFECTS (arg0)
2655 || TREE_SIDE_EFFECTS (arg1)))
2656 return 0;
2658 switch (TREE_CODE (arg0))
2660 case INDIRECT_REF:
2661 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2662 return OP_SAME (0);
2664 case REALPART_EXPR:
2665 case IMAGPART_EXPR:
2666 return OP_SAME (0);
2668 case TARGET_MEM_REF:
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 /* Require equal extra operands and then fall through to MEM_REF
2671 handling of the two common operands. */
2672 if (!OP_SAME_WITH_NULL (2)
2673 || !OP_SAME_WITH_NULL (3)
2674 || !OP_SAME_WITH_NULL (4))
2675 return 0;
2676 /* Fallthru. */
2677 case MEM_REF:
2678 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2679 /* Require equal access sizes, and similar pointer types.
2680 We can have incomplete types for array references of
2681 variable-sized arrays from the Fortran frontend
2682 though. Also verify the types are compatible. */
2683 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2684 || (TYPE_SIZE (TREE_TYPE (arg0))
2685 && TYPE_SIZE (TREE_TYPE (arg1))
2686 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2687 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2688 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2689 && alias_ptr_types_compatible_p
2690 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2691 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2692 && OP_SAME (0) && OP_SAME (1));
2694 case ARRAY_REF:
2695 case ARRAY_RANGE_REF:
2696 /* Operands 2 and 3 may be null.
2697 Compare the array index by value first if it is constant, as we
2698 may have different types but the same value here. */
2699 if (!OP_SAME (0))
2700 return 0;
2701 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2702 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2703 TREE_OPERAND (arg1, 1))
2704 || OP_SAME (1))
2705 && OP_SAME_WITH_NULL (2)
2706 && OP_SAME_WITH_NULL (3));
2708 case COMPONENT_REF:
2709 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2710 may be NULL when we're called to compare MEM_EXPRs. */
2711 if (!OP_SAME_WITH_NULL (0)
2712 || !OP_SAME (1))
2713 return 0;
2714 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2715 return OP_SAME_WITH_NULL (2);
2717 case BIT_FIELD_REF:
2718 if (!OP_SAME (0))
2719 return 0;
2720 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2721 return OP_SAME (1) && OP_SAME (2);
2723 default:
2724 return 0;
2727 case tcc_expression:
2728 switch (TREE_CODE (arg0))
2730 case ADDR_EXPR:
2731 case TRUTH_NOT_EXPR:
2732 return OP_SAME (0);
2734 case TRUTH_ANDIF_EXPR:
2735 case TRUTH_ORIF_EXPR:
2736 return OP_SAME (0) && OP_SAME (1);
2738 case FMA_EXPR:
2739 case WIDEN_MULT_PLUS_EXPR:
2740 case WIDEN_MULT_MINUS_EXPR:
2741 if (!OP_SAME (2))
2742 return 0;
2743 /* The multiplication operands are commutative. */
2744 /* FALLTHRU */
2746 case TRUTH_AND_EXPR:
2747 case TRUTH_OR_EXPR:
2748 case TRUTH_XOR_EXPR:
2749 if (OP_SAME (0) && OP_SAME (1))
2750 return 1;
2752 /* Otherwise take into account this is a commutative operation. */
2753 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2754 TREE_OPERAND (arg1, 1), flags)
2755 && operand_equal_p (TREE_OPERAND (arg0, 1),
2756 TREE_OPERAND (arg1, 0), flags));
2758 case COND_EXPR:
2759 case VEC_COND_EXPR:
2760 case DOT_PROD_EXPR:
2761 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2763 default:
2764 return 0;
2767 case tcc_vl_exp:
2768 switch (TREE_CODE (arg0))
2770 case CALL_EXPR:
2771 /* If the CALL_EXPRs call different functions, then they
2772 clearly cannot be equal. */
2773 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2774 flags))
2775 return 0;
2778 unsigned int cef = call_expr_flags (arg0);
2779 if (flags & OEP_PURE_SAME)
2780 cef &= ECF_CONST | ECF_PURE;
2781 else
2782 cef &= ECF_CONST;
2783 if (!cef)
2784 return 0;
2787 /* Now see if all the arguments are the same. */
2789 const_call_expr_arg_iterator iter0, iter1;
2790 const_tree a0, a1;
2791 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2792 a1 = first_const_call_expr_arg (arg1, &iter1);
2793 a0 && a1;
2794 a0 = next_const_call_expr_arg (&iter0),
2795 a1 = next_const_call_expr_arg (&iter1))
2796 if (! operand_equal_p (a0, a1, flags))
2797 return 0;
2799 /* If we get here and both argument lists are exhausted
2800 then the CALL_EXPRs are equal. */
2801 return ! (a0 || a1);
2803 default:
2804 return 0;
2807 case tcc_declaration:
2808 /* Consider __builtin_sqrt equal to sqrt. */
2809 return (TREE_CODE (arg0) == FUNCTION_DECL
2810 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2811 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2812 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2814 default:
2815 return 0;
2818 #undef OP_SAME
2819 #undef OP_SAME_WITH_NULL
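/* Under the rules above, e.g., "a + b" and "b + a" compare equal
   (PLUS_EXPR is commutative), while two textually identical calls to a
   function that is neither const nor pure do not, since their results
   may differ between the two evaluations.  */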
2822 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2823 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2825 When in doubt, return 0. */
2827 static int
2828 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2830 int unsignedp1, unsignedpo;
2831 tree primarg0, primarg1, primother;
2832 unsigned int correct_width;
2834 if (operand_equal_p (arg0, arg1, 0))
2835 return 1;
2837 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2838 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2839 return 0;
2841 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2842 and see if the inner values are the same. This removes any
2843 signedness comparison, which doesn't matter here. */
2844 primarg0 = arg0, primarg1 = arg1;
2845 STRIP_NOPS (primarg0);
2846 STRIP_NOPS (primarg1);
2847 if (operand_equal_p (primarg0, primarg1, 0))
2848 return 1;
2850 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2851 actual comparison operand, ARG0.
2853 First throw away any conversions to wider types
2854 already present in the operands. */
2856 primarg1 = get_narrower (arg1, &unsignedp1);
2857 primother = get_narrower (other, &unsignedpo);
2859 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2860 if (unsignedp1 == unsignedpo
2861 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2862 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2864 tree type = TREE_TYPE (arg0);
2866 /* Make sure shorter operand is extended the right way
2867 to match the longer operand. */
2868 primarg1 = fold_convert (signed_or_unsigned_type_for
2869 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2871 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2872 return 1;
2875 return 0;
2878 /* See if ARG is an expression that is either a comparison or is performing
2879 arithmetic on comparisons. The comparisons must only be comparing
2880 two different values, which will be stored in *CVAL1 and *CVAL2; if
2881 they are nonzero it means that some operands have already been found.
2882 No variables may be used anywhere else in the expression except in the
2883 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2884 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2886 If this is true, return 1. Otherwise, return zero. */
2888 static int
2889 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2891 enum tree_code code = TREE_CODE (arg);
2892 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2894 /* We can handle some of the tcc_expression cases here. */
2895 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2896 tclass = tcc_unary;
2897 else if (tclass == tcc_expression
2898 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2899 || code == COMPOUND_EXPR))
2900 tclass = tcc_binary;
2902 else if (tclass == tcc_expression && code == SAVE_EXPR
2903 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2905 /* If we've already found a CVAL1 or CVAL2, this expression is
2906 too complex to handle. */
2907 if (*cval1 || *cval2)
2908 return 0;
2910 tclass = tcc_unary;
2911 *save_p = 1;
2914 switch (tclass)
2916 case tcc_unary:
2917 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2919 case tcc_binary:
2920 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2922 cval1, cval2, save_p));
2924 case tcc_constant:
2925 return 1;
2927 case tcc_expression:
2928 if (code == COND_EXPR)
2929 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2930 cval1, cval2, save_p)
2931 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2932 cval1, cval2, save_p)
2933 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2934 cval1, cval2, save_p));
2935 return 0;
2937 case tcc_comparison:
2938 /* First see if we can handle the first operand, then the second. For
2939 the second operand, we know *CVAL1 can't be zero. It must be that
2940 one side of the comparison is each of the values; test for the
2941 case where this isn't true by failing if the two operands
2942 are the same. */
2944 if (operand_equal_p (TREE_OPERAND (arg, 0),
2945 TREE_OPERAND (arg, 1), 0))
2946 return 0;
2948 if (*cval1 == 0)
2949 *cval1 = TREE_OPERAND (arg, 0);
2950 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2952 else if (*cval2 == 0)
2953 *cval2 = TREE_OPERAND (arg, 0);
2954 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2956 else
2957 return 0;
2959 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2961 else if (*cval2 == 0)
2962 *cval2 = TREE_OPERAND (arg, 1);
2963 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2965 else
2966 return 0;
2968 return 1;
2970 default:
2971 return 0;
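/* E.g. for ARG = (x < y) && (x == y), both comparisons mention only the
   two values x and y, so this returns 1 with *CVAL1 = x and *CVAL2 = y,
   whereas (x < y) && (x == z) fails because a third value appears.  */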
2975 /* ARG is a tree that is known to contain just arithmetic operations and
2976 comparisons. Evaluate the operations in the tree substituting NEW0 for
2977 any occurrence of OLD0 as an operand of a comparison and likewise for
2978 NEW1 and OLD1. */
2980 static tree
2981 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2982 tree old1, tree new1)
2984 tree type = TREE_TYPE (arg);
2985 enum tree_code code = TREE_CODE (arg);
2986 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2988 /* We can handle some of the tcc_expression cases here. */
2989 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2990 tclass = tcc_unary;
2991 else if (tclass == tcc_expression
2992 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2993 tclass = tcc_binary;
2995 switch (tclass)
2997 case tcc_unary:
2998 return fold_build1_loc (loc, code, type,
2999 eval_subst (loc, TREE_OPERAND (arg, 0),
3000 old0, new0, old1, new1));
3002 case tcc_binary:
3003 return fold_build2_loc (loc, code, type,
3004 eval_subst (loc, TREE_OPERAND (arg, 0),
3005 old0, new0, old1, new1),
3006 eval_subst (loc, TREE_OPERAND (arg, 1),
3007 old0, new0, old1, new1));
3009 case tcc_expression:
3010 switch (code)
3012 case SAVE_EXPR:
3013 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3014 old1, new1);
3016 case COMPOUND_EXPR:
3017 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3018 old1, new1);
3020 case COND_EXPR:
3021 return fold_build3_loc (loc, code, type,
3022 eval_subst (loc, TREE_OPERAND (arg, 0),
3023 old0, new0, old1, new1),
3024 eval_subst (loc, TREE_OPERAND (arg, 1),
3025 old0, new0, old1, new1),
3026 eval_subst (loc, TREE_OPERAND (arg, 2),
3027 old0, new0, old1, new1));
3028 default:
3029 break;
3031 /* Fall through - ??? */
3033 case tcc_comparison:
3035 tree arg0 = TREE_OPERAND (arg, 0);
3036 tree arg1 = TREE_OPERAND (arg, 1);
3038 /* We need to check both for exact equality and tree equality. The
3039 former will be true if the operand has a side-effect. In that
3040 case, we know the operand occurred exactly once. */
3042 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3043 arg0 = new0;
3044 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3045 arg0 = new1;
3047 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3048 arg1 = new0;
3049 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3050 arg1 = new1;
3052 return fold_build2_loc (loc, code, type, arg0, arg1);
3055 default:
3056 return arg;
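/* E.g. eval_subst on ARG = (x < y) with OLD0 = x, NEW0 = a, OLD1 = y,
   NEW1 = b rebuilds the comparison as (a < b).  */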
3060 /* Return a tree for the case when the result of an expression is RESULT
3061 converted to TYPE and OMITTED was previously an operand of the expression
3062 but is now not needed (e.g., we folded OMITTED * 0).
3064 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3065 the conversion of RESULT to TYPE. */
3067 tree
3068 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3070 tree t = fold_convert_loc (loc, type, result);
3072 /* If the resulting operand is an empty statement, just return the omitted
3073 statement cast to void. */
3074 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3075 return build1_loc (loc, NOP_EXPR, void_type_node,
3076 fold_ignored_result (omitted));
3078 if (TREE_SIDE_EFFECTS (omitted))
3079 return build2_loc (loc, COMPOUND_EXPR, type,
3080 fold_ignored_result (omitted), t);
3082 return non_lvalue_loc (loc, t);
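/* E.g. when "f () * 0" is folded to 0, the call still has side effects,
   so omit_one_operand_loc produces the COMPOUND_EXPR (f (), 0) rather
   than dropping the call.  */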
3085 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3087 static tree
3088 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3089 tree omitted)
3091 tree t = fold_convert_loc (loc, type, result);
3093 /* If the resulting operand is an empty statement, just return the omitted
3094 statement cast to void. */
3095 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3096 return build1_loc (loc, NOP_EXPR, void_type_node,
3097 fold_ignored_result (omitted));
3099 if (TREE_SIDE_EFFECTS (omitted))
3100 return build2_loc (loc, COMPOUND_EXPR, type,
3101 fold_ignored_result (omitted), t);
3103 return pedantic_non_lvalue_loc (loc, t);
3106 /* Return a tree for the case when the result of an expression is RESULT
3107 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3108 of the expression but are now not needed.
3110 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3111 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3112 evaluated before OMITTED2. Otherwise, if neither has side effects,
3113 just do the conversion of RESULT to TYPE. */
3115 tree
3116 omit_two_operands_loc (location_t loc, tree type, tree result,
3117 tree omitted1, tree omitted2)
3119 tree t = fold_convert_loc (loc, type, result);
3121 if (TREE_SIDE_EFFECTS (omitted2))
3122 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3123 if (TREE_SIDE_EFFECTS (omitted1))
3124 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3126 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3130 /* Return a simplified tree node for the truth-negation of ARG. This
3131 never alters ARG itself. We assume that ARG is an operation that
3132 returns a truth value (0 or 1).
3134 FIXME: one would think we would fold the result, but it causes
3135 problems with the dominator optimizer. */
3137 static tree
3138 fold_truth_not_expr (location_t loc, tree arg)
3140 tree type = TREE_TYPE (arg);
3141 enum tree_code code = TREE_CODE (arg);
3142 location_t loc1, loc2;
3144 /* If this is a comparison, we can simply invert it, except for
3145 floating-point non-equality comparisons, in which case we just
3146 enclose a TRUTH_NOT_EXPR around what we have. */
3148 if (TREE_CODE_CLASS (code) == tcc_comparison)
3150 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3151 if (FLOAT_TYPE_P (op_type)
3152 && flag_trapping_math
3153 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3154 && code != NE_EXPR && code != EQ_EXPR)
3155 return NULL_TREE;
3157 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3158 if (code == ERROR_MARK)
3159 return NULL_TREE;
3161 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3162 TREE_OPERAND (arg, 1));
3165 switch (code)
3167 case INTEGER_CST:
3168 return constant_boolean_node (integer_zerop (arg), type);
3170 case TRUTH_AND_EXPR:
3171 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3172 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3173 return build2_loc (loc, TRUTH_OR_EXPR, type,
3174 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3175 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3177 case TRUTH_OR_EXPR:
3178 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3179 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3180 return build2_loc (loc, TRUTH_AND_EXPR, type,
3181 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3182 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3184 case TRUTH_XOR_EXPR:
3185 /* Here we can invert either operand. We invert the first operand
3186 unless the second operand is a TRUTH_NOT_EXPR in which case our
3187 result is the XOR of the first operand with the inside of the
3188 negation of the second operand. */
3190 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3191 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3192 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3193 else
3194 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3195 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3196 TREE_OPERAND (arg, 1));
3198 case TRUTH_ANDIF_EXPR:
3199 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3200 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3201 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3202 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3203 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3205 case TRUTH_ORIF_EXPR:
3206 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3207 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3208 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3209 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3210 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3212 case TRUTH_NOT_EXPR:
3213 return TREE_OPERAND (arg, 0);
3215 case COND_EXPR:
3217 tree arg1 = TREE_OPERAND (arg, 1);
3218 tree arg2 = TREE_OPERAND (arg, 2);
3220 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3221 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3223 /* A COND_EXPR may have a throw as one operand, which
3224 then has void type. Just leave void operands
3225 as they are. */
3226 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3227 VOID_TYPE_P (TREE_TYPE (arg1))
3228 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3229 VOID_TYPE_P (TREE_TYPE (arg2))
3230 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3233 case COMPOUND_EXPR:
3234 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3235 return build2_loc (loc, COMPOUND_EXPR, type,
3236 TREE_OPERAND (arg, 0),
3237 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3239 case NON_LVALUE_EXPR:
3240 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3241 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3243 CASE_CONVERT:
3244 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3245 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3247 /* ... fall through ... */
3249 case FLOAT_EXPR:
3250 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3251 return build1_loc (loc, TREE_CODE (arg), type,
3252 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3254 case BIT_AND_EXPR:
3255 if (!integer_onep (TREE_OPERAND (arg, 1)))
3256 return NULL_TREE;
3257 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3259 case SAVE_EXPR:
3260 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3262 case CLEANUP_POINT_EXPR:
3263 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3264 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3265 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3267 default:
3268 return NULL_TREE;
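/* So, per the cases above, !(a && b) becomes !a || !b, !(x < y) becomes
   x >= y (or its UNGE_EXPR variant when NaNs are honored), and an
   inverted COND_EXPR simply inverts its two non-void value operands.  */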
3272 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3273 assume that ARG is an operation that returns a truth value (0 or 1
3274 for scalars, 0 or -1 for vectors). Return the folded expression if
3275 folding is successful. Otherwise, return NULL_TREE. */
3277 static tree
3278 fold_invert_truthvalue (location_t loc, tree arg)
3280 tree type = TREE_TYPE (arg);
3281 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3282 ? BIT_NOT_EXPR
3283 : TRUTH_NOT_EXPR,
3284 type, arg);
3287 /* Return a simplified tree node for the truth-negation of ARG. This
3288 never alters ARG itself. We assume that ARG is an operation that
3289 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3291 tree
3292 invert_truthvalue_loc (location_t loc, tree arg)
3294 if (TREE_CODE (arg) == ERROR_MARK)
3295 return arg;
3297 tree type = TREE_TYPE (arg);
3298 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3299 ? BIT_NOT_EXPR
3300 : TRUTH_NOT_EXPR,
3301 type, arg);
3304 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3305 operands are another bit-wise operation with a common input. If so,
3306 distribute the bit operations to save an operation and possibly two if
3307 constants are involved. For example, convert
3308 (A | B) & (A | C) into A | (B & C)
3309 Further simplification will occur if B and C are constants.
3311 If this optimization cannot be done, 0 will be returned. */
3313 static tree
3314 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3315 tree arg0, tree arg1)
3317 tree common;
3318 tree left, right;
3320 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3321 || TREE_CODE (arg0) == code
3322 || (TREE_CODE (arg0) != BIT_AND_EXPR
3323 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3324 return 0;
3326 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3328 common = TREE_OPERAND (arg0, 0);
3329 left = TREE_OPERAND (arg0, 1);
3330 right = TREE_OPERAND (arg1, 1);
3332 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3334 common = TREE_OPERAND (arg0, 0);
3335 left = TREE_OPERAND (arg0, 1);
3336 right = TREE_OPERAND (arg1, 0);
3338 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3340 common = TREE_OPERAND (arg0, 1);
3341 left = TREE_OPERAND (arg0, 0);
3342 right = TREE_OPERAND (arg1, 1);
3344 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3346 common = TREE_OPERAND (arg0, 1);
3347 left = TREE_OPERAND (arg0, 0);
3348 right = TREE_OPERAND (arg1, 0);
3350 else
3351 return 0;
3353 common = fold_convert_loc (loc, type, common);
3354 left = fold_convert_loc (loc, type, left);
3355 right = fold_convert_loc (loc, type, right);
3356 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3357 fold_build2_loc (loc, code, type, left, right));
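/* E.g. (x | 3) & (x | 5) becomes x | (3 & 5), which further folds to
   x | 1, saving one bitwise operation.  */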
3360 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3361 with code CODE. This optimization is unsafe. */
3362 static tree
3363 distribute_real_division (location_t loc, enum tree_code code, tree type,
3364 tree arg0, tree arg1)
3366 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3367 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3369 /* (A / C) +- (B / C) -> (A +- B) / C. */
3370 if (mul0 == mul1
3371 && operand_equal_p (TREE_OPERAND (arg0, 1),
3372 TREE_OPERAND (arg1, 1), 0))
3373 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3374 fold_build2_loc (loc, code, type,
3375 TREE_OPERAND (arg0, 0),
3376 TREE_OPERAND (arg1, 0)),
3377 TREE_OPERAND (arg0, 1));
3379 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3380 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 0), 0)
3382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3383 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3385 REAL_VALUE_TYPE r0, r1;
3386 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3387 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3388 if (!mul0)
3389 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3390 if (!mul1)
3391 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3392 real_arithmetic (&r0, code, &r0, &r1);
3393 return fold_build2_loc (loc, MULT_EXPR, type,
3394 TREE_OPERAND (arg0, 0),
3395 build_real (type, r0));
3398 return NULL_TREE;
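/* E.g. a/c + b/c -> (a + b)/c, and a/2.0 - a/4.0 -> a * 0.25. Both can
   change rounding or introduce spurious overflow or underflow, hence
   "unsafe" above.  */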
3401 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3402 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3403 and uses reverse storage order if REVERSEP is nonzero. */
3405 static tree
3406 make_bit_field_ref (location_t loc, tree inner, tree type,
3407 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3408 int unsignedp, int reversep)
3410 tree result, bftype;
3412 if (bitpos == 0 && !reversep)
3414 tree size = TYPE_SIZE (TREE_TYPE (inner));
3415 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3416 || POINTER_TYPE_P (TREE_TYPE (inner)))
3417 && tree_fits_shwi_p (size)
3418 && tree_to_shwi (size) == bitsize)
3419 return fold_convert_loc (loc, type, inner);
3422 bftype = type;
3423 if (TYPE_PRECISION (bftype) != bitsize
3424 || TYPE_UNSIGNED (bftype) == !unsignedp)
3425 bftype = build_nonstandard_integer_type (bitsize, 0);
3427 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3428 size_int (bitsize), bitsize_int (bitpos));
3429 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3431 if (bftype != type)
3432 result = fold_convert_loc (loc, type, result);
3434 return result;
3437 /* Optimize a bit-field compare.
3439 There are two cases: First is a compare against a constant and the
3440 second is a comparison of two items where the fields are at the same
3441 bit position relative to the start of a chunk (byte, halfword, word)
3442 large enough to contain it. In these cases we can avoid the shift
3443 implicit in bitfield extractions.
3445 For constants, we emit a compare of the shifted constant with the
3446 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3447 compared. For two fields at the same position, we do the ANDs with the
3448 similar mask and compare the result of the ANDs.
3450 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3451 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3452 are the left and right operands of the comparison, respectively.
3454 If the optimization described above can be done, we return the resulting
3455 tree. Otherwise we return zero. */
3457 static tree
3458 optimize_bit_field_compare (location_t loc, enum tree_code code,
3459 tree compare_type, tree lhs, tree rhs)
3461 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3462 tree type = TREE_TYPE (lhs);
3463 tree unsigned_type;
3464 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3465 machine_mode lmode, rmode, nmode;
3466 int lunsignedp, runsignedp;
3467 int lreversep, rreversep;
3468 int lvolatilep = 0, rvolatilep = 0;
3469 tree linner, rinner = NULL_TREE;
3470 tree mask;
3471 tree offset;
3473 /* Get all the information about the extractions being done. If the bit size
3474 is the same as the size of the underlying object, we aren't doing an
3475 extraction at all and so can do nothing. We also don't want to
3476 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3477 then will no longer be able to replace it. */
3478 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3479 &lunsignedp, &lreversep, &lvolatilep, false);
3480 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3481 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3482 return 0;
3484 if (const_p)
3485 rreversep = lreversep;
3486 else
3488 /* If this is not a constant, we can only do something if bit positions,
3489 sizes, signedness and storage order are the same. */
3490 rinner
3491 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3492 &runsignedp, &rreversep, &rvolatilep, false);
3494 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3495 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3496 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3497 return 0;
3500 /* See if we can find a mode to refer to this field. We should be able to,
3501 but fail if we can't. */
3502 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3503 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3504 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3505 TYPE_ALIGN (TREE_TYPE (rinner))),
3506 word_mode, false);
3507 if (nmode == VOIDmode)
3508 return 0;
3510 /* Set signed and unsigned types of the precision of this mode for the
3511 shifts below. */
3512 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3514 /* Compute the bit position and size for the new reference and our offset
3515 within it. If the new reference is the same size as the original, we
3516 won't optimize anything, so return zero. */
3517 nbitsize = GET_MODE_BITSIZE (nmode);
3518 nbitpos = lbitpos & ~ (nbitsize - 1);
3519 lbitpos -= nbitpos;
3520 if (nbitsize == lbitsize)
3521 return 0;
3523 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3524 lbitpos = nbitsize - lbitsize - lbitpos;
3526 /* Make the mask to be used against the extracted field. */
3527 mask = build_int_cst_type (unsigned_type, -1);
3528 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3529 mask = const_binop (RSHIFT_EXPR, mask,
3530 size_int (nbitsize - lbitsize - lbitpos));
3532 if (! const_p)
3533 /* If not comparing with constant, just rework the comparison
3534 and return. */
3535 return fold_build2_loc (loc, code, compare_type,
3536 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3537 make_bit_field_ref (loc, linner,
3538 unsigned_type,
3539 nbitsize, nbitpos,
3540 1, lreversep),
3541 mask),
3542 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3543 make_bit_field_ref (loc, rinner,
3544 unsigned_type,
3545 nbitsize, nbitpos,
3546 1, rreversep),
3547 mask));
3549 /* Otherwise, we are handling the constant case. See if the constant is too
3550 big for the field. Warn and return a tree for 0 (false) if so. We do
3551 this not only for its own sake, but to avoid having to test for this
3552 error case below. If we didn't, we might generate wrong code.
3554 For unsigned fields, the constant shifted right by the field length should
3555 be all zero. For signed fields, the high-order bits should agree with
3556 the sign bit. */
3558 if (lunsignedp)
3560 if (wi::lrshift (rhs, lbitsize) != 0)
3562 warning (0, "comparison is always %d due to width of bit-field",
3563 code == NE_EXPR);
3564 return constant_boolean_node (code == NE_EXPR, compare_type);
3567 else
3569 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3570 if (tem != 0 && tem != -1)
3572 warning (0, "comparison is always %d due to width of bit-field",
3573 code == NE_EXPR);
3574 return constant_boolean_node (code == NE_EXPR, compare_type);
3578 /* Single-bit compares should always be against zero. */
3579 if (lbitsize == 1 && ! integer_zerop (rhs))
3581 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3582 rhs = build_int_cst (type, 0);
3585 /* Make a new bitfield reference, shift the constant over the
3586 appropriate number of bits and mask it with the computed mask
3587 (in case this was a signed field). If we changed it, make a new one. */
3588 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3589 lreversep);
3591 rhs = const_binop (BIT_AND_EXPR,
3592 const_binop (LSHIFT_EXPR,
3593 fold_convert_loc (loc, unsigned_type, rhs),
3594 size_int (lbitpos)),
3595 mask);
3597 lhs = build2_loc (loc, code, compare_type,
3598 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3599 return lhs;
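/* An illustrative sketch (hypothetical layout, little-endian): given
     struct S { unsigned a : 3; unsigned b : 5; } s;
   the constant case above lets "s.b == 2" be tested as
     (w & 0xf8) == (2 << 3)
   where w is a byte- or word-sized load covering the field and 0xf8 is
   the mask for bits 3..7, so the extraction shift implicit in reading
   s.b is avoided.  */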
3602 /* Subroutine for fold_truth_andor_1: decode a field reference.
3604 If EXP is a comparison reference, we return the innermost reference.
3606 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3607 set to the starting bit number.
3609 If the innermost field can be completely contained in a mode-sized
3610 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3612 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3613 otherwise it is not changed.
3615 *PUNSIGNEDP is set to the signedness of the field.
3617 *PREVERSEP is set to the storage order of the field.
3619 *PMASK is set to the mask used. This is either contained in a
3620 BIT_AND_EXPR or derived from the width of the field.
3622 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3624 Return 0 if this is not a component reference or is one that we can't
3625 do anything with. */
3627 static tree
3628 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3629 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3630 int *punsignedp, int *preversep, int *pvolatilep,
3631 tree *pmask, tree *pand_mask)
3633 tree outer_type = 0;
3634 tree and_mask = 0;
3635 tree mask, inner, offset;
3636 tree unsigned_type;
3637 unsigned int precision;
3639 /* All the optimizations using this function assume integer fields.
3640 There are problems with FP fields since the type_for_size call
3641 below can fail for, e.g., XFmode. */
3642 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3643 return 0;
3645 /* We are interested in the bare arrangement of bits, so strip everything
3646 that doesn't affect the machine mode. However, record the type of the
3647 outermost expression if it may matter below. */
3648 if (CONVERT_EXPR_P (exp)
3649 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3650 outer_type = TREE_TYPE (exp);
3651 STRIP_NOPS (exp);
3653 if (TREE_CODE (exp) == BIT_AND_EXPR)
3655 and_mask = TREE_OPERAND (exp, 1);
3656 exp = TREE_OPERAND (exp, 0);
3657 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3658 if (TREE_CODE (and_mask) != INTEGER_CST)
3659 return 0;
3662 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3663 punsignedp, preversep, pvolatilep, false);
3664 if ((inner == exp && and_mask == 0)
3665 || *pbitsize < 0 || offset != 0
3666 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3667 return 0;
3669 /* If the number of bits in the reference is the same as the bitsize of
3670 the outer type, then the outer type gives the signedness. Otherwise
3671 (in case of a small bitfield) the signedness is unchanged. */
3672 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3673 *punsignedp = TYPE_UNSIGNED (outer_type);
3675 /* Compute the mask to access the bitfield. */
3676 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3677 precision = TYPE_PRECISION (unsigned_type);
3679 mask = build_int_cst_type (unsigned_type, -1);
3681 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3682 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3684 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3685 if (and_mask != 0)
3686 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3687 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3689 *pmask = mask;
3690 *pand_mask = and_mask;
3691 return inner;
3694 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3695 bit positions and the type of MASK is signed. */
3697 static int
3698 all_ones_mask_p (const_tree mask, unsigned int size)
3700 tree type = TREE_TYPE (mask);
3701 unsigned int precision = TYPE_PRECISION (type);
3703 /* If this function returns true when the type of the mask is
3704 UNSIGNED, then there will be errors. In particular see
3705 gcc.c-torture/execute/990326-1.c. There does not appear to be
3706 any documentation paper trail as to why this is so. But the
3707 pre-wide-int code worked with that restriction, and it has been
3708 preserved here. */
3709 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3710 return false;
3712 return wi::mask (size, false, precision) == mask;
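/* E.g. with SIZE == 4, a signed-typed mask whose value is 0xf (four low
   ones, zeros above) is accepted; 0x17, or any mask of unsigned type,
   is not.  */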
3715 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3716 represents the sign bit of EXP's type. If EXP represents a sign
3717 or zero extension, also test VAL against the unextended type.
3718 The return value is the (sub)expression whose sign bit is VAL,
3719 or NULL_TREE otherwise. */
3721 static tree
3722 sign_bit_p (tree exp, const_tree val)
3724 int width;
3725 tree t;
3727 /* Tree EXP must have an integral type. */
3728 t = TREE_TYPE (exp);
3729 if (! INTEGRAL_TYPE_P (t))
3730 return NULL_TREE;
3732 /* Tree VAL must be an integer constant. */
3733 if (TREE_CODE (val) != INTEGER_CST
3734 || TREE_OVERFLOW (val))
3735 return NULL_TREE;
3737 width = TYPE_PRECISION (t);
3738 if (wi::only_sign_bit_p (val, width))
3739 return exp;
3741 /* Handle extension from a narrower type. */
3742 if (TREE_CODE (exp) == NOP_EXPR
3743 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3744 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3746 return NULL_TREE;
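/* As an illustration: for a signed 32-bit EXP, the only VAL accepted
   is 0x80000000 (the sign bit); and if EXP is, say, a NOP_EXPR
   widening a signed char, the recursion above also accepts 0x80, the
   sign bit of the narrower operand.  */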
3749 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3750 to be evaluated unconditionally. */
3752 static int
3753 simple_operand_p (const_tree exp)
3755 /* Strip any conversions that don't change the machine mode. */
3756 STRIP_NOPS (exp);
3758 return (CONSTANT_CLASS_P (exp)
3759 || TREE_CODE (exp) == SSA_NAME
3760 || (DECL_P (exp)
3761 && ! TREE_ADDRESSABLE (exp)
3762 && ! TREE_THIS_VOLATILE (exp)
3763 && ! DECL_NONLOCAL (exp)
3764 /* Don't regard global variables as simple. They may be
3765 allocated in ways unknown to the compiler (shared memory,
3766 #pragma weak, etc). */
3767 && ! TREE_PUBLIC (exp)
3768 && ! DECL_EXTERNAL (exp)
3769 /* Weakrefs are not safe to read, since they can be NULL.
3770 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3771 have DECL_WEAK flag set. */
3772 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3773 /* Loading a static variable is unduly expensive, but global
3774 registers aren't expensive. */
3775 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3778 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3779 to be evaluated unconditionally.
3780 In addition to simple_operand_p, we assume that comparisons, conversions,
3781 and logic-not operations are simple, if their operands are simple, too. */
3783 static bool
3784 simple_operand_p_2 (tree exp)
3786 enum tree_code code;
3788 if (TREE_SIDE_EFFECTS (exp)
3789 || tree_could_trap_p (exp))
3790 return false;
3792 while (CONVERT_EXPR_P (exp))
3793 exp = TREE_OPERAND (exp, 0);
3795 code = TREE_CODE (exp);
3797 if (TREE_CODE_CLASS (code) == tcc_comparison)
3798 return (simple_operand_p (TREE_OPERAND (exp, 0))
3799 && simple_operand_p (TREE_OPERAND (exp, 1)));
3801 if (code == TRUTH_NOT_EXPR)
3802 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3804 return simple_operand_p (exp);
3808 /* The following functions are subroutines to fold_range_test and allow it to
3809 try to change a logical combination of comparisons into a range test.
3811 For example, both
3812 X == 2 || X == 3 || X == 4 || X == 5
3813 and
3814 X >= 2 && X <= 5
3815 are converted to
3816 (unsigned) (X - 2) <= 3
3818 We describe each set of comparisons as being either inside or outside
3819 a range, using a variable named like IN_P, and then describe the
3820 range with a lower and upper bound. If one of the bounds is omitted,
3821 it represents either the highest or lowest value of the type.
3823 In the comments below, we represent a range by two numbers in brackets
3824 preceded by a "+" to designate being inside that range, or a "-" to
3825 designate being outside that range, so the condition can be inverted by
3826 flipping the prefix. An omitted bound is represented by a "-". For
3827 example, "- [-, 10]" means being outside the range starting at the lowest
3828 possible value and ending at 10, in other words, being greater than 10.
3829 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3830 always false.
3832 We set things up so that the missing bounds are handled in a consistent
3833 manner, so that neither a missing bound nor "true" and "false" need to be
3834 handled using a special case. */
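/* As an illustration: in this notation, X >= 2 && X <= 5 is the range
   "+ [2, 5]" and X < 2 || X > 5 is "- [2, 5]".  A two-sided range is
   emitted as a single unsigned comparison, roughly

       if (x >= 2 && x <= 5)                 becomes
       if ((unsigned) (x - 2) <= 5 - 2)

   since the subtraction wraps any x below 2 to a large unsigned
   value.  */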
3836 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3837 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3838 and UPPER1_P are nonzero if the respective argument is an upper bound
3839 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3840 must be specified for a comparison. ARG1 will be converted to ARG0's
3841 type if both are specified. */
3843 static tree
3844 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3845 tree arg1, int upper1_p)
3847 tree tem;
3848 int result;
3849 int sgn0, sgn1;
3851 /* If neither arg represents infinity, do the normal operation.
3852 Else, if not a comparison, return infinity. Else handle the special
3853 comparison rules. Note that most of the cases below won't occur, but
3854 are handled for consistency. */
3856 if (arg0 != 0 && arg1 != 0)
3858 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3859 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3860 STRIP_NOPS (tem);
3861 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3864 if (TREE_CODE_CLASS (code) != tcc_comparison)
3865 return 0;
3867 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3868 for neither. In real maths, we cannot assume open-ended ranges are
3869 the same. But this is computer arithmetic, where numbers are finite.
3870 We can therefore stand in for an unbounded bound with a value Z beyond
3871 any representable number (greater for an upper bound, smaller for a
3872 lower one). This permits us to treat unbounded ranges as equal.
3873 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3874 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3875 switch (code)
3877 case EQ_EXPR:
3878 result = sgn0 == sgn1;
3879 break;
3880 case NE_EXPR:
3881 result = sgn0 != sgn1;
3882 break;
3883 case LT_EXPR:
3884 result = sgn0 < sgn1;
3885 break;
3886 case LE_EXPR:
3887 result = sgn0 <= sgn1;
3888 break;
3889 case GT_EXPR:
3890 result = sgn0 > sgn1;
3891 break;
3892 case GE_EXPR:
3893 result = sgn0 >= sgn1;
3894 break;
3895 default:
3896 gcc_unreachable ();
3899 return constant_boolean_node (result, type);
3902 /* Helper routine for make_range. Perform one step for it, return
3903 new expression if the loop should continue or NULL_TREE if it should
3904 stop. */
3906 tree
3907 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3908 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3909 bool *strict_overflow_p)
3911 tree arg0_type = TREE_TYPE (arg0);
3912 tree n_low, n_high, low = *p_low, high = *p_high;
3913 int in_p = *p_in_p, n_in_p;
3915 switch (code)
3917 case TRUTH_NOT_EXPR:
3918 /* We can only do something if the range is testing for zero. */
3919 if (low == NULL_TREE || high == NULL_TREE
3920 || ! integer_zerop (low) || ! integer_zerop (high))
3921 return NULL_TREE;
3922 *p_in_p = ! in_p;
3923 return arg0;
3925 case EQ_EXPR: case NE_EXPR:
3926 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3927 /* We can only do something if the range is testing for zero
3928 and if the second operand is an integer constant. Note that
3929 saying something is "in" the range we make is done by
3930 complementing IN_P, since it is set in the initial case of
3931 being not equal to zero; "out" means leaving it alone.
3932 if (low == NULL_TREE || high == NULL_TREE
3933 || ! integer_zerop (low) || ! integer_zerop (high)
3934 || TREE_CODE (arg1) != INTEGER_CST)
3935 return NULL_TREE;
3937 switch (code)
3939 case NE_EXPR: /* - [c, c] */
3940 low = high = arg1;
3941 break;
3942 case EQ_EXPR: /* + [c, c] */
3943 in_p = ! in_p, low = high = arg1;
3944 break;
3945 case GT_EXPR: /* - [-, c] */
3946 low = 0, high = arg1;
3947 break;
3948 case GE_EXPR: /* + [c, -] */
3949 in_p = ! in_p, low = arg1, high = 0;
3950 break;
3951 case LT_EXPR: /* - [c, -] */
3952 low = arg1, high = 0;
3953 break;
3954 case LE_EXPR: /* + [-, c] */
3955 in_p = ! in_p, low = 0, high = arg1;
3956 break;
3957 default:
3958 gcc_unreachable ();
3961 /* If this is an unsigned comparison, we also know that EXP is
3962 greater than or equal to zero. We base the range tests we make
3963 on that fact, so we record it here so we can parse existing
3964 range tests. We test arg0_type since often the return type
3965 of, e.g. EQ_EXPR, is boolean. */
3966 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3968 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3969 in_p, low, high, 1,
3970 build_int_cst (arg0_type, 0),
3971 NULL_TREE))
3972 return NULL_TREE;
3974 in_p = n_in_p, low = n_low, high = n_high;
3976 /* If the high bound is missing, but we have a nonzero low
3977 bound, reverse the range so it goes from zero to the low bound
3978 minus 1. */
3979 if (high == 0 && low && ! integer_zerop (low))
3981 in_p = ! in_p;
3982 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3983 build_int_cst (TREE_TYPE (low), 1), 0);
3984 low = build_int_cst (arg0_type, 0);
3988 *p_low = low;
3989 *p_high = high;
3990 *p_in_p = in_p;
3991 return arg0;
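/* As an illustration: for unsigned char X, the comparison X >= 3
   first yields "+ [3, -]"; because the type is unsigned, the missing
   high bound is then reversed by the code above into "- [0, 2]",
   giving a range with both bounds present for later merging.  */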
3993 case NEGATE_EXPR:
3994 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3995 low and high are non-NULL, then normalize will DTRT. */
3996 if (!TYPE_UNSIGNED (arg0_type)
3997 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3999 if (low == NULL_TREE)
4000 low = TYPE_MIN_VALUE (arg0_type);
4001 if (high == NULL_TREE)
4002 high = TYPE_MAX_VALUE (arg0_type);
4005 /* (-x) IN [a,b] -> x in [-b, -a] */
4006 n_low = range_binop (MINUS_EXPR, exp_type,
4007 build_int_cst (exp_type, 0),
4008 0, high, 1);
4009 n_high = range_binop (MINUS_EXPR, exp_type,
4010 build_int_cst (exp_type, 0),
4011 0, low, 0);
4012 if (n_high != 0 && TREE_OVERFLOW (n_high))
4013 return NULL_TREE;
4014 goto normalize;
4016 case BIT_NOT_EXPR:
4017 /* ~ X -> -X - 1 */
4018 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4019 build_int_cst (exp_type, 1));
4021 case PLUS_EXPR:
4022 case MINUS_EXPR:
4023 if (TREE_CODE (arg1) != INTEGER_CST)
4024 return NULL_TREE;
4026 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4027 move a constant to the other side. */
4028 if (!TYPE_UNSIGNED (arg0_type)
4029 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4030 return NULL_TREE;
4032 /* If EXP is signed, any overflow in the computation is undefined,
4033 so we don't worry about it so long as our computations on
4034 the bounds don't overflow. For unsigned, overflow is defined
4035 and this is exactly the right thing. */
4036 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4037 arg0_type, low, 0, arg1, 0);
4038 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4039 arg0_type, high, 1, arg1, 0);
4040 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4041 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4042 return NULL_TREE;
4044 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4045 *strict_overflow_p = true;
4047 normalize:
4048 /* Check for an unsigned range which has wrapped around the maximum
4049 value thus making n_high < n_low, and normalize it. */
4050 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4052 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4053 build_int_cst (TREE_TYPE (n_high), 1), 0);
4054 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4055 build_int_cst (TREE_TYPE (n_low), 1), 0);
4057 /* If the range is of the form +/- [ x+1, x ], we won't
4058 be able to normalize it. But then, it represents the
4059 whole range or the empty set, so make it
4060 +/- [ -, - ]. */
4061 if (tree_int_cst_equal (n_low, low)
4062 && tree_int_cst_equal (n_high, high))
4063 low = high = 0;
4064 else
4065 in_p = ! in_p;
4067 else
4068 low = n_low, high = n_high;
4070 *p_low = low;
4071 *p_high = high;
4072 *p_in_p = in_p;
4073 return arg0;
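/* As an illustration of the normalization: testing the range
   "+ [5, 20]" on X + 10 for unsigned char X gives n_low = 5 - 10 =
   251 and n_high = 20 - 10 = 10 modulo 256.  Since n_high < n_low
   the range has wrapped, and the code above rewrites it as the
   complement "- [11, 250]".  */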
4075 CASE_CONVERT:
4076 case NON_LVALUE_EXPR:
4077 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4078 return NULL_TREE;
4080 if (! INTEGRAL_TYPE_P (arg0_type)
4081 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4082 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4083 return NULL_TREE;
4085 n_low = low, n_high = high;
4087 if (n_low != 0)
4088 n_low = fold_convert_loc (loc, arg0_type, n_low);
4090 if (n_high != 0)
4091 n_high = fold_convert_loc (loc, arg0_type, n_high);
4093 /* If we're converting arg0 from an unsigned type to exp,
4094 a signed type, we will be doing the comparison as unsigned.
4095 The tests above have already verified that LOW and HIGH
4096 are both positive.
4098 So we have to ensure that we will handle large unsigned
4099 values the same way that the current signed bounds treat
4100 negative values. */
4102 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4104 tree high_positive;
4105 tree equiv_type;
4106 /* For fixed-point modes, we need to pass the saturating flag
4107 as the 2nd parameter. */
4108 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4109 equiv_type
4110 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4111 TYPE_SATURATING (arg0_type));
4112 else
4113 equiv_type
4114 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4116 /* A range without an upper bound is, naturally, unbounded.
4117 Since convert would have cropped a very large value, use
4118 the max value for the destination type. */
4119 high_positive
4120 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4121 : TYPE_MAX_VALUE (arg0_type);
4123 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4124 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4125 fold_convert_loc (loc, arg0_type,
4126 high_positive),
4127 build_int_cst (arg0_type, 1));
4129 /* If the low bound is specified, "and" the range with the
4130 range for which the original unsigned value will be
4131 positive. */
4132 if (low != 0)
4134 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4135 1, fold_convert_loc (loc, arg0_type,
4136 integer_zero_node),
4137 high_positive))
4138 return NULL_TREE;
4140 in_p = (n_in_p == in_p);
4142 else
4144 /* Otherwise, "or" the range with the range of the input
4145 that will be interpreted as negative. */
4146 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4147 1, fold_convert_loc (loc, arg0_type,
4148 integer_zero_node),
4149 high_positive))
4150 return NULL_TREE;
4152 in_p = (in_p != n_in_p);
4156 *p_low = n_low;
4157 *p_high = n_high;
4158 *p_in_p = in_p;
4159 return arg0;
4161 default:
4162 return NULL_TREE;
4166 /* Given EXP, a logical expression, set the range it is testing into
4167 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4168 actually being tested. *PLOW and *PHIGH will be made of the same
4169 type as the returned expression. If EXP is not a comparison, we
4170 will most likely not be returning a useful value and range. Set
4171 *STRICT_OVERFLOW_P to true if the return value is only valid
4172 because signed overflow is undefined; otherwise, do not change
4173 *STRICT_OVERFLOW_P. */
4175 tree
4176 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4177 bool *strict_overflow_p)
4179 enum tree_code code;
4180 tree arg0, arg1 = NULL_TREE;
4181 tree exp_type, nexp;
4182 int in_p;
4183 tree low, high;
4184 location_t loc = EXPR_LOCATION (exp);
4186 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4187 and see if we can refine the range. Some of the cases below may not
4188 happen, but it doesn't seem worth worrying about this. We "continue"
4189 the outer loop when we've changed something; otherwise we "break"
4190 the switch, which will "break" the while. */
4192 in_p = 0;
4193 low = high = build_int_cst (TREE_TYPE (exp), 0);
4195 while (1)
4197 code = TREE_CODE (exp);
4198 exp_type = TREE_TYPE (exp);
4199 arg0 = NULL_TREE;
4201 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4203 if (TREE_OPERAND_LENGTH (exp) > 0)
4204 arg0 = TREE_OPERAND (exp, 0);
4205 if (TREE_CODE_CLASS (code) == tcc_binary
4206 || TREE_CODE_CLASS (code) == tcc_comparison
4207 || (TREE_CODE_CLASS (code) == tcc_expression
4208 && TREE_OPERAND_LENGTH (exp) > 1))
4209 arg1 = TREE_OPERAND (exp, 1);
4211 if (arg0 == NULL_TREE)
4212 break;
4214 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4215 &high, &in_p, strict_overflow_p);
4216 if (nexp == NULL_TREE)
4217 break;
4218 exp = nexp;
4221 /* If EXP is a constant, we can evaluate whether this is true or false. */
4222 if (TREE_CODE (exp) == INTEGER_CST)
4224 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4225 exp, 0, low, 0))
4226 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4227 exp, 1, high, 1)));
4228 low = high = 0;
4229 exp = 0;
4232 *pin_p = in_p, *plow = low, *phigh = high;
4233 return exp;
4236 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4237 type, TYPE, return an expression to test if EXP is in (or out of, depending
4238 on IN_P) the range. Return 0 if the test couldn't be created. */
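/* As an illustration: with both bounds present and a wrapping ETYPE,
   the general case below subtracts LOW and compares unsigned, so a
   request for X in + [2, 5] yields roughly

       (unsigned) (x - 2) <= 3

   matching the range-test example earlier in this file.  */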
4240 tree
4241 build_range_check (location_t loc, tree type, tree exp, int in_p,
4242 tree low, tree high)
4244 tree etype = TREE_TYPE (exp), value;
4246 #ifdef HAVE_canonicalize_funcptr_for_compare
4247 /* Disable this optimization for function pointer expressions
4248 on targets that require function pointer canonicalization. */
4249 if (HAVE_canonicalize_funcptr_for_compare
4250 && TREE_CODE (etype) == POINTER_TYPE
4251 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4252 return NULL_TREE;
4253 #endif
4255 if (! in_p)
4257 value = build_range_check (loc, type, exp, 1, low, high);
4258 if (value != 0)
4259 return invert_truthvalue_loc (loc, value);
4261 return 0;
4264 if (low == 0 && high == 0)
4265 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4267 if (low == 0)
4268 return fold_build2_loc (loc, LE_EXPR, type, exp,
4269 fold_convert_loc (loc, etype, high));
4271 if (high == 0)
4272 return fold_build2_loc (loc, GE_EXPR, type, exp,
4273 fold_convert_loc (loc, etype, low));
4275 if (operand_equal_p (low, high, 0))
4276 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4277 fold_convert_loc (loc, etype, low));
4279 if (integer_zerop (low))
4281 if (! TYPE_UNSIGNED (etype))
4283 etype = unsigned_type_for (etype);
4284 high = fold_convert_loc (loc, etype, high);
4285 exp = fold_convert_loc (loc, etype, exp);
4287 return build_range_check (loc, type, exp, 1, 0, high);
4290 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4291 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4293 int prec = TYPE_PRECISION (etype);
4295 if (wi::mask (prec - 1, false, prec) == high)
4297 if (TYPE_UNSIGNED (etype))
4299 tree signed_etype = signed_type_for (etype);
4300 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4301 etype
4302 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4303 else
4304 etype = signed_etype;
4305 exp = fold_convert_loc (loc, etype, exp);
4307 return fold_build2_loc (loc, GT_EXPR, type, exp,
4308 build_int_cst (etype, 0));
4312 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4313 This requires wrap-around arithmetic for the type of the expression.
4314 First make sure that arithmetic in this type is valid, then make sure
4315 that it wraps around. */
4316 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4317 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4318 TYPE_UNSIGNED (etype));
4320 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4322 tree utype, minv, maxv;
4324 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4325 for the type in question, as we rely on this here. */
4326 utype = unsigned_type_for (etype);
4327 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4328 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4329 build_int_cst (TREE_TYPE (maxv), 1), 1);
4330 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4332 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4333 minv, 1, maxv, 1)))
4334 etype = utype;
4335 else
4336 return 0;
4339 high = fold_convert_loc (loc, etype, high);
4340 low = fold_convert_loc (loc, etype, low);
4341 exp = fold_convert_loc (loc, etype, exp);
4343 value = const_binop (MINUS_EXPR, high, low);
4346 if (POINTER_TYPE_P (etype))
4348 if (value != 0 && !TREE_OVERFLOW (value))
4350 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4351 return build_range_check (loc, type,
4352 fold_build_pointer_plus_loc (loc, exp, low),
4353 1, build_int_cst (etype, 0), value);
4355 return 0;
4358 if (value != 0 && !TREE_OVERFLOW (value))
4359 return build_range_check (loc, type,
4360 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4361 1, build_int_cst (etype, 0), value);
4363 return 0;
4366 /* Return the predecessor of VAL in its type, handling the infinite case. */
4368 static tree
4369 range_predecessor (tree val)
4371 tree type = TREE_TYPE (val);
4373 if (INTEGRAL_TYPE_P (type)
4374 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4375 return 0;
4376 else
4377 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4378 build_int_cst (TREE_TYPE (val), 1), 0);
4381 /* Return the successor of VAL in its type, handling the infinite case. */
4383 static tree
4384 range_successor (tree val)
4386 tree type = TREE_TYPE (val);
4388 if (INTEGRAL_TYPE_P (type)
4389 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4390 return 0;
4391 else
4392 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4393 build_int_cst (TREE_TYPE (val), 1), 0);
4396 /* Given two ranges, see if we can merge them into one. Return 1 if we
4397 can, 0 if we can't. Set the output range into the specified parameters. */
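/* As an illustration: merging "+ [2, 9]" and "+ [5, 12]" takes the
   IN0_P && IN1_P arm below and yields "+ [5, 9]"; merging the
   adjacent exclusions "- [2, 5]" and "- [6, 12]" yields "- [2, 12]".  */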
4399 bool
4400 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4401 tree high0, int in1_p, tree low1, tree high1)
4403 int no_overlap;
4404 int subset;
4405 int temp;
4406 tree tem;
4407 int in_p;
4408 tree low, high;
4409 int lowequal = ((low0 == 0 && low1 == 0)
4410 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4411 low0, 0, low1, 0)));
4412 int highequal = ((high0 == 0 && high1 == 0)
4413 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4414 high0, 1, high1, 1)));
4416 /* Make range 0 be the range that starts first, or ends last if they
4417 start at the same value. Swap them if it isn't. */
4418 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4419 low0, 0, low1, 0))
4420 || (lowequal
4421 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4422 high1, 1, high0, 1))))
4424 temp = in0_p, in0_p = in1_p, in1_p = temp;
4425 tem = low0, low0 = low1, low1 = tem;
4426 tem = high0, high0 = high1, high1 = tem;
4429 /* Now flag two cases, whether the ranges are disjoint or whether the
4430 second range is totally subsumed in the first. Note that the tests
4431 below are simplified by the ones above. */
4432 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4433 high0, 1, low1, 0));
4434 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4435 high1, 1, high0, 1));
4437 /* We now have four cases, depending on whether we are including or
4438 excluding the two ranges. */
4439 if (in0_p && in1_p)
4441 /* If they don't overlap, the result is false. If the second range
4442 is a subset it is the result. Otherwise, the range is from the start
4443 of the second to the end of the first. */
4444 if (no_overlap)
4445 in_p = 0, low = high = 0;
4446 else if (subset)
4447 in_p = 1, low = low1, high = high1;
4448 else
4449 in_p = 1, low = low1, high = high0;
4452 else if (in0_p && ! in1_p)
4454 /* If they don't overlap, the result is the first range. If they are
4455 equal, the result is false. If the second range is a subset of the
4456 first, and the ranges begin at the same place, we go from just after
4457 the end of the second range to the end of the first. If the second
4458 range is not a subset of the first, or if it is a subset and both
4459 ranges end at the same place, the range starts at the start of the
4460 first range and ends just before the second range.
4461 Otherwise, we can't describe this as a single range. */
4462 if (no_overlap)
4463 in_p = 1, low = low0, high = high0;
4464 else if (lowequal && highequal)
4465 in_p = 0, low = high = 0;
4466 else if (subset && lowequal)
4468 low = range_successor (high1);
4469 high = high0;
4470 in_p = 1;
4471 if (low == 0)
4473 /* We are in the weird situation where high0 > high1 but
4474 high1 has no successor. Punt. */
4475 return 0;
4478 else if (! subset || highequal)
4480 low = low0;
4481 high = range_predecessor (low1);
4482 in_p = 1;
4483 if (high == 0)
4485 /* low0 < low1 but low1 has no predecessor. Punt. */
4486 return 0;
4489 else
4490 return 0;
4493 else if (! in0_p && in1_p)
4495 /* If they don't overlap, the result is the second range. If the second
4496 is a subset of the first, the result is false. Otherwise,
4497 the range starts just after the first range and ends at the
4498 end of the second. */
4499 if (no_overlap)
4500 in_p = 1, low = low1, high = high1;
4501 else if (subset || highequal)
4502 in_p = 0, low = high = 0;
4503 else
4505 low = range_successor (high0);
4506 high = high1;
4507 in_p = 1;
4508 if (low == 0)
4510 /* high1 > high0 but high0 has no successor. Punt. */
4511 return 0;
4516 else
4518 /* The case where we are excluding both ranges. Here the complex case
4519 is if they don't overlap. In that case, the only time we have a
4520 range is if they are adjacent. If the second is a subset of the
4521 first, the result is the first. Otherwise, the range to exclude
4522 starts at the beginning of the first range and ends at the end of the
4523 second. */
4524 if (no_overlap)
4526 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4527 range_successor (high0),
4528 1, low1, 0)))
4529 in_p = 0, low = low0, high = high1;
4530 else
4532 /* Canonicalize - [min, x] into - [-, x]. */
4533 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4534 switch (TREE_CODE (TREE_TYPE (low0)))
4536 case ENUMERAL_TYPE:
4537 if (TYPE_PRECISION (TREE_TYPE (low0))
4538 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4539 break;
4540 /* FALLTHROUGH */
4541 case INTEGER_TYPE:
4542 if (tree_int_cst_equal (low0,
4543 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4544 low0 = 0;
4545 break;
4546 case POINTER_TYPE:
4547 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4548 && integer_zerop (low0))
4549 low0 = 0;
4550 break;
4551 default:
4552 break;
4555 /* Canonicalize - [x, max] into - [x, -]. */
4556 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4557 switch (TREE_CODE (TREE_TYPE (high1)))
4559 case ENUMERAL_TYPE:
4560 if (TYPE_PRECISION (TREE_TYPE (high1))
4561 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4562 break;
4563 /* FALLTHROUGH */
4564 case INTEGER_TYPE:
4565 if (tree_int_cst_equal (high1,
4566 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4567 high1 = 0;
4568 break;
4569 case POINTER_TYPE:
4570 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4571 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4572 high1, 1,
4573 build_int_cst (TREE_TYPE (high1), 1),
4574 1)))
4575 high1 = 0;
4576 break;
4577 default:
4578 break;
4581 /* The ranges might also be adjacent between the maximum and
4582 minimum values of the given type. For
4583 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4584 return + [x + 1, y - 1]. */
4585 if (low0 == 0 && high1 == 0)
4587 low = range_successor (high0);
4588 high = range_predecessor (low1);
4589 if (low == 0 || high == 0)
4590 return 0;
4592 in_p = 1;
4594 else
4595 return 0;
4598 else if (subset)
4599 in_p = 0, low = low0, high = high0;
4600 else
4601 in_p = 0, low = low0, high = high1;
4604 *pin_p = in_p, *plow = low, *phigh = high;
4605 return 1;
4609 /* Subroutine of fold, looking inside expressions of the form
4610 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4611 of the COND_EXPR. This function is also used to optimize
4612 A op B ? C : A, by reversing the comparison first.
4614 Return a folded expression whose code is not a COND_EXPR
4615 anymore, or NULL_TREE if no folding opportunity is found. */
4617 static tree
4618 fold_cond_expr_with_comparison (location_t loc, tree type,
4619 tree arg0, tree arg1, tree arg2)
4621 enum tree_code comp_code = TREE_CODE (arg0);
4622 tree arg00 = TREE_OPERAND (arg0, 0);
4623 tree arg01 = TREE_OPERAND (arg0, 1);
4624 tree arg1_type = TREE_TYPE (arg1);
4625 tree tem;
4627 STRIP_NOPS (arg1);
4628 STRIP_NOPS (arg2);
4630 /* If we have A op 0 ? A : -A, consider applying the following
4631 transformations:
4633 A == 0? A : -A same as -A
4634 A != 0? A : -A same as A
4635 A >= 0? A : -A same as abs (A)
4636 A > 0? A : -A same as abs (A)
4637 A <= 0? A : -A same as -abs (A)
4638 A < 0? A : -A same as -abs (A)
4640 None of these transformations work for modes with signed
4641 zeros. If A is +/-0, the first two transformations will
4642 change the sign of the result (from +0 to -0, or vice
4643 versa). The last four will fix the sign of the result,
4644 even though the original expressions could be positive or
4645 negative, depending on the sign of A.
4647 Note that all these transformations are correct if A is
4648 NaN, since the two alternatives (A and -A) are also NaNs. */
4649 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4650 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4651 ? real_zerop (arg01)
4652 : integer_zerop (arg01))
4653 && ((TREE_CODE (arg2) == NEGATE_EXPR
4654 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4655 /* In the case that A is of the form X-Y, '-A' (arg2) may
4656 have already been folded to Y-X; check for that. */
4657 || (TREE_CODE (arg1) == MINUS_EXPR
4658 && TREE_CODE (arg2) == MINUS_EXPR
4659 && operand_equal_p (TREE_OPERAND (arg1, 0),
4660 TREE_OPERAND (arg2, 1), 0)
4661 && operand_equal_p (TREE_OPERAND (arg1, 1),
4662 TREE_OPERAND (arg2, 0), 0))))
4663 switch (comp_code)
4665 case EQ_EXPR:
4666 case UNEQ_EXPR:
4667 tem = fold_convert_loc (loc, arg1_type, arg1);
4668 return pedantic_non_lvalue_loc (loc,
4669 fold_convert_loc (loc, type,
4670 negate_expr (tem)));
4671 case NE_EXPR:
4672 case LTGT_EXPR:
4673 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4674 case UNGE_EXPR:
4675 case UNGT_EXPR:
4676 if (flag_trapping_math)
4677 break;
4678 /* Fall through. */
4679 case GE_EXPR:
4680 case GT_EXPR:
4681 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4682 arg1 = fold_convert_loc (loc, signed_type_for
4683 (TREE_TYPE (arg1)), arg1);
4684 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4685 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4686 case UNLE_EXPR:
4687 case UNLT_EXPR:
4688 if (flag_trapping_math)
4689 break;
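/* Fall through. */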
4690 case LE_EXPR:
4691 case LT_EXPR:
4692 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4693 arg1 = fold_convert_loc (loc, signed_type_for
4694 (TREE_TYPE (arg1)), arg1);
4695 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4696 return negate_expr (fold_convert_loc (loc, type, tem));
4697 default:
4698 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4699 break;
4702 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4703 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4704 both transformations are correct when A is NaN: A != 0
4705 is then true, and A == 0 is false. */
4707 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4708 && integer_zerop (arg01) && integer_zerop (arg2))
4710 if (comp_code == NE_EXPR)
4711 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4712 else if (comp_code == EQ_EXPR)
4713 return build_zero_cst (type);
4716 /* Try some transformations of A op B ? A : B.
4718 A == B? A : B same as B
4719 A != B? A : B same as A
4720 A >= B? A : B same as max (A, B)
4721 A > B? A : B same as max (B, A)
4722 A <= B? A : B same as min (A, B)
4723 A < B? A : B same as min (B, A)
4725 As above, these transformations don't work in the presence
4726 of signed zeros. For example, if A and B are zeros of
4727 opposite sign, the first two transformations will change
4728 the sign of the result. In the last four, the original
4729 expressions give different results for (A=+0, B=-0) and
4730 (A=-0, B=+0), but the transformed expressions do not.
4732 The first two transformations are correct if either A or B
4733 is a NaN. In the first transformation, the condition will
4734 be false, and B will indeed be chosen. In the case of the
4735 second transformation, the condition A != B will be true,
4736 and A will be chosen.
4738 The conversions to max() and min() are not correct if B is
4739 a number and A is not. The conditions in the original
4740 expressions will be false, so all four give B. The min()
4741 and max() versions would give a NaN instead. */
4742 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4743 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4744 /* Avoid these transformations if the COND_EXPR may be used
4745 as an lvalue in the C++ front-end. PR c++/19199. */
4746 && (in_gimple_form
4747 || VECTOR_TYPE_P (type)
4748 || (strcmp (lang_hooks.name, "GNU C++") != 0
4749 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4750 || ! maybe_lvalue_p (arg1)
4751 || ! maybe_lvalue_p (arg2)))
4753 tree comp_op0 = arg00;
4754 tree comp_op1 = arg01;
4755 tree comp_type = TREE_TYPE (comp_op0);
4757 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4758 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4760 comp_type = type;
4761 comp_op0 = arg1;
4762 comp_op1 = arg2;
4765 switch (comp_code)
4767 case EQ_EXPR:
4768 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4769 case NE_EXPR:
4770 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4771 case LE_EXPR:
4772 case LT_EXPR:
4773 case UNLE_EXPR:
4774 case UNLT_EXPR:
4775 /* In C++ a ?: expression can be an lvalue, so put the
4776 operand which will be used if they are equal first
4777 so that we can convert this back to the
4778 corresponding COND_EXPR. */
4779 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4781 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4782 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4783 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4784 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4785 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4786 comp_op1, comp_op0);
4787 return pedantic_non_lvalue_loc (loc,
4788 fold_convert_loc (loc, type, tem));
4790 break;
4791 case GE_EXPR:
4792 case GT_EXPR:
4793 case UNGE_EXPR:
4794 case UNGT_EXPR:
4795 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4797 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4798 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4799 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4800 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4801 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4802 comp_op1, comp_op0);
4803 return pedantic_non_lvalue_loc (loc,
4804 fold_convert_loc (loc, type, tem));
4806 break;
4807 case UNEQ_EXPR:
4808 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4809 return pedantic_non_lvalue_loc (loc,
4810 fold_convert_loc (loc, type, arg2));
4811 break;
4812 case LTGT_EXPR:
4813 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4814 return pedantic_non_lvalue_loc (loc,
4815 fold_convert_loc (loc, type, arg1));
4816 break;
4817 default:
4818 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4819 break;
4823 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4824 we might still be able to simplify this. For example,
4825 if C1 is one less or one more than C2, this might have started
4826 out as a MIN or MAX and been transformed by this function.
4827 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
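/* As an illustration: X < 3 ? X : 2 matches the LT_EXPR case below
   with C1 == C2 + 1 and folds to MIN_EXPR <X, 2>, while
   X > 1 ? X : 2 matches the GT_EXPR case and folds to
   MAX_EXPR <X, 2>.  */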
4829 if (INTEGRAL_TYPE_P (type)
4830 && TREE_CODE (arg01) == INTEGER_CST
4831 && TREE_CODE (arg2) == INTEGER_CST)
4832 switch (comp_code)
4834 case EQ_EXPR:
4835 if (TREE_CODE (arg1) == INTEGER_CST)
4836 break;
4837 /* We can replace A with C1 in this case. */
4838 arg1 = fold_convert_loc (loc, type, arg01);
4839 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4841 case LT_EXPR:
4842 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4843 MIN_EXPR, to preserve the signedness of the comparison. */
4844 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4845 OEP_ONLY_CONST)
4846 && operand_equal_p (arg01,
4847 const_binop (PLUS_EXPR, arg2,
4848 build_int_cst (type, 1)),
4849 OEP_ONLY_CONST))
4851 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4852 fold_convert_loc (loc, TREE_TYPE (arg00),
4853 arg2));
4854 return pedantic_non_lvalue_loc (loc,
4855 fold_convert_loc (loc, type, tem));
4857 break;
4859 case LE_EXPR:
4860 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4861 as above. */
4862 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4863 OEP_ONLY_CONST)
4864 && operand_equal_p (arg01,
4865 const_binop (MINUS_EXPR, arg2,
4866 build_int_cst (type, 1)),
4867 OEP_ONLY_CONST))
4869 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4870 fold_convert_loc (loc, TREE_TYPE (arg00),
4871 arg2));
4872 return pedantic_non_lvalue_loc (loc,
4873 fold_convert_loc (loc, type, tem));
4875 break;
4877 case GT_EXPR:
4878 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4879 MAX_EXPR, to preserve the signedness of the comparison. */
4880 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4881 OEP_ONLY_CONST)
4882 && operand_equal_p (arg01,
4883 const_binop (MINUS_EXPR, arg2,
4884 build_int_cst (type, 1)),
4885 OEP_ONLY_CONST))
4887 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4888 fold_convert_loc (loc, TREE_TYPE (arg00),
4889 arg2));
4890 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4892 break;
4894 case GE_EXPR:
4895 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4896 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4897 OEP_ONLY_CONST)
4898 && operand_equal_p (arg01,
4899 const_binop (PLUS_EXPR, arg2,
4900 build_int_cst (type, 1)),
4901 OEP_ONLY_CONST))
4903 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4904 fold_convert_loc (loc, TREE_TYPE (arg00),
4905 arg2));
4906 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4908 break;
4909 case NE_EXPR:
4910 break;
4911 default:
4912 gcc_unreachable ();
4915 return NULL_TREE;
4920 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4921 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4922 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4923 false) >= 2)
4924 #endif
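/* As an illustration: when the macro above evaluates to true,
   fold_range_test below may turn a short-circuit form such as
   X < 2 || X > 5 (TRUTH_ORIF_EXPR) into the non-short-circuit
   X < 2 | X > 5 (TRUTH_OR_EXPR), evaluating both comparisons
   unconditionally where branches are expensive.  */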
4926 /* EXP is some logical combination of boolean tests. See if we can
4927 merge it into some range test. Return the new tree if so. */
4929 static tree
4930 fold_range_test (location_t loc, enum tree_code code, tree type,
4931 tree op0, tree op1)
4933 int or_op = (code == TRUTH_ORIF_EXPR
4934 || code == TRUTH_OR_EXPR);
4935 int in0_p, in1_p, in_p;
4936 tree low0, low1, low, high0, high1, high;
4937 bool strict_overflow_p = false;
4938 tree tem, lhs, rhs;
4939 const char * const warnmsg = G_("assuming signed overflow does not occur "
4940 "when simplifying range test");
4942 if (!INTEGRAL_TYPE_P (type))
4943 return 0;
4945 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4946 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4948 /* If this is an OR operation, invert both sides; we will invert
4949 again at the end. */
4950 if (or_op)
4951 in0_p = ! in0_p, in1_p = ! in1_p;
4953 /* If both expressions are the same, if we can merge the ranges, and we
4954 can build the range test, return it or its inversion. If one of the
4955 ranges is always true or always false, consider it to be the same
4956 expression as the other. */
4957 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4958 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4959 in1_p, low1, high1)
4960 && 0 != (tem = (build_range_check (loc, type,
4961 lhs != 0 ? lhs
4962 : rhs != 0 ? rhs : integer_zero_node,
4963 in_p, low, high))))
4965 if (strict_overflow_p)
4966 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4967 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4970 /* On machines where the branch cost is high, if this is a
4971 short-circuited branch and the underlying object on both sides
4972 is the same, make a non-short-circuit operation. */
4973 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4974 && lhs != 0 && rhs != 0
4975 && (code == TRUTH_ANDIF_EXPR
4976 || code == TRUTH_ORIF_EXPR)
4977 && operand_equal_p (lhs, rhs, 0))
4979 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4980 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4981 which cases we can't do this. */
4982 if (simple_operand_p (lhs))
4983 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4984 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4985 type, op0, op1);
4987 else if (!lang_hooks.decls.global_bindings_p ()
4988 && !CONTAINS_PLACEHOLDER_P (lhs))
4990 tree common = save_expr (lhs);
4992 if (0 != (lhs = build_range_check (loc, type, common,
4993 or_op ? ! in0_p : in0_p,
4994 low0, high0))
4995 && (0 != (rhs = build_range_check (loc, type, common,
4996 or_op ? ! in1_p : in1_p,
4997 low1, high1))))
4999 if (strict_overflow_p)
5000 fold_overflow_warning (warnmsg,
5001 WARN_STRICT_OVERFLOW_COMPARISON);
5002 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5003 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5004 type, lhs, rhs);
5009 return 0;
5012 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5013 bit value. Arrange things so the extra bits will be set to zero if and
5014 only if C is sign-extended to its full width. If MASK is nonzero,
5015 it is an INTEGER_CST that should be AND'ed with the extra bits. */
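/* As an illustration: with P == 8 in a 32-bit mode and C == 0x80,
   TEMP is the sign bit 1 shifted to 0x80000000 and then arithmetically
   shifted right by 32 - 8 - 1 == 23 bits, giving 0xffffff00; the XOR
   below returns 0x80 ^ 0xffffff00 == 0xffffff80.  Had C already been
   sign-extended (0xffffff80), the same XOR would instead clear the
   extra bits and return 0x00000080.  */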
5017 static tree
5018 unextend (tree c, int p, int unsignedp, tree mask)
5020 tree type = TREE_TYPE (c);
5021 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5022 tree temp;
5024 if (p == modesize || unsignedp)
5025 return c;
5027 /* We work by getting just the sign bit into the low-order bit, then
5028 into the high-order bit, then sign-extend. We then XOR that value
5029 with C. */
5030 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5032 /* We must use a signed type in order to get an arithmetic right shift.
5033 However, we must also avoid introducing accidental overflows, so that
5034 a subsequent call to integer_zerop will work. Hence we must
5035 do the type conversion here. At this point, the constant is either
5036 zero or one, and the conversion to a signed type can never overflow.
5037 We could get an overflow if this conversion is done anywhere else. */
5038 if (TYPE_UNSIGNED (type))
5039 temp = fold_convert (signed_type_for (type), temp);
5041 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5042 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5043 if (mask != 0)
5044 temp = const_binop (BIT_AND_EXPR, temp,
5045 fold_convert (TREE_TYPE (c), mask));
5046 /* If necessary, convert the type back to match the type of C. */
5047 if (TYPE_UNSIGNED (type))
5048 temp = fold_convert (type, temp);
5050 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5053 /* For an expression that has the form
5054 (A && B) || ~B
5055 or
5056 (A || B) && ~B,
5057 we can drop one of the inner expressions and simplify to
5058 A || ~B
5059 or
5060 A && ~B
5061 LOC is the location of the resulting expression. OP is the inner
5062 logical operation; the left-hand side in the examples above, while CMPOP
5063 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5064 removing a condition that guards another, as in
5065 (A != NULL && A->...) || A == NULL
5066 which we must not transform. If RHS_ONLY is true, only eliminate the
5067 right-most operand of the inner logical operation. */
5069 static tree
5070 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5071 bool rhs_only)
5073 tree type = TREE_TYPE (cmpop);
5074 enum tree_code code = TREE_CODE (cmpop);
5075 enum tree_code truthop_code = TREE_CODE (op);
5076 tree lhs = TREE_OPERAND (op, 0);
5077 tree rhs = TREE_OPERAND (op, 1);
5078 tree orig_lhs = lhs, orig_rhs = rhs;
5079 enum tree_code rhs_code = TREE_CODE (rhs);
5080 enum tree_code lhs_code = TREE_CODE (lhs);
5081 enum tree_code inv_code;
5083 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5084 return NULL_TREE;
5086 if (TREE_CODE_CLASS (code) != tcc_comparison)
5087 return NULL_TREE;
5089 if (rhs_code == truthop_code)
5091 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5092 if (newrhs != NULL_TREE)
5094 rhs = newrhs;
5095 rhs_code = TREE_CODE (rhs);
5098 if (lhs_code == truthop_code && !rhs_only)
5100 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5101 if (newlhs != NULL_TREE)
5103 lhs = newlhs;
5104 lhs_code = TREE_CODE (lhs);
5108 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5109 if (inv_code == rhs_code
5110 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5111 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5112 return lhs;
5113 if (!rhs_only && inv_code == lhs_code
5114 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5115 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5116 return rhs;
5117 if (rhs != orig_rhs || lhs != orig_lhs)
5118 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5119 lhs, rhs);
5120 return NULL_TREE;
5123 /* Find ways of folding logical expressions of LHS and RHS:
5124 Try to merge two comparisons to the same innermost item.
5125 Look for range tests like "ch >= '0' && ch <= '9'".
5126 Look for combinations of simple terms on machines with expensive branches
5127 and evaluate the RHS unconditionally.
5129 For example, if we have p->a == 2 && p->b == 4 and we can make an
5130 object large enough to span both A and B, we can do this with a comparison
5131 against the object ANDed with a mask.
5133 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5134 operations to do this with one comparison.
5136 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5137 function and the one above.
5139 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5140 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5142 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5143 two operands.
5145 We return the simplified tree or 0 if no optimization is possible. */
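/* As an illustration (a sketch; the actual trees depend on endianness
   and field layout): given

       struct s { unsigned a : 4; unsigned b : 4; } *p;
       ... p->a == 2 && p->b == 4 ...

   the code below can load the byte containing both fields once and
   compare it against a merged constant, roughly

       (*(unsigned char *) p) == (2 | (4 << 4))

   applying masks when the fields do not fill the mode.  */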
5147 static tree
5148 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5149 tree lhs, tree rhs)
5151 /* If this is the "or" of two comparisons, we can do something if
5152 the comparisons are NE_EXPR. If this is the "and", we can do something
5153 if the comparisons are EQ_EXPR. I.e.,
5154 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5156 WANTED_CODE is this operation code. For single bit fields, we can
5157 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5158 comparison for one-bit fields. */
5160 enum tree_code wanted_code;
5161 enum tree_code lcode, rcode;
5162 tree ll_arg, lr_arg, rl_arg, rr_arg;
5163 tree ll_inner, lr_inner, rl_inner, rr_inner;
5164 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5165 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5166 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5167 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5168 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5169 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5170 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5171 machine_mode lnmode, rnmode;
5172 tree ll_mask, lr_mask, rl_mask, rr_mask;
5173 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5174 tree l_const, r_const;
5175 tree lntype, rntype, result;
5176 HOST_WIDE_INT first_bit, end_bit;
5177 int volatilep;
5179 /* Start by getting the comparison codes. Fail if anything is volatile.
5180 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5181 it were surrounded with a NE_EXPR. */
5183 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5184 return 0;
5186 lcode = TREE_CODE (lhs);
5187 rcode = TREE_CODE (rhs);
5189 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5191 lhs = build2 (NE_EXPR, truth_type, lhs,
5192 build_int_cst (TREE_TYPE (lhs), 0));
5193 lcode = NE_EXPR;
5196 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5198 rhs = build2 (NE_EXPR, truth_type, rhs,
5199 build_int_cst (TREE_TYPE (rhs), 0));
5200 rcode = NE_EXPR;
5203 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5204 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5205 return 0;
5207 ll_arg = TREE_OPERAND (lhs, 0);
5208 lr_arg = TREE_OPERAND (lhs, 1);
5209 rl_arg = TREE_OPERAND (rhs, 0);
5210 rr_arg = TREE_OPERAND (rhs, 1);
5212 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5213 if (simple_operand_p (ll_arg)
5214 && simple_operand_p (lr_arg))
5216 if (operand_equal_p (ll_arg, rl_arg, 0)
5217 && operand_equal_p (lr_arg, rr_arg, 0))
5219 result = combine_comparisons (loc, code, lcode, rcode,
5220 truth_type, ll_arg, lr_arg);
5221 if (result)
5222 return result;
5224 else if (operand_equal_p (ll_arg, rr_arg, 0)
5225 && operand_equal_p (lr_arg, rl_arg, 0))
5227 result = combine_comparisons (loc, code, lcode,
5228 swap_tree_comparison (rcode),
5229 truth_type, ll_arg, lr_arg);
5230 if (result)
5231 return result;
5235 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5236 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5238 /* If the RHS can be evaluated unconditionally and its operands are
5239 simple, it wins to evaluate the RHS unconditionally on machines
5240 with expensive branches. In this case, this isn't a comparison
5241 that can be merged. */
5243 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5244 false) >= 2
5245 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5246 && simple_operand_p (rl_arg)
5247 && simple_operand_p (rr_arg))
5249 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5250 if (code == TRUTH_OR_EXPR
5251 && lcode == NE_EXPR && integer_zerop (lr_arg)
5252 && rcode == NE_EXPR && integer_zerop (rr_arg)
5253 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5254 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5255 return build2_loc (loc, NE_EXPR, truth_type,
5256 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5257 ll_arg, rl_arg),
5258 build_int_cst (TREE_TYPE (ll_arg), 0));
5260 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5261 if (code == TRUTH_AND_EXPR
5262 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5263 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5264 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5265 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5266 return build2_loc (loc, EQ_EXPR, truth_type,
5267 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5268 ll_arg, rl_arg),
5269 build_int_cst (TREE_TYPE (ll_arg), 0));
5272 /* See if the comparisons can be merged. Then get all the parameters for
5273 each side. */
5275 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5276 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5277 return 0;
5279 volatilep = 0;
5280 ll_inner = decode_field_reference (loc, ll_arg,
5281 &ll_bitsize, &ll_bitpos, &ll_mode,
5282 &ll_unsignedp, &ll_reversep, &volatilep,
5283 &ll_mask, &ll_and_mask);
5284 lr_inner = decode_field_reference (loc, lr_arg,
5285 &lr_bitsize, &lr_bitpos, &lr_mode,
5286 &lr_unsignedp, &lr_reversep, &volatilep,
5287 &lr_mask, &lr_and_mask);
5288 rl_inner = decode_field_reference (loc, rl_arg,
5289 &rl_bitsize, &rl_bitpos, &rl_mode,
5290 &rl_unsignedp, &rl_reversep, &volatilep,
5291 &rl_mask, &rl_and_mask);
5292 rr_inner = decode_field_reference (loc, rr_arg,
5293 &rr_bitsize, &rr_bitpos, &rr_mode,
5294 &rr_unsignedp, &rr_reversep, &volatilep,
5295 &rr_mask, &rr_and_mask);
5297 /* The inner operation on the lhs of each comparison must be the
5298 same if we are to be able to do anything.
5299 Then see if we have constants. If not, the same must be true for
5300 the rhs's. */
5301 if (volatilep
5302 || ll_reversep != rl_reversep
5303 || ll_inner == 0 || rl_inner == 0
5304 || ! operand_equal_p (ll_inner, rl_inner, 0))
5305 return 0;
5307 if (TREE_CODE (lr_arg) == INTEGER_CST
5308 && TREE_CODE (rr_arg) == INTEGER_CST)
5310 l_const = lr_arg, r_const = rr_arg;
5311 lr_reversep = ll_reversep;
5313 else if (lr_reversep != rr_reversep
5314 || lr_inner == 0 || rr_inner == 0
5315 || ! operand_equal_p (lr_inner, rr_inner, 0))
5316 return 0;
5317 else
5318 l_const = r_const = 0;
5320 /* If either comparison code is not correct for our logical operation,
5321 fail. However, we can convert a one-bit comparison against zero into
5322 the opposite comparison against that bit being set in the field. */
5324 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5325 if (lcode != wanted_code)
5327 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5329 /* Make the left operand unsigned, since we are only interested
5330 in the value of one bit. Otherwise we are doing the wrong
5331 thing below. */
5332 ll_unsignedp = 1;
5333 l_const = ll_mask;
5335 else
5336 return 0;
5339 /* This is analogous to the code for l_const above. */
5340 if (rcode != wanted_code)
5342 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5344 rl_unsignedp = 1;
5345 r_const = rl_mask;
5347 else
5348 return 0;
5351 /* See if we can find a mode that contains both fields being compared on
5352 the left. If we can't, fail. Otherwise, update all constants and masks
5353 to be relative to a field of that size. */
5354 first_bit = MIN (ll_bitpos, rl_bitpos);
5355 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5356 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5357 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5358 volatilep);
5359 if (lnmode == VOIDmode)
5360 return 0;
5362 lnbitsize = GET_MODE_BITSIZE (lnmode);
5363 lnbitpos = first_bit & ~ (lnbitsize - 1);
5364 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5365 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5367 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5369 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5370 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5373 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5374 size_int (xll_bitpos));
5375 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5376 size_int (xrl_bitpos));
5378 if (l_const)
5380 l_const = fold_convert_loc (loc, lntype, l_const);
5381 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5382 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5383 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5384 fold_build1_loc (loc, BIT_NOT_EXPR,
5385 lntype, ll_mask))))
5387 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5389 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5392 if (r_const)
5394 r_const = fold_convert_loc (loc, lntype, r_const);
5395 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5396 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5397 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5398 fold_build1_loc (loc, BIT_NOT_EXPR,
5399 lntype, rl_mask))))
5401 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5403 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5407 /* If the right sides are not constant, do the same for them. Also,
5408 disallow this optimization if a size or signedness mismatch occurs
5409 between the left and right sides. */
5410 if (l_const == 0)
5412 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5413 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5414 /* Make sure the two fields on the right
5415 correspond to the left without being swapped. */
5416 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5417 return 0;
5419 first_bit = MIN (lr_bitpos, rr_bitpos);
5420 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5421 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5422 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5423 volatilep);
5424 if (rnmode == VOIDmode)
5425 return 0;
5427 rnbitsize = GET_MODE_BITSIZE (rnmode);
5428 rnbitpos = first_bit & ~ (rnbitsize - 1);
5429 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5430 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5432 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5434 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5435 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5438 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5439 rntype, lr_mask),
5440 size_int (xlr_bitpos));
5441 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5442 rntype, rr_mask),
5443 size_int (xrr_bitpos));
5445 /* Make a mask that corresponds to both fields being compared.
5446 Do this for both items being compared. If the operands are the
5447 same size and the bits being compared are in the same position
5448 then we can do this by masking both and comparing the masked
5449 results. */
5450 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5451 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5452 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5454 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5455 ll_unsignedp || rl_unsignedp, ll_reversep);
5456 if (! all_ones_mask_p (ll_mask, lnbitsize))
5457 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5459 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5460 lr_unsignedp || rr_unsignedp, lr_reversep);
5461 if (! all_ones_mask_p (lr_mask, rnbitsize))
5462 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5464 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5467 /* There is still another way we can do something: If both pairs of
5468 fields being compared are adjacent, we may be able to make a wider
5469 field containing them both.
5471 Note that we still must mask the lhs/rhs expressions. Furthermore,
5472 the mask must be shifted to account for the shift done by
5473 make_bit_field_ref. */
5474 if ((ll_bitsize + ll_bitpos == rl_bitpos
5475 && lr_bitsize + lr_bitpos == rr_bitpos)
5476 || (ll_bitpos == rl_bitpos + rl_bitsize
5477 && lr_bitpos == rr_bitpos + rr_bitsize))
5479 tree type;
5481 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5482 ll_bitsize + rl_bitsize,
5483 MIN (ll_bitpos, rl_bitpos),
5484 ll_unsignedp, ll_reversep);
5485 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5486 lr_bitsize + rr_bitsize,
5487 MIN (lr_bitpos, rr_bitpos),
5488 lr_unsignedp, lr_reversep);
5490 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5491 size_int (MIN (xll_bitpos, xrl_bitpos)));
5492 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5493 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5495 /* Convert to the smaller type before masking out unwanted bits. */
5496 type = lntype;
5497 if (lntype != rntype)
5499 if (lnbitsize > rnbitsize)
5501 lhs = fold_convert_loc (loc, rntype, lhs);
5502 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5503 type = rntype;
5505 else if (lnbitsize < rnbitsize)
5507 rhs = fold_convert_loc (loc, lntype, rhs);
5508 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5509 type = lntype;
5513 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5514 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5516 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5517 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5519 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5522 return 0;
5525 /* Handle the case of comparisons with constants. If there is something in
5526 common between the masks, those bits of the constants must be the same.
5527 If not, the condition is always false. Test for this to avoid generating
5528 incorrect code below. */
5529 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5530 if (! integer_zerop (result)
5531 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5532 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5534 if (wanted_code == NE_EXPR)
5536 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5537 return constant_boolean_node (true, truth_type);
5539 else
5541 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5542 return constant_boolean_node (false, truth_type);
5546 /* Construct the expression we will return. First get the component
5547 reference we will make. Unless the mask is all ones for the width of
5548 that field, perform the mask operation. Then compare with the
5549 merged constant. */
5550 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5551 ll_unsignedp || rl_unsignedp, ll_reversep);
5553 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5554 if (! all_ones_mask_p (ll_mask, lnbitsize))
5555 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5557 return build2_loc (loc, wanted_code, truth_type, result,
5558 const_binop (BIT_IOR_EXPR, l_const, r_const));
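/* As a concrete illustration (a sketch, not taken from a testcase):
   given a struct with adjacent bit-fields

     struct s { unsigned a : 4; unsigned b : 4; };

   the merge above can fold "x.a == 3 && x.b == 5" into a single load,
   mask and compare of the containing word, with l_const and r_const
   IORed into one merged constant.  */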
5561 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5562 constant. */
5564 static tree
5565 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5566 tree op0, tree op1)
5568 tree arg0 = op0;
5569 enum tree_code op_code;
5570 tree comp_const;
5571 tree minmax_const;
5572 int consts_equal, consts_lt;
5573 tree inner;
5575 STRIP_SIGN_NOPS (arg0);
5577 op_code = TREE_CODE (arg0);
5578 minmax_const = TREE_OPERAND (arg0, 1);
5579 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5580 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5581 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5582 inner = TREE_OPERAND (arg0, 0);
5584 /* If something does not permit us to optimize, return NULL_TREE. */
5585 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5586 || TREE_CODE (comp_const) != INTEGER_CST
5587 || TREE_OVERFLOW (comp_const)
5588 || TREE_CODE (minmax_const) != INTEGER_CST
5589 || TREE_OVERFLOW (minmax_const))
5590 return NULL_TREE;
5592 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5593 and GT_EXPR, doing the rest with recursive calls using logical
5594 simplifications. */
5595 switch (code)
5597 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5599 tree tem
5600 = optimize_minmax_comparison (loc,
5601 invert_tree_comparison (code, false),
5602 type, op0, op1);
5603 if (tem)
5604 return invert_truthvalue_loc (loc, tem);
5605 return NULL_TREE;
5608 case GE_EXPR:
5609 return
5610 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5611 optimize_minmax_comparison
5612 (loc, EQ_EXPR, type, arg0, comp_const),
5613 optimize_minmax_comparison
5614 (loc, GT_EXPR, type, arg0, comp_const));
5616 case EQ_EXPR:
5617 if (op_code == MAX_EXPR && consts_equal)
5618 /* MAX (X, 0) == 0 -> X <= 0 */
5619 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5621 else if (op_code == MAX_EXPR && consts_lt)
5622 /* MAX (X, 0) == 5 -> X == 5 */
5623 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5625 else if (op_code == MAX_EXPR)
5626 /* MAX (X, 0) == -1 -> false */
5627 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5629 else if (consts_equal)
5630 /* MIN (X, 0) == 0 -> X >= 0 */
5631 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5633 else if (consts_lt)
5634 /* MIN (X, 0) == 5 -> false */
5635 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5637 else
5638 /* MIN (X, 0) == -1 -> X == -1 */
5639 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5641 case GT_EXPR:
5642 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5643 /* MAX (X, 0) > 0 -> X > 0
5644 MAX (X, 0) > 5 -> X > 5 */
5645 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5647 else if (op_code == MAX_EXPR)
5648 /* MAX (X, 0) > -1 -> true */
5649 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5651 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5652 /* MIN (X, 0) > 0 -> false
5653 MIN (X, 0) > 5 -> false */
5654 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5656 else
5657 /* MIN (X, 0) > -1 -> X > -1 */
5658 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5660 default:
5661 return NULL_TREE;
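/* For example, with a signed int X, optimize_minmax_comparison
   rewrites

     MAX (X, 0) == 5   into   X == 5
     MAX (X, 0) > -1   into   1 (always true)
     MIN (X, 0) > 5    into   0 (always false)

   exactly as enumerated in the cases above.  */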
5665 /* T is an integer expression that is being multiplied, divided, or taken a
5666 modulus (CODE says which and what kind of divide or modulus) by a
5667 constant C. See if we can eliminate that operation by folding it with
5668 other operations already in T. WIDE_TYPE, if non-null, is a type that
5669 should be used for the computation if wider than our type.
5671 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5672 (X * 2) + (Y * 4). We must, however, be assured that either the original
5673 expression would not overflow or that overflow is undefined for the type
5674 in the language in question.
5676 If we return a non-null expression, it is an equivalent form of the
5677 original computation, but need not be in the original type.
5679 We set *STRICT_OVERFLOW_P to true if the return value depends on
5680 signed overflow being undefined. Otherwise we do not change
5681 *STRICT_OVERFLOW_P. */
5683 static tree
5684 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5685 bool *strict_overflow_p)
5687 /* To avoid exponential search depth, refuse to allow recursion past
5688 three levels. Beyond that (1) it's highly unlikely that we'll find
5689 something interesting and (2) we've probably processed it before
5690 when we built the inner expression. */
5692 static int depth;
5693 tree ret;
5695 if (depth > 3)
5696 return NULL;
5698 depth++;
5699 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5700 depth--;
5702 return ret;
5705 static tree
5706 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5707 bool *strict_overflow_p)
5709 tree type = TREE_TYPE (t);
5710 enum tree_code tcode = TREE_CODE (t);
5711 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5712 > GET_MODE_SIZE (TYPE_MODE (type)))
5713 ? wide_type : type);
5714 tree t1, t2;
5715 int same_p = tcode == code;
5716 tree op0 = NULL_TREE, op1 = NULL_TREE;
5717 bool sub_strict_overflow_p;
5719 /* Don't deal with constants of zero here; they confuse the code below. */
5720 if (integer_zerop (c))
5721 return NULL_TREE;
5723 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5724 op0 = TREE_OPERAND (t, 0);
5726 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5727 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5729 /* Note that we need not handle conditional operations here since fold
5730 already handles those cases. So just do arithmetic here. */
5731 switch (tcode)
5733 case INTEGER_CST:
5734 /* For a constant, we can always simplify if the operation is a multiply
5735 or (for divide and modulus) if it is a multiple of our constant. */
5736 if (code == MULT_EXPR
5737 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5738 return const_binop (code, fold_convert (ctype, t),
5739 fold_convert (ctype, c));
5740 break;
5742 CASE_CONVERT: case NON_LVALUE_EXPR:
5743 /* If op0 is an expression ... */
5744 if ((COMPARISON_CLASS_P (op0)
5745 || UNARY_CLASS_P (op0)
5746 || BINARY_CLASS_P (op0)
5747 || VL_EXP_CLASS_P (op0)
5748 || EXPRESSION_CLASS_P (op0))
5749 /* ... and has wrapping overflow, and its type is smaller
5750 than ctype, then we cannot pass through as widening. */
5751 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5752 && (TYPE_PRECISION (ctype)
5753 > TYPE_PRECISION (TREE_TYPE (op0))))
5754 /* ... or this is a truncation (t is narrower than op0),
5755 then we cannot pass through this narrowing. */
5756 || (TYPE_PRECISION (type)
5757 < TYPE_PRECISION (TREE_TYPE (op0)))
5758 /* ... or signedness changes for division or modulus,
5759 then we cannot pass through this conversion. */
5760 || (code != MULT_EXPR
5761 && (TYPE_UNSIGNED (ctype)
5762 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5763 /* ... or has undefined overflow while the type it is
5764 converted to has not, we cannot do the operation in the inner type
5765 as that would introduce undefined overflow. */
5766 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5767 && !TYPE_OVERFLOW_UNDEFINED (type))))
5768 break;
5770 /* Pass the constant down and see if we can make a simplification. If
5771 we can, replace this expression with the inner simplification for
5772 possible later conversion to our or some other type. */
5773 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5774 && TREE_CODE (t2) == INTEGER_CST
5775 && !TREE_OVERFLOW (t2)
5776 && (0 != (t1 = extract_muldiv (op0, t2, code,
5777 code == MULT_EXPR
5778 ? ctype : NULL_TREE,
5779 strict_overflow_p))))
5780 return t1;
5781 break;
5783 case ABS_EXPR:
5784 /* If widening the type changes it from signed to unsigned, then we
5785 must avoid building ABS_EXPR itself as unsigned. */
5786 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5788 tree cstype = (*signed_type_for) (ctype);
5789 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5790 != 0)
5792 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5793 return fold_convert (ctype, t1);
5795 break;
5797 /* If the constant is negative, we cannot simplify this. */
5798 if (tree_int_cst_sgn (c) == -1)
5799 break;
5800 /* FALLTHROUGH */
5801 case NEGATE_EXPR:
5802 /* For division and modulus, type can't be unsigned, as e.g.
5803 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5804 For signed types, even with wrapping overflow, this is fine. */
5805 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5806 break;
5807 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5808 != 0)
5809 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5810 break;
5812 case MIN_EXPR: case MAX_EXPR:
5813 /* If widening the type changes the signedness, then we can't perform
5814 this optimization as that changes the result. */
5815 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5816 break;
5818 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5819 sub_strict_overflow_p = false;
5820 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5821 &sub_strict_overflow_p)) != 0
5822 && (t2 = extract_muldiv (op1, c, code, wide_type,
5823 &sub_strict_overflow_p)) != 0)
5825 if (tree_int_cst_sgn (c) < 0)
5826 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5827 if (sub_strict_overflow_p)
5828 *strict_overflow_p = true;
5829 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5830 fold_convert (ctype, t2));
5832 break;
5834 case LSHIFT_EXPR: case RSHIFT_EXPR:
5835 /* If the second operand is constant, this is a multiplication
5836 or floor division by a power of two, so we can treat it that
5837 way unless the multiplier or divisor overflows. Signed
5838 left-shift overflow is implementation-defined rather than
5839 undefined in C90, so do not convert signed left shift into
5840 multiplication. */
5841 if (TREE_CODE (op1) == INTEGER_CST
5842 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5843 /* const_binop may not detect overflow correctly,
5844 so check for it explicitly here. */
5845 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5846 && 0 != (t1 = fold_convert (ctype,
5847 const_binop (LSHIFT_EXPR,
5848 size_one_node,
5849 op1)))
5850 && !TREE_OVERFLOW (t1))
5851 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5852 ? MULT_EXPR : FLOOR_DIV_EXPR,
5853 ctype,
5854 fold_convert (ctype, op0),
5855 t1),
5856 c, code, wide_type, strict_overflow_p);
5857 break;
5859 case PLUS_EXPR: case MINUS_EXPR:
5860 /* See if we can eliminate the operation on both sides. If we can, we
5861 can return a new PLUS or MINUS. If we can't, the only remaining
5862 cases where we can do anything are if the second operand is a
5863 constant. */
5864 sub_strict_overflow_p = false;
5865 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5866 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5867 if (t1 != 0 && t2 != 0
5868 && (code == MULT_EXPR
5869 /* If not multiplication, we can only do this if both operands
5870 are divisible by c. */
5871 || (multiple_of_p (ctype, op0, c)
5872 && multiple_of_p (ctype, op1, c))))
5874 if (sub_strict_overflow_p)
5875 *strict_overflow_p = true;
5876 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5877 fold_convert (ctype, t2));
5880 /* If this was a subtraction, negate OP1 and set it to be an addition.
5881 This simplifies the logic below. */
5882 if (tcode == MINUS_EXPR)
5884 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5885 /* If OP1 was not easily negatable, the constant may be OP0. */
5886 if (TREE_CODE (op0) == INTEGER_CST)
5888 tree tem = op0;
5889 op0 = op1;
5890 op1 = tem;
5891 tem = t1;
5892 t1 = t2;
5893 t2 = tem;
5897 if (TREE_CODE (op1) != INTEGER_CST)
5898 break;
5900 /* If either OP1 or C is negative, this optimization is not safe for
5901 some of the division and remainder types while for others we need
5902 to change the code. */
5903 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5905 if (code == CEIL_DIV_EXPR)
5906 code = FLOOR_DIV_EXPR;
5907 else if (code == FLOOR_DIV_EXPR)
5908 code = CEIL_DIV_EXPR;
5909 else if (code != MULT_EXPR
5910 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5911 break;
5914 /* If it's a multiply or a division/modulus operation of a multiple
5915 of our constant, do the operation and verify it doesn't overflow. */
5916 if (code == MULT_EXPR
5917 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5919 op1 = const_binop (code, fold_convert (ctype, op1),
5920 fold_convert (ctype, c));
5921 /* We allow the constant to overflow with wrapping semantics. */
5922 if (op1 == 0
5923 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5924 break;
5926 else
5927 break;
5929 /* If we have an unsigned type, we cannot widen the operation since it
5930 will change the result if the original computation overflowed. */
5931 if (TYPE_UNSIGNED (ctype) && ctype != type)
5932 break;
5934 /* If we were able to eliminate our operation from the first side,
5935 apply our operation to the second side and reform the PLUS. */
5936 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5937 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5939 /* The last case is if we are a multiply. In that case, we can
5940 apply the distributive law to commute the multiply and addition
5941 if the multiplication of the constants doesn't overflow
5942 and overflow is defined. With undefined overflow
5943 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5944 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5945 return fold_build2 (tcode, ctype,
5946 fold_build2 (code, ctype,
5947 fold_convert (ctype, op0),
5948 fold_convert (ctype, c)),
5949 op1);
5951 break;
5953 case MULT_EXPR:
5954 /* We have a special case here if we are doing something like
5955 (C * 8) % 4 since we know that's zero. */
5956 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5957 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5958 /* If the multiplication can overflow we cannot optimize this. */
5959 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5960 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5961 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5963 *strict_overflow_p = true;
5964 return omit_one_operand (type, integer_zero_node, op0);
5967 /* ... fall through ... */
5969 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5970 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5971 /* If we can extract our operation from the LHS, do so and return a
5972 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5973 do something only if the second operand is a constant. */
5974 if (same_p
5975 && (t1 = extract_muldiv (op0, c, code, wide_type,
5976 strict_overflow_p)) != 0)
5977 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5978 fold_convert (ctype, op1));
5979 else if (tcode == MULT_EXPR && code == MULT_EXPR
5980 && (t1 = extract_muldiv (op1, c, code, wide_type,
5981 strict_overflow_p)) != 0)
5982 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5983 fold_convert (ctype, t1));
5984 else if (TREE_CODE (op1) != INTEGER_CST)
5985 return 0;
5987 /* If these are the same operation types, we can associate them
5988 assuming no overflow. */
5989 if (tcode == code)
5991 bool overflow_p = false;
5992 bool overflow_mul_p;
5993 signop sign = TYPE_SIGN (ctype);
5994 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5995 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5996 if (overflow_mul_p
5997 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5998 overflow_p = true;
5999 if (!overflow_p)
6000 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6001 wide_int_to_tree (ctype, mul));
6004 /* If these operations "cancel" each other, we have the main
6005 optimizations of this pass, which occur when either constant is a
6006 multiple of the other, in which case we replace this with an
6007 operation in either CODE or TCODE.
6009 If we have an unsigned type, we cannot do this since it will change
6010 the result if the original computation overflowed. */
6011 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6012 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6013 || (tcode == MULT_EXPR
6014 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6015 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6016 && code != MULT_EXPR)))
6018 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6020 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6021 *strict_overflow_p = true;
6022 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6023 fold_convert (ctype,
6024 const_binop (TRUNC_DIV_EXPR,
6025 op1, c)));
6027 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6029 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6030 *strict_overflow_p = true;
6031 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6032 fold_convert (ctype,
6033 const_binop (TRUNC_DIV_EXPR,
6034 c, op1)));
6037 break;
6039 default:
6040 break;
6043 return 0;
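/* For example, assuming signed overflow is undefined for the type,
   extract_muldiv can rewrite (X * 8 + Y * 16) / 4 into X * 2 + Y * 4:
   the division is pushed through the PLUS_EXPR and cancelled against
   each multiplication, setting *STRICT_OVERFLOW_P because the result
   relies on signed overflow being undefined.  */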
6046 /* Return a node which has the indicated constant VALUE (either 0 or
6047 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6048 and is of the indicated TYPE. */
6050 tree
6051 constant_boolean_node (bool value, tree type)
6053 if (type == integer_type_node)
6054 return value ? integer_one_node : integer_zero_node;
6055 else if (type == boolean_type_node)
6056 return value ? boolean_true_node : boolean_false_node;
6057 else if (TREE_CODE (type) == VECTOR_TYPE)
6058 return build_vector_from_val (type,
6059 build_int_cst (TREE_TYPE (type),
6060 value ? -1 : 0));
6061 else
6062 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6066 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6067 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6068 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6069 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6070 COND is the first argument to CODE; otherwise (as in the example
6071 given here), it is the second argument. TYPE is the type of the
6072 original expression. Return NULL_TREE if no simplification is
6073 possible. */
6075 static tree
6076 fold_binary_op_with_conditional_arg (location_t loc,
6077 enum tree_code code,
6078 tree type, tree op0, tree op1,
6079 tree cond, tree arg, int cond_first_p)
6081 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6082 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6083 tree test, true_value, false_value;
6084 tree lhs = NULL_TREE;
6085 tree rhs = NULL_TREE;
6086 enum tree_code cond_code = COND_EXPR;
6088 if (TREE_CODE (cond) == COND_EXPR
6089 || TREE_CODE (cond) == VEC_COND_EXPR)
6091 test = TREE_OPERAND (cond, 0);
6092 true_value = TREE_OPERAND (cond, 1);
6093 false_value = TREE_OPERAND (cond, 2);
6094 /* If this operand is an expression that throws, then it does not make
6095 sense to try to perform a logical or arithmetic operation
6096 involving it. */
6097 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6098 lhs = true_value;
6099 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6100 rhs = false_value;
6102 else
6104 tree testtype = TREE_TYPE (cond);
6105 test = cond;
6106 true_value = constant_boolean_node (true, testtype);
6107 false_value = constant_boolean_node (false, testtype);
6110 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6111 cond_code = VEC_COND_EXPR;
6113 /* This transformation is only worthwhile if we don't have to wrap ARG
6114 in a SAVE_EXPR and the operation can be simplified without recursing
6115 on at least one of the branches once it's pushed inside the COND_EXPR. */
6116 if (!TREE_CONSTANT (arg)
6117 && (TREE_SIDE_EFFECTS (arg)
6118 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6119 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6120 return NULL_TREE;
6122 arg = fold_convert_loc (loc, arg_type, arg);
6123 if (lhs == 0)
6125 true_value = fold_convert_loc (loc, cond_type, true_value);
6126 if (cond_first_p)
6127 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6128 else
6129 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6131 if (rhs == 0)
6133 false_value = fold_convert_loc (loc, cond_type, false_value);
6134 if (cond_first_p)
6135 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6136 else
6137 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6140 /* Check that we have simplified at least one of the branches. */
6141 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6142 return NULL_TREE;
6144 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
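/* For example, when A is a constant, "A + (b ? x : y)" becomes
   "b ? (A + x) : (A + y)", simplifying each arm; a bare comparison
   operand such as "(x < y)" is first treated as "(x < y) ? 1 : 0"
   so that the same distribution applies.  */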
6148 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6150 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6151 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6152 ADDEND is the same as X.
6154 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6155 and finite. The problematic cases are when X is zero, and its mode
6156 has signed zeros. In the case of rounding towards -infinity,
6157 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6158 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6160 bool
6161 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6163 if (!real_zerop (addend))
6164 return false;
6166 /* Don't allow the fold with -fsignaling-nans. */
6167 if (HONOR_SNANS (TYPE_MODE (type)))
6168 return false;
6170 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6171 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6172 return true;
6174 /* In a vector or complex, we would need to check the sign of all zeros. */
6175 if (TREE_CODE (addend) != REAL_CST)
6176 return false;
6178 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6179 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6180 negate = !negate;
6182 /* The mode has signed zeros, and we have to honor their sign.
6183 In this situation, there is only one case we can return true for.
6184 X - 0 is the same as X unless rounding towards -infinity is
6185 supported. */
6186 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
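/* For example, "x - 0.0" folds to "x" under default rounding (the
   fold is refused when sign-dependent rounding may be in effect,
   since rounding towards -infinity makes 0.0 - 0.0 yield -0.0),
   while "x + 0.0" is refused whenever signed zeros are honored,
   because (-0.0) + 0.0 is +0.0, not -0.0.  */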
6189 /* Subroutine of fold() that checks comparisons of built-in math
6190 functions against real constants.
6192 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6193 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6194 is the type of the result and ARG0 and ARG1 are the operands of the
6195 comparison. ARG1 must be a TREE_REAL_CST.
6197 The function returns the constant folded tree if a simplification
6198 can be made, and NULL_TREE otherwise. */
6200 static tree
6201 fold_mathfn_compare (location_t loc,
6202 enum built_in_function fcode, enum tree_code code,
6203 tree type, tree arg0, tree arg1)
6205 REAL_VALUE_TYPE c;
6207 if (BUILTIN_SQRT_P (fcode))
6209 tree arg = CALL_EXPR_ARG (arg0, 0);
6210 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6212 c = TREE_REAL_CST (arg1);
6213 if (REAL_VALUE_NEGATIVE (c))
6215 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6216 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6217 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6219 /* sqrt(x) > y is always true, if y is negative and we
6220 don't care about NaNs, i.e. negative values of x. */
6221 if (code == NE_EXPR || !HONOR_NANS (mode))
6222 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6224 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6225 return fold_build2_loc (loc, GE_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), dconst0));
6228 else if (code == GT_EXPR || code == GE_EXPR)
6230 REAL_VALUE_TYPE c2;
6232 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6233 real_convert (&c2, mode, &c2);
6235 if (REAL_VALUE_ISINF (c2))
6237 /* sqrt(x) > y is x == +Inf, when y is very large. */
6238 if (HONOR_INFINITIES (mode))
6239 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6240 build_real (TREE_TYPE (arg), c2));
6242 /* sqrt(x) > y is always false, when y is very large
6243 and we don't care about infinities. */
6244 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6247 /* sqrt(x) > c is the same as x > c*c. */
6248 return fold_build2_loc (loc, code, type, arg,
6249 build_real (TREE_TYPE (arg), c2));
6251 else if (code == LT_EXPR || code == LE_EXPR)
6253 REAL_VALUE_TYPE c2;
6255 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6256 real_convert (&c2, mode, &c2);
6258 if (REAL_VALUE_ISINF (c2))
6260 /* sqrt(x) < y is always true, when y is a very large
6261 value and we don't care about NaNs or Infinities. */
6262 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6263 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6265 /* sqrt(x) < y is x != +Inf when y is very large and we
6266 don't care about NaNs. */
6267 if (! HONOR_NANS (mode))
6268 return fold_build2_loc (loc, NE_EXPR, type, arg,
6269 build_real (TREE_TYPE (arg), c2));
6271 /* sqrt(x) < y is x >= 0 when y is very large and we
6272 don't care about Infinities. */
6273 if (! HONOR_INFINITIES (mode))
6274 return fold_build2_loc (loc, GE_EXPR, type, arg,
6275 build_real (TREE_TYPE (arg), dconst0));
6277 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6278 arg = save_expr (arg);
6279 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6280 fold_build2_loc (loc, GE_EXPR, type, arg,
6281 build_real (TREE_TYPE (arg),
6282 dconst0)),
6283 fold_build2_loc (loc, NE_EXPR, type, arg,
6284 build_real (TREE_TYPE (arg),
6285 c2)));
6288 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6289 if (! HONOR_NANS (mode))
6290 return fold_build2_loc (loc, code, type, arg,
6291 build_real (TREE_TYPE (arg), c2));
6293 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6294 arg = save_expr (arg);
6295 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6296 fold_build2_loc (loc, GE_EXPR, type, arg,
6297 build_real (TREE_TYPE (arg),
6298 dconst0)),
6299 fold_build2_loc (loc, code, type, arg,
6300 build_real (TREE_TYPE (arg),
6301 c2)));
6305 return NULL_TREE;
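/* For example, "sqrt (x) > 3.0" folds to "x > 9.0", and when NaNs
   are honored "sqrt (x) < 3.0" folds to "x >= 0.0 && x < 9.0",
   matching the c*c squaring in the cases above.  */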
6308 /* Subroutine of fold() that optimizes comparisons against Infinities,
6309 either +Inf or -Inf.
6311 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6312 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6313 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6315 The function returns the constant folded tree if a simplification
6316 can be made, and NULL_TREE otherwise. */
6318 static tree
6319 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6320 tree arg0, tree arg1)
6322 machine_mode mode;
6323 REAL_VALUE_TYPE max;
6324 tree temp;
6325 bool neg;
6327 mode = TYPE_MODE (TREE_TYPE (arg0));
6329 /* For negative infinity swap the sense of the comparison. */
6330 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6331 if (neg)
6332 code = swap_tree_comparison (code);
6334 switch (code)
6336 case GT_EXPR:
6337 /* x > +Inf is always false, if we ignore sNaNs. */
6338 if (HONOR_SNANS (mode))
6339 return NULL_TREE;
6340 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6342 case LE_EXPR:
6343 /* x <= +Inf is always true, if we don't care about NaNs. */
6344 if (! HONOR_NANS (mode))
6345 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6347 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6348 arg0 = save_expr (arg0);
6349 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6351 case EQ_EXPR:
6352 case GE_EXPR:
6353 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6354 real_maxval (&max, neg, mode);
6355 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6356 arg0, build_real (TREE_TYPE (arg0), max));
6358 case LT_EXPR:
6359 /* x < +Inf is always equal to x <= DBL_MAX. */
6360 real_maxval (&max, neg, mode);
6361 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6362 arg0, build_real (TREE_TYPE (arg0), max));
6364 case NE_EXPR:
6365 /* x != +Inf is always equal to !(x > DBL_MAX). */
6366 real_maxval (&max, neg, mode);
6367 if (! HONOR_NANS (mode))
6368 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6369 arg0, build_real (TREE_TYPE (arg0), max));
6371 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6372 arg0, build_real (TREE_TYPE (arg0), max));
6373 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6375 default:
6376 break;
6379 return NULL_TREE;
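/* For example, for a double x, "x < __builtin_inf ()" folds to
   "x <= DBL_MAX" and "x >= __builtin_inf ()" to "x > DBL_MAX",
   per the real_maxval cases above; for -Inf the sense of the
   comparison is swapped first.  */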
6382 /* Subroutine of fold() that optimizes comparisons of a division by
6383 a nonzero integer constant against an integer constant, i.e.
6384 X/C1 op C2.
6386 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6387 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6388 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6390 The function returns the constant folded tree if a simplification
6391 can be made, and NULL_TREE otherwise. */
6393 static tree
6394 fold_div_compare (location_t loc,
6395 enum tree_code code, tree type, tree arg0, tree arg1)
6397 tree prod, tmp, hi, lo;
6398 tree arg00 = TREE_OPERAND (arg0, 0);
6399 tree arg01 = TREE_OPERAND (arg0, 1);
6400 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6401 bool neg_overflow = false;
6402 bool overflow;
6404 /* We have to do this the hard way to detect unsigned overflow.
6405 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6406 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6407 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6408 neg_overflow = false;
6410 if (sign == UNSIGNED)
6412 tmp = int_const_binop (MINUS_EXPR, arg01,
6413 build_int_cst (TREE_TYPE (arg01), 1));
6414 lo = prod;
6416 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6417 val = wi::add (prod, tmp, sign, &overflow);
6418 hi = force_fit_type (TREE_TYPE (arg00), val,
6419 -1, overflow | TREE_OVERFLOW (prod));
6421 else if (tree_int_cst_sgn (arg01) >= 0)
6423 tmp = int_const_binop (MINUS_EXPR, arg01,
6424 build_int_cst (TREE_TYPE (arg01), 1));
6425 switch (tree_int_cst_sgn (arg1))
6427 case -1:
6428 neg_overflow = true;
6429 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6430 hi = prod;
6431 break;
6433 case 0:
6434 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6435 hi = tmp;
6436 break;
6438 case 1:
6439 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6440 lo = prod;
6441 break;
6443 default:
6444 gcc_unreachable ();
6447 else
6449 /* A negative divisor reverses the relational operators. */
6450 code = swap_tree_comparison (code);
6452 tmp = int_const_binop (PLUS_EXPR, arg01,
6453 build_int_cst (TREE_TYPE (arg01), 1));
6454 switch (tree_int_cst_sgn (arg1))
6456 case -1:
6457 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6458 lo = prod;
6459 break;
6461 case 0:
6462 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6463 lo = tmp;
6464 break;
6466 case 1:
6467 neg_overflow = true;
6468 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6469 hi = prod;
6470 break;
6472 default:
6473 gcc_unreachable ();
6477 switch (code)
6479 case EQ_EXPR:
6480 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6481 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6482 if (TREE_OVERFLOW (hi))
6483 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6484 if (TREE_OVERFLOW (lo))
6485 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6486 return build_range_check (loc, type, arg00, 1, lo, hi);
6488 case NE_EXPR:
6489 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6490 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6491 if (TREE_OVERFLOW (hi))
6492 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6493 if (TREE_OVERFLOW (lo))
6494 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6495 return build_range_check (loc, type, arg00, 0, lo, hi);
6497 case LT_EXPR:
6498 if (TREE_OVERFLOW (lo))
6500 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6501 return omit_one_operand_loc (loc, type, tmp, arg00);
6503 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6505 case LE_EXPR:
6506 if (TREE_OVERFLOW (hi))
6508 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6509 return omit_one_operand_loc (loc, type, tmp, arg00);
6511 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6513 case GT_EXPR:
6514 if (TREE_OVERFLOW (hi))
6516 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6517 return omit_one_operand_loc (loc, type, tmp, arg00);
6519 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6521 case GE_EXPR:
6522 if (TREE_OVERFLOW (lo))
6524 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6525 return omit_one_operand_loc (loc, type, tmp, arg00);
6527 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6529 default:
6530 break;
6533 return NULL_TREE;
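/* For example, for unsigned x, "x / 4 == 3" folds to the range check
   12 <= x && x <= 15 (lo = 12, hi = 15), and "x / 4 > 3" folds to
   "x > 15".  */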
6537 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6538 equality/inequality test, then return a simplified form of the test
6539 using a sign test. Otherwise return NULL. TYPE is the desired
6540 result type. */
6542 static tree
6543 fold_single_bit_test_into_sign_test (location_t loc,
6544 enum tree_code code, tree arg0, tree arg1,
6545 tree result_type)
6547 /* If this is testing a single bit, we can optimize the test. */
6548 if ((code == NE_EXPR || code == EQ_EXPR)
6549 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6550 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6552 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6553 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6554 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6556 if (arg00 != NULL_TREE
6557 /* This is only a win if casting to a signed type is cheap,
6558 i.e. when arg00's type is not a partial mode. */
6559 && TYPE_PRECISION (TREE_TYPE (arg00))
6560 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6562 tree stype = signed_type_for (TREE_TYPE (arg00));
6563 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6564 result_type,
6565 fold_convert_loc (loc, stype, arg00),
6566 build_int_cst (stype, 0));
6570 return NULL_TREE;
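/* For example, with a 32-bit unsigned x, "(x & 0x80000000) != 0"
   tests the sign bit and folds to "(int) x < 0", and
   "(x & 0x80000000) == 0" folds to "(int) x >= 0".  */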
6573 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6574 equality/inequality test, then return a simplified form of
6575 the test using shifts and logical operations. Otherwise return
6576 NULL. TYPE is the desired result type. */
6578 tree
6579 fold_single_bit_test (location_t loc, enum tree_code code,
6580 tree arg0, tree arg1, tree result_type)
6582 /* If this is testing a single bit, we can optimize the test. */
6583 if ((code == NE_EXPR || code == EQ_EXPR)
6584 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6585 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6587 tree inner = TREE_OPERAND (arg0, 0);
6588 tree type = TREE_TYPE (arg0);
6589 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6590 machine_mode operand_mode = TYPE_MODE (type);
6591 int ops_unsigned;
6592 tree signed_type, unsigned_type, intermediate_type;
6593 tree tem, one;
6595 /* First, see if we can fold the single bit test into a sign-bit
6596 test. */
6597 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6598 result_type);
6599 if (tem)
6600 return tem;
6602 /* Otherwise we have (A & C) != 0 where C is a single bit,
6603 convert that into ((A >> C2) & 1), where C2 = log2(C).
6604 Similarly for (A & C) == 0. */
6606 /* If INNER is a right shift of a constant and it plus BITNUM does
6607 not overflow, adjust BITNUM and INNER. */
6608 if (TREE_CODE (inner) == RSHIFT_EXPR
6609 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6610 && bitnum < TYPE_PRECISION (type)
6611 && wi::ltu_p (TREE_OPERAND (inner, 1),
6612 TYPE_PRECISION (type) - bitnum))
6614 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6615 inner = TREE_OPERAND (inner, 0);
6618 /* If we are going to be able to omit the AND below, we must do our
6619 operations as unsigned. If we must use the AND, we have a choice.
6620 Normally unsigned is faster, but for some machines signed is. */
6621 #ifdef LOAD_EXTEND_OP
6622 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6623 && !flag_syntax_only) ? 0 : 1;
6624 #else
6625 ops_unsigned = 1;
6626 #endif
6628 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6629 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6630 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6631 inner = fold_convert_loc (loc, intermediate_type, inner);
6633 if (bitnum != 0)
6634 inner = build2 (RSHIFT_EXPR, intermediate_type,
6635 inner, size_int (bitnum));
6637 one = build_int_cst (intermediate_type, 1);
6639 if (code == EQ_EXPR)
6640 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6642 /* Put the AND last so it can combine with more things. */
6643 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6645 /* Make sure to return the proper type. */
6646 inner = fold_convert_loc (loc, result_type, inner);
6648 return inner;
6650 return NULL_TREE;
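/* For example, "(x & 8) != 0" becomes "(x >> 3) & 1" in an
   appropriately signed or unsigned intermediate type, and
   "(x & 8) == 0" additionally XORs the shifted bit with 1 before
   the final AND.  */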
6653 /* Check whether we are allowed to reorder operands arg0 and arg1,
6654 such that the evaluation of arg1 occurs before arg0. */
6656 static bool
6657 reorder_operands_p (const_tree arg0, const_tree arg1)
6659 if (! flag_evaluation_order)
6660 return true;
6661 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6662 return true;
6663 return ! TREE_SIDE_EFFECTS (arg0)
6664 && ! TREE_SIDE_EFFECTS (arg1);
6667 /* Test whether it is preferable to swap two operands, ARG0 and
6668 ARG1, for example because ARG0 is an integer constant and ARG1
6669 isn't. If REORDER is true, only recommend swapping if we can
6670 evaluate the operands in reverse order. */
6672 bool
6673 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6675 if (CONSTANT_CLASS_P (arg1))
6676 return 0;
6677 if (CONSTANT_CLASS_P (arg0))
6678 return 1;
6680 STRIP_SIGN_NOPS (arg0);
6681 STRIP_SIGN_NOPS (arg1);
6683 if (TREE_CONSTANT (arg1))
6684 return 0;
6685 if (TREE_CONSTANT (arg0))
6686 return 1;
6688 if (reorder && flag_evaluation_order
6689 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6690 return 0;
6692 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6693 for commutative and comparison operators. Ensuring a canonical
6694 form allows the optimizers to find additional redundancies without
6695 having to explicitly check for both orderings. */
6696 if (TREE_CODE (arg0) == SSA_NAME
6697 && TREE_CODE (arg1) == SSA_NAME
6698 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6699 return 1;
6701 /* Put SSA_NAMEs last. */
6702 if (TREE_CODE (arg1) == SSA_NAME)
6703 return 0;
6704 if (TREE_CODE (arg0) == SSA_NAME)
6705 return 1;
6707 /* Put variables last. */
6708 if (DECL_P (arg1))
6709 return 0;
6710 if (DECL_P (arg0))
6711 return 1;
6713 return 0;
6716 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6717 ARG0 is extended to a wider type. */
6719 static tree
6720 fold_widened_comparison (location_t loc, enum tree_code code,
6721 tree type, tree arg0, tree arg1)
6723 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6724 tree arg1_unw;
6725 tree shorter_type, outer_type;
6726 tree min, max;
6727 bool above, below;
6729 if (arg0_unw == arg0)
6730 return NULL_TREE;
6731 shorter_type = TREE_TYPE (arg0_unw);
6733 #ifdef HAVE_canonicalize_funcptr_for_compare
6734 /* Disable this optimization if we're casting a function pointer
6735 type on targets that require function pointer canonicalization. */
6736 if (HAVE_canonicalize_funcptr_for_compare
6737 && TREE_CODE (shorter_type) == POINTER_TYPE
6738 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6739 return NULL_TREE;
6740 #endif
6742 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6743 return NULL_TREE;
6745 arg1_unw = get_unwidened (arg1, NULL_TREE);
6747 /* If possible, express the comparison in the shorter mode. */
6748 if ((code == EQ_EXPR || code == NE_EXPR
6749 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6750 && (TREE_TYPE (arg1_unw) == shorter_type
6751 || ((TYPE_PRECISION (shorter_type)
6752 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6753 && (TYPE_UNSIGNED (shorter_type)
6754 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6755 || (TREE_CODE (arg1_unw) == INTEGER_CST
6756 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6757 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6758 && int_fits_type_p (arg1_unw, shorter_type))))
6759 return fold_build2_loc (loc, code, type, arg0_unw,
6760 fold_convert_loc (loc, shorter_type, arg1_unw));
6762 if (TREE_CODE (arg1_unw) != INTEGER_CST
6763 || TREE_CODE (shorter_type) != INTEGER_TYPE
6764 || !int_fits_type_p (arg1_unw, shorter_type))
6765 return NULL_TREE;
6767 /* If we are comparing with an integer that does not fit into the range
6768 of the shorter type, the result is known. */
6769 outer_type = TREE_TYPE (arg1_unw);
6770 min = lower_bound_in_type (outer_type, shorter_type);
6771 max = upper_bound_in_type (outer_type, shorter_type);
6773 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6774 max, arg1_unw));
6775 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6776 arg1_unw, min));
6778 switch (code)
6780 case EQ_EXPR:
6781 if (above || below)
6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6783 break;
6785 case NE_EXPR:
6786 if (above || below)
6787 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6788 break;
6790 case LT_EXPR:
6791 case LE_EXPR:
6792 if (above)
6793 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6794 else if (below)
6795 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6797 case GT_EXPR:
6798 case GE_EXPR:
6799 if (above)
6800 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6801 else if (below)
6802 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6804 default:
6805 break;
6808 return NULL_TREE;
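/* For example, with "signed char c", the comparison "(int) c == 1000"
   folds to constant false, since 1000 is above the upper bound of
   signed char, while "(int) c == 100" is folded back to the narrower
   "c == 100".  */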
6811 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6812 ARG0 just the signedness is changed. */
6814 static tree
6815 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6816 tree arg0, tree arg1)
6818 tree arg0_inner;
6819 tree inner_type, outer_type;
6821 if (!CONVERT_EXPR_P (arg0))
6822 return NULL_TREE;
6824 outer_type = TREE_TYPE (arg0);
6825 arg0_inner = TREE_OPERAND (arg0, 0);
6826 inner_type = TREE_TYPE (arg0_inner);
6828 #ifdef HAVE_canonicalize_funcptr_for_compare
6829 /* Disable this optimization if we're casting a function pointer
6830 type on targets that require function pointer canonicalization. */
6831 if (HAVE_canonicalize_funcptr_for_compare
6832 && TREE_CODE (inner_type) == POINTER_TYPE
6833 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6834 return NULL_TREE;
6835 #endif
6837 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6838 return NULL_TREE;
6840 if (TREE_CODE (arg1) != INTEGER_CST
6841 && !(CONVERT_EXPR_P (arg1)
6842 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6843 return NULL_TREE;
6845 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6846 && code != NE_EXPR
6847 && code != EQ_EXPR)
6848 return NULL_TREE;
6850 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6851 return NULL_TREE;
6853 if (TREE_CODE (arg1) == INTEGER_CST)
6854 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6855 TREE_OVERFLOW (arg1));
6856 else
6857 arg1 = fold_convert_loc (loc, inner_type, arg1);
6859 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6863 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6864 means A >= Y && A != MAX, but in this case we know that
6865 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6867 static tree
6868 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6870 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6872 if (TREE_CODE (bound) == LT_EXPR)
6873 a = TREE_OPERAND (bound, 0);
6874 else if (TREE_CODE (bound) == GT_EXPR)
6875 a = TREE_OPERAND (bound, 1);
6876 else
6877 return NULL_TREE;
6879 typea = TREE_TYPE (a);
6880 if (!INTEGRAL_TYPE_P (typea)
6881 && !POINTER_TYPE_P (typea))
6882 return NULL_TREE;
6884 if (TREE_CODE (ineq) == LT_EXPR)
6886 a1 = TREE_OPERAND (ineq, 1);
6887 y = TREE_OPERAND (ineq, 0);
6889 else if (TREE_CODE (ineq) == GT_EXPR)
6891 a1 = TREE_OPERAND (ineq, 0);
6892 y = TREE_OPERAND (ineq, 1);
6894 else
6895 return NULL_TREE;
6897 if (TREE_TYPE (a1) != typea)
6898 return NULL_TREE;
6900 if (POINTER_TYPE_P (typea))
6902 /* Convert the pointer types into integers before taking the difference. */
6903 tree ta = fold_convert_loc (loc, ssizetype, a);
6904 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6905 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6907 else
6908 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6910 if (!diff || !integer_onep (diff))
6911 return NULL_TREE;
6913 return fold_build2_loc (loc, GE_EXPR, type, a, y);
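/* For example, given BOUND "a < n" and INEQ "a + 1 > y", the result
   is "a >= y": since a < n rules out a being the maximum value of
   its type, a + 1 cannot wrap, so a + 1 > y is equivalent to
   a >= y.  */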
6916 /* Fold a sum or difference of at least one multiplication.
6917 Returns the folded tree or NULL if no simplification could be made. */
6919 static tree
6920 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6921 tree arg0, tree arg1)
6923 tree arg00, arg01, arg10, arg11;
6924 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6926 /* (A * C) +- (B * C) -> (A+-B) * C.
6927 (A * C) +- A -> A * (C+-1).
6928 We are most concerned about the case where C is a constant,
6929 but other combinations show up during loop reduction. Since
6930 it is not difficult, try all four possibilities. */
6932 if (TREE_CODE (arg0) == MULT_EXPR)
6934 arg00 = TREE_OPERAND (arg0, 0);
6935 arg01 = TREE_OPERAND (arg0, 1);
6937 else if (TREE_CODE (arg0) == INTEGER_CST)
6939 arg00 = build_one_cst (type);
6940 arg01 = arg0;
6942 else
6944 /* We cannot generate constant 1 for fract. */
6945 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6946 return NULL_TREE;
6947 arg00 = arg0;
6948 arg01 = build_one_cst (type);
6950 if (TREE_CODE (arg1) == MULT_EXPR)
6952 arg10 = TREE_OPERAND (arg1, 0);
6953 arg11 = TREE_OPERAND (arg1, 1);
6955 else if (TREE_CODE (arg1) == INTEGER_CST)
6957 arg10 = build_one_cst (type);
6958 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6959 the purpose of this canonicalization. */
6960 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6961 && negate_expr_p (arg1)
6962 && code == PLUS_EXPR)
6964 arg11 = negate_expr (arg1);
6965 code = MINUS_EXPR;
6967 else
6968 arg11 = arg1;
6970 else
6972 /* We cannot generate constant 1 for fract. */
6973 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6974 return NULL_TREE;
6975 arg10 = arg1;
6976 arg11 = build_one_cst (type);
6978 same = NULL_TREE;
6980 if (operand_equal_p (arg01, arg11, 0))
6981 same = arg01, alt0 = arg00, alt1 = arg10;
6982 else if (operand_equal_p (arg00, arg10, 0))
6983 same = arg00, alt0 = arg01, alt1 = arg11;
6984 else if (operand_equal_p (arg00, arg11, 0))
6985 same = arg00, alt0 = arg01, alt1 = arg10;
6986 else if (operand_equal_p (arg01, arg10, 0))
6987 same = arg01, alt0 = arg00, alt1 = arg11;
6989 /* No identical multiplicands; see if we can find a common
6990 power-of-two factor in non-power-of-two multiplies. This
6991 can help in multi-dimensional array access. */
6992 else if (tree_fits_shwi_p (arg01)
6993 && tree_fits_shwi_p (arg11))
6995 HOST_WIDE_INT int01, int11, tmp;
6996 bool swap = false;
6997 tree maybe_same;
6998 int01 = tree_to_shwi (arg01);
6999 int11 = tree_to_shwi (arg11);
7001 /* Move min of absolute values to int11. */
7002 if (absu_hwi (int01) < absu_hwi (int11))
7004 tmp = int01, int01 = int11, int11 = tmp;
7005 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7006 maybe_same = arg01;
7007 swap = true;
7009 else
7010 maybe_same = arg11;
7012 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7013 /* The remainder should not be a constant, otherwise we
7014 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7015 increase the number of multiplications necessary. */
7016 && TREE_CODE (arg10) != INTEGER_CST)
7018 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7019 build_int_cst (TREE_TYPE (arg00),
7020 int01 / int11));
7021 alt1 = arg10;
7022 same = maybe_same;
7023 if (swap)
7024 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7028 if (same)
7029 return fold_build2_loc (loc, MULT_EXPR, type,
7030 fold_build2_loc (loc, code, type,
7031 fold_convert_loc (loc, type, alt0),
7032 fold_convert_loc (loc, type, alt1)),
7033 fold_convert_loc (loc, type, same));
7035 return NULL_TREE;
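/* For example, "i * 4 + j * 4" folds to "(i + j) * 4" via the
   identical-multiplicand cases, and "i * 12 + j * 4" folds to
   "(i * 3 + j) * 4" via the common power-of-two factor case.  */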
7038 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7039 specified by EXPR into the buffer PTR of length LEN bytes.
7040 If REVERSE is true, the INTEGER_CST is interpreted in reverse
7041 storage order wrt the target order.
7042 Return the number of bytes placed in the buffer, or zero
7043 upon failure. */
7045 static int
7046 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off,
7047 bool reverse)
7049 tree type = TREE_TYPE (expr);
7050 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7051 int byte, offset, word, words;
7052 unsigned char value;
7054 if ((off == -1 && total_bytes > len)
7055 || off >= total_bytes)
7056 return 0;
7057 if (off == -1)
7058 off = 0;
7059 words = total_bytes / UNITS_PER_WORD;
7061 for (byte = 0; byte < total_bytes; byte++)
7063 int bitpos = byte * BITS_PER_UNIT;
7064 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7065 number of bytes. */
7066 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7068 if (total_bytes > UNITS_PER_WORD)
7070 word = byte / UNITS_PER_WORD;
7071 if (reverse ? !WORDS_BIG_ENDIAN : WORDS_BIG_ENDIAN)
7072 word = (words - 1) - word;
7073 offset = word * UNITS_PER_WORD;
7074 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
7075 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7076 else
7077 offset += byte % UNITS_PER_WORD;
7079 else
7081 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
7082 offset = (total_bytes - 1) - byte;
7083 else
7084 offset = byte;
7086 if (offset >= off
7087 && offset - off < len)
7088 ptr[offset - off] = value;
7090 return MIN (len, total_bytes - off);
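/* As a sketch of the byte-order handling: on a little-endian target
   with REVERSE false, encoding the 32-bit INTEGER_CST 0x01020304
   places bytes 04 03 02 01 in PTR; on a big-endian target, or with
   REVERSE true on a little-endian one, it places 01 02 03 04.  */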
7094 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7095 specified by EXPR into the buffer PTR of length LEN bytes.
7096 If REVERSE is true, the FIXED_CST is interpreted in reverse
7097 storage order wrt the target order.
7098 Return the number of bytes placed in the buffer, or zero
7099 upon failure. */
7101 static int
7102 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off,
7103 bool reverse)
7105 tree type = TREE_TYPE (expr);
7106 machine_mode mode = TYPE_MODE (type);
7107 int total_bytes = GET_MODE_SIZE (mode);
7108 FIXED_VALUE_TYPE value;
7109 tree i_value, i_type;
7111 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7112 return 0;
7114 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7116 if (NULL_TREE == i_type
7117 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7118 return 0;
7120 value = TREE_FIXED_CST (expr);
7121 i_value = double_int_to_tree (i_type, value.data);
7123 return native_encode_int (i_value, ptr, len, off, reverse);
7127 /* Subroutine of native_encode_expr. Encode the REAL_CST
7128 specified by EXPR into the buffer PTR of length LEN bytes.
7129 If REVERSE is true, the REAL_CST is interpreted in reverse
7130 storage order wrt the target order.
7131 Return the number of bytes placed in the buffer, or zero
7132 upon failure. */
7134 static int
7135 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off,
7136 bool reverse)
7138 tree type = TREE_TYPE (expr);
7139 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7140 int byte, offset, word, words, bitpos;
7141 unsigned char value;
7143 /* There are always 32 bits in each long, no matter the size of
7144 the host's long. We handle floating point representations with
7145 up to 192 bits. */
7146 long tmp[6];
7148 if ((off == -1 && total_bytes > len)
7149 || off >= total_bytes)
7150 return 0;
7151 if (off == -1)
7152 off = 0;
7153 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7155 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7157 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7158 bitpos += BITS_PER_UNIT)
7160 byte = (bitpos / BITS_PER_UNIT) & 3;
7161 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7163 if (UNITS_PER_WORD < 4)
7165 word = byte / UNITS_PER_WORD;
7166 if (reverse ? !WORDS_BIG_ENDIAN : WORDS_BIG_ENDIAN)
7167 word = (words - 1) - word;
7168 offset = word * UNITS_PER_WORD;
7169 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
7170 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7171 else
7172 offset += byte % UNITS_PER_WORD;
7174 else
7176 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
7177 offset = 3 - byte;
7178 else
7179 offset = byte;
7181 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7182 if (offset >= off
7183 && offset - off < len)
7184 ptr[offset - off] = value;
7186 return MIN (len, total_bytes - off);
7189 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7190 specified by EXPR into the buffer PTR of length LEN bytes.
7191 If REVERSE is true, the COMPLEX_CST is interpreted in reverse
7192 storage order wrt the target order.
7193 Return the number of bytes placed in the buffer, or zero
7194 upon failure. */
7196 static int
7197 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off,
7198 bool reverse)
7200 int rsize, isize;
7201 tree part;
7203 part = TREE_REALPART (expr);
7204 rsize = native_encode_expr (part, ptr, len, off, reverse);
7205 if (off == -1
7206 && rsize == 0)
7207 return 0;
7208 part = TREE_IMAGPART (expr);
7209 if (off != -1)
7210 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7211 isize = native_encode_expr (part, ptr+rsize, len-rsize, off, reverse);
7212 if (off == -1
7213 && isize != rsize)
7214 return 0;
7215 return rsize + isize;
7219 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7220 specified by EXPR into the buffer PTR of length LEN bytes.
7221 If REVERSE is true, the VECTOR_CST is interpreted in reverse
7222 storage order wrt the target order.
7223 Return the number of bytes placed in the buffer, or zero
7224 upon failure. */
7226 static int
7227 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off,
7228 bool reverse)
7230 unsigned i, count;
7231 int size, offset;
7232 tree itype, elem;
7234 offset = 0;
7235 count = VECTOR_CST_NELTS (expr);
7236 itype = TREE_TYPE (TREE_TYPE (expr));
7237 size = GET_MODE_SIZE (TYPE_MODE (itype));
7238 for (i = 0; i < count; i++)
7240 if (off >= size)
7242 off -= size;
7243 continue;
7245 elem = VECTOR_CST_ELT (expr, i);
7246 int res = native_encode_expr (elem, ptr+offset, len-offset, off, reverse);
7247 if ((off == -1 && res != size)
7248 || res == 0)
7249 return 0;
7250 offset += res;
7251 if (offset >= len)
7252 return offset;
7253 if (off != -1)
7254 off = 0;
7256 return offset;
7260 /* Subroutine of native_encode_expr. Encode the STRING_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
7265 static int
7266 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7268 tree type = TREE_TYPE (expr);
7269 HOST_WIDE_INT total_bytes;
7271 if (TREE_CODE (type) != ARRAY_TYPE
7272 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7273 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7274 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7275 return 0;
7276 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7277 if ((off == -1 && total_bytes > len)
7278 || off >= total_bytes)
7279 return 0;
7280 if (off == -1)
7281 off = 0;
7282 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7284 int written = 0;
7285 if (off < TREE_STRING_LENGTH (expr))
7287 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7288 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7290 memset (ptr + written, 0,
7291 MIN (total_bytes - written, len - written));
7293 else
7294 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7295 return MIN (total_bytes - off, len);
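/* Editor's sketch (illustrative addition, not part of the original
   source): the zero-padding logic above for the simple OFF == 0 case,
   written against plain buffers.  memcpy/memset come from <string.h>,
   which this file already gets via system.h.  */

static int
demo_encode_padded_string (unsigned char *ptr, int len,
                           const char *str, int str_len, int total_bytes)
{
  if (str_len < MIN (total_bytes, len))
    {
      /* Copy what the string provides, then zero-fill the tail up to
         the array size (or the buffer size, whichever is smaller).  */
      int written = MIN (len, str_len);
      memcpy (ptr, str, written);
      memset (ptr + written, 0, MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, str, MIN (total_bytes, len));
  return MIN (total_bytes, len);
}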
7299 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7300 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7301 buffer PTR of length LEN bytes. If OFF is not -1 then start
7302 the encoding at byte offset OFF and encode at most LEN bytes.
7303 If REVERSE is true, the _CST object is interpreted in reverse
7304 storage order wrt the target order.
7305 Return the number of bytes placed in the buffer, or zero upon failure. */
7307 int
7308 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off,
7309 bool reverse)
7311 switch (TREE_CODE (expr))
7313 case INTEGER_CST:
7314 return native_encode_int (expr, ptr, len, off, reverse);
7316 case REAL_CST:
7317 return native_encode_real (expr, ptr, len, off, reverse);
7319 case FIXED_CST:
7320 return native_encode_fixed (expr, ptr, len, off, reverse);
7322 case COMPLEX_CST:
7323 return native_encode_complex (expr, ptr, len, off, reverse);
7325 case VECTOR_CST:
7326 return native_encode_vector (expr, ptr, len, off, reverse);
7328 case STRING_CST:
7329 return native_encode_string (expr, ptr, len, off);
7331 default:
7332 return 0;
7337 /* Subroutine of native_interpret_expr. Interpret the contents of
7338 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7339 If the buffer cannot be interpreted, return NULL_TREE. */
7341 static tree
7342 native_interpret_int (tree type, const unsigned char *ptr, int len)
7344 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7346 if (total_bytes > len
7347 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7348 return NULL_TREE;
7350 wide_int result = wi::from_buffer (ptr, total_bytes);
7352 return wide_int_to_tree (type, result);
7356 /* Subroutine of native_interpret_expr. Interpret the contents of
7357 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7358 If the buffer cannot be interpreted, return NULL_TREE. */
7360 static tree
7361 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7363 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7364 double_int result;
7365 FIXED_VALUE_TYPE fixed_value;
7367 if (total_bytes > len
7368 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7369 return NULL_TREE;
7371 result = double_int::from_buffer (ptr, total_bytes);
7372 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7374 return build_fixed (type, fixed_value);
7378 /* Subroutine of native_interpret_expr. Interpret the contents of
7379 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7380 If the buffer cannot be interpreted, return NULL_TREE. */
7382 static tree
7383 native_interpret_real (tree type, const unsigned char *ptr, int len)
7385 machine_mode mode = TYPE_MODE (type);
7386 int total_bytes = GET_MODE_SIZE (mode);
7387 int byte, offset, word, words, bitpos;
7388 unsigned char value;
7389 /* There are always 32 bits in each long, no matter the size of
7390 the host's long. We handle floating point representations with
7391 up to 192 bits. */
7392 REAL_VALUE_TYPE r;
7393 long tmp[6];
7395 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7396 if (total_bytes > len || total_bytes > 24)
7397 return NULL_TREE;
7398 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7400 memset (tmp, 0, sizeof (tmp));
7401 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7402 bitpos += BITS_PER_UNIT)
7404 byte = (bitpos / BITS_PER_UNIT) & 3;
7405 if (UNITS_PER_WORD < 4)
7407 word = byte / UNITS_PER_WORD;
7408 if (WORDS_BIG_ENDIAN)
7409 word = (words - 1) - word;
7410 offset = word * UNITS_PER_WORD;
7411 if (BYTES_BIG_ENDIAN)
7412 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7413 else
7414 offset += byte % UNITS_PER_WORD;
7416 else
7417 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7418 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7420 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7423 real_from_target (&r, tmp, mode);
7424 return build_real (type, r);
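/* Editor's sketch (illustrative addition, not part of the original
   source): native_interpret_real undoes native_encode_real, so
   encoding a REAL_CST and interpreting the buffer must reproduce the
   constant bit-for-bit.  A host-side analogue of that round trip:  */

static bool
demo_real_roundtrip (double d)
{
  unsigned char buf[sizeof (double)];
  double back;
  memcpy (buf, &d, sizeof buf);        /* "Encode" into the buffer.  */
  memcpy (&back, buf, sizeof back);    /* "Interpret" it back.  */
  return memcmp (&back, &d, sizeof d) == 0;   /* Always true.  */
}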
7428 /* Subroutine of native_interpret_expr. Interpret the contents of
7429 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7430 If the buffer cannot be interpreted, return NULL_TREE. */
7432 static tree
7433 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7435 tree etype, rpart, ipart;
7436 int size;
7438 etype = TREE_TYPE (type);
7439 size = GET_MODE_SIZE (TYPE_MODE (etype));
7440 if (size * 2 > len)
7441 return NULL_TREE;
7442 rpart = native_interpret_expr (etype, ptr, size);
7443 if (!rpart)
7444 return NULL_TREE;
7445 ipart = native_interpret_expr (etype, ptr+size, size);
7446 if (!ipart)
7447 return NULL_TREE;
7448 return build_complex (type, rpart, ipart);
7452 /* Subroutine of native_interpret_expr. Interpret the contents of
7453 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7454 If the buffer cannot be interpreted, return NULL_TREE. */
7456 static tree
7457 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7459 tree etype, elem;
7460 int i, size, count;
7461 tree *elements;
7463 etype = TREE_TYPE (type);
7464 size = GET_MODE_SIZE (TYPE_MODE (etype));
7465 count = TYPE_VECTOR_SUBPARTS (type);
7466 if (size * count > len)
7467 return NULL_TREE;
7469 elements = XALLOCAVEC (tree, count);
7470 for (i = count - 1; i >= 0; i--)
7472 elem = native_interpret_expr (etype, ptr+(i*size), size);
7473 if (!elem)
7474 return NULL_TREE;
7475 elements[i] = elem;
7477 return build_vector (type, elements);
7481 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7482 the buffer PTR of length LEN as a constant of type TYPE. For
7483 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7484 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7485 return NULL_TREE. */
7487 tree
7488 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7490 switch (TREE_CODE (type))
7492 case INTEGER_TYPE:
7493 case ENUMERAL_TYPE:
7494 case BOOLEAN_TYPE:
7495 case POINTER_TYPE:
7496 case REFERENCE_TYPE:
7497 return native_interpret_int (type, ptr, len);
7499 case REAL_TYPE:
7500 return native_interpret_real (type, ptr, len);
7502 case FIXED_POINT_TYPE:
7503 return native_interpret_fixed (type, ptr, len);
7505 case COMPLEX_TYPE:
7506 return native_interpret_complex (type, ptr, len);
7508 case VECTOR_TYPE:
7509 return native_interpret_vector (type, ptr, len);
7511 default:
7512 return NULL_TREE;
7516 /* Returns true if we can interpret the contents of a native encoding
7517 as TYPE. */
7519 static bool
7520 can_native_interpret_type_p (tree type)
7522 switch (TREE_CODE (type))
7524 case INTEGER_TYPE:
7525 case ENUMERAL_TYPE:
7526 case BOOLEAN_TYPE:
7527 case POINTER_TYPE:
7528 case REFERENCE_TYPE:
7529 case FIXED_POINT_TYPE:
7530 case REAL_TYPE:
7531 case COMPLEX_TYPE:
7532 case VECTOR_TYPE:
7533 return true;
7534 default:
7535 return false;
7539 /* Fold the flipping of storage order for constant expression EXPR
7540 at compile-time. If we're unable to perform the flipping, return
7541 NULL_TREE. */
7543 tree
7544 fold_flip_storage_order (tree expr)
7546 /* We support up to 512-bit values (for V8DFmode). */
7547 unsigned char buffer[64];
7548 int len;
7550 /* Check that the host and target are sane. */
7551 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7552 return NULL_TREE;
7554 len = native_encode_expr (expr, buffer, sizeof (buffer), -1, true);
7555 if (len == 0)
7556 return NULL_TREE;
7558 return native_interpret_expr (TREE_TYPE (expr), buffer, len);
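/* Editor's sketch (illustrative addition, not part of the original
   source): for a single 4-byte scalar, encoding with REVERSE set and
   re-interpreting natively amounts to reversing the bytes:  */

static unsigned int
demo_flip_storage_order_32 (unsigned int x)
{
  return (x >> 24)
         | ((x >> 8) & 0x0000ff00)
         | ((x << 8) & 0x00ff0000)
         | (x << 24);
}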
7562 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7563 TYPE at compile-time. If we're unable to perform the conversion
7564 return NULL_TREE. */
7566 static tree
7567 fold_view_convert_expr (tree type, tree expr)
7569 /* We support up to 512-bit values (for V8DFmode). */
7570 unsigned char buffer[64];
7571 int len;
7573 /* Check that the host and target are sane. */
7574 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7575 return NULL_TREE;
7577 len = native_encode_expr (expr, buffer, sizeof (buffer));
7578 if (len == 0)
7579 return NULL_TREE;
7581 return native_interpret_expr (type, buffer, len);
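/* Editor's sketch (illustrative addition, not part of the original
   source): fold_view_convert_expr is the compile-time analogue of this
   kind of host-side type pun (assuming 32-bit float and int):  */

static unsigned int
demo_view_convert_float_bits (float f)
{
  unsigned int u;
  memcpy (&u, &f, sizeof u);   /* Reinterpret the bytes; no value conversion.  */
  return u;
}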
7584 /* Build an expression for the address of T. Folds away INDIRECT_REF
7585 to avoid confusing the gimplify process. */
7587 tree
7588 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7590 /* The size of the object is not relevant when talking about its address. */
7591 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7592 t = TREE_OPERAND (t, 0);
7594 if (TREE_CODE (t) == INDIRECT_REF)
7596 t = TREE_OPERAND (t, 0);
7598 if (TREE_TYPE (t) != ptrtype)
7599 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7601 else if (TREE_CODE (t) == MEM_REF
7602 && integer_zerop (TREE_OPERAND (t, 1)))
7603 return TREE_OPERAND (t, 0);
7604 else if (TREE_CODE (t) == MEM_REF
7605 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7606 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7607 TREE_OPERAND (t, 0),
7608 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7609 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7611 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7613 if (TREE_TYPE (t) != ptrtype)
7614 t = fold_convert_loc (loc, ptrtype, t);
7616 else
7617 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7619 return t;
7622 /* Build an expression for the address of T. */
7624 tree
7625 build_fold_addr_expr_loc (location_t loc, tree t)
7627 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7629 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7632 static bool vec_cst_ctor_to_array (tree, tree *);
7634 /* Fold a unary expression of code CODE and type TYPE with operand
7635 OP0. Return the folded expression if folding is successful.
7636 Otherwise, return NULL_TREE. */
7638 tree
7639 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7641 tree tem;
7642 tree arg0;
7643 enum tree_code_class kind = TREE_CODE_CLASS (code);
7645 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7646 && TREE_CODE_LENGTH (code) == 1);
7648 tem = generic_simplify (loc, code, type, op0);
7649 if (tem)
7650 return tem;
7652 arg0 = op0;
7653 if (arg0)
7655 if (CONVERT_EXPR_CODE_P (code)
7656 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7658 /* Don't use STRIP_NOPS, because signedness of argument type
7659 matters. */
7660 STRIP_SIGN_NOPS (arg0);
7662 else
7664 /* Strip any conversions that don't change the mode. This
7665 is safe for every expression, except for a comparison
7666 expression because its signedness is derived from its
7667 operands.
7669 Note that this is done as an internal manipulation within
7670 the constant folder, in order to find the simplest
7671 representation of the arguments so that their form can be
7672 studied. In any case, the appropriate type conversions
7673 should be put back in the tree that will get out of the
7674 constant folder. */
7675 STRIP_NOPS (arg0);
7679 if (TREE_CODE_CLASS (code) == tcc_unary)
7681 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7682 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7683 fold_build1_loc (loc, code, type,
7684 fold_convert_loc (loc, TREE_TYPE (op0),
7685 TREE_OPERAND (arg0, 1))));
7686 else if (TREE_CODE (arg0) == COND_EXPR)
7688 tree arg01 = TREE_OPERAND (arg0, 1);
7689 tree arg02 = TREE_OPERAND (arg0, 2);
7690 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7691 arg01 = fold_build1_loc (loc, code, type,
7692 fold_convert_loc (loc,
7693 TREE_TYPE (op0), arg01));
7694 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7695 arg02 = fold_build1_loc (loc, code, type,
7696 fold_convert_loc (loc,
7697 TREE_TYPE (op0), arg02));
7698 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7699 arg01, arg02);
7701 /* If this was a conversion, and all we did was to move it
7702 inside the COND_EXPR, bring it back out. But leave it if
7703 it is a conversion from integer to integer and the
7704 result precision is no wider than a word since such a
7705 conversion is cheap and may be optimized away by combine,
7706 while it couldn't if it were outside the COND_EXPR. Then return
7707 so we don't get into an infinite recursion loop taking the
7708 conversion out and then back in. */
7710 if ((CONVERT_EXPR_CODE_P (code)
7711 || code == NON_LVALUE_EXPR)
7712 && TREE_CODE (tem) == COND_EXPR
7713 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7714 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7715 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7716 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7717 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7718 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7719 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7720 && (INTEGRAL_TYPE_P
7721 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7722 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7723 || flag_syntax_only))
7724 tem = build1_loc (loc, code, type,
7725 build3 (COND_EXPR,
7726 TREE_TYPE (TREE_OPERAND
7727 (TREE_OPERAND (tem, 1), 0)),
7728 TREE_OPERAND (tem, 0),
7729 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7730 TREE_OPERAND (TREE_OPERAND (tem, 2),
7731 0)));
7732 return tem;
7736 switch (code)
7738 case NON_LVALUE_EXPR:
7739 if (!maybe_lvalue_p (op0))
7740 return fold_convert_loc (loc, type, op0);
7741 return NULL_TREE;
7743 CASE_CONVERT:
7744 case FLOAT_EXPR:
7745 case FIX_TRUNC_EXPR:
7746 if (COMPARISON_CLASS_P (op0))
7748 /* If we have (type) (a CMP b) and type is an integral type, return
7749 new expression involving the new type. Canonicalize
7750 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7751 non-integral type.
7752 Do not fold the result, as that would not simplify further;
7753 folding it again also results in infinite recursion. */
7754 if (TREE_CODE (type) == BOOLEAN_TYPE)
7755 return build2_loc (loc, TREE_CODE (op0), type,
7756 TREE_OPERAND (op0, 0),
7757 TREE_OPERAND (op0, 1));
7758 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7759 && TREE_CODE (type) != VECTOR_TYPE)
7760 return build3_loc (loc, COND_EXPR, type, op0,
7761 constant_boolean_node (true, type),
7762 constant_boolean_node (false, type));
7765 /* Handle cases of two conversions in a row. */
7766 if (CONVERT_EXPR_P (op0))
7768 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7769 tree inter_type = TREE_TYPE (op0);
7770 int inside_int = INTEGRAL_TYPE_P (inside_type);
7771 int inside_ptr = POINTER_TYPE_P (inside_type);
7772 int inside_float = FLOAT_TYPE_P (inside_type);
7773 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7774 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7775 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7776 int inter_int = INTEGRAL_TYPE_P (inter_type);
7777 int inter_ptr = POINTER_TYPE_P (inter_type);
7778 int inter_float = FLOAT_TYPE_P (inter_type);
7779 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7780 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7781 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7782 int final_int = INTEGRAL_TYPE_P (type);
7783 int final_ptr = POINTER_TYPE_P (type);
7784 int final_float = FLOAT_TYPE_P (type);
7785 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7786 unsigned int final_prec = TYPE_PRECISION (type);
7787 int final_unsignedp = TYPE_UNSIGNED (type);
7789 /* In addition to the cases of two conversions in a row
7790 handled below, if we are converting something to its own
7791 type via an object of identical or wider precision, neither
7792 conversion is needed. */
7793 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7794 && (((inter_int || inter_ptr) && final_int)
7795 || (inter_float && final_float))
7796 && inter_prec >= final_prec)
7797 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7799 /* Likewise, if the intermediate and initial types are either both
7800 float or both integer, we don't need the middle conversion if the
7801 former is wider than the latter and doesn't change the signedness
7802 (for integers). Avoid this if the final type is a pointer since
7803 then we sometimes need the middle conversion. Likewise if the
7804 final type has a precision not equal to the size of its mode. */
7805 if (((inter_int && inside_int)
7806 || (inter_float && inside_float)
7807 || (inter_vec && inside_vec))
7808 && inter_prec >= inside_prec
7809 && (inter_float || inter_vec
7810 || inter_unsignedp == inside_unsignedp)
7811 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7812 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7813 && ! final_ptr
7814 && (! final_vec || inter_prec == inside_prec))
7815 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7817 /* If we have a sign-extension of a zero-extended value, we can
7818 replace that by a single zero-extension. Likewise if the
7819 final conversion does not change precision we can drop the
7820 intermediate conversion. */
7821 if (inside_int && inter_int && final_int
7822 && ((inside_prec < inter_prec && inter_prec < final_prec
7823 && inside_unsignedp && !inter_unsignedp)
7824 || final_prec == inter_prec))
7825 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7827 /* Two conversions in a row are not needed unless:
7828 - some conversion is floating-point (overstrict for now), or
7829 - some conversion is a vector (overstrict for now), or
7830 - the intermediate type is narrower than both initial and
7831 final, or
7832 - the intermediate type and innermost type differ in signedness,
7833 and the outermost type is wider than the intermediate, or
7834 - the initial type is a pointer type and the precisions of the
7835 intermediate and final types differ, or
7836 - the final type is a pointer type and the precisions of the
7837 initial and intermediate types differ. */
7838 if (! inside_float && ! inter_float && ! final_float
7839 && ! inside_vec && ! inter_vec && ! final_vec
7840 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7841 && ! (inside_int && inter_int
7842 && inter_unsignedp != inside_unsignedp
7843 && inter_prec < final_prec)
7844 && ((inter_unsignedp && inter_prec > inside_prec)
7845 == (final_unsignedp && final_prec > inter_prec))
7846 && ! (inside_ptr && inter_prec != final_prec)
7847 && ! (final_ptr && inside_prec != inter_prec)
7848 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7849 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7850 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
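/* Editor's note (illustrative, not part of the original source): one
   instance of the rules above is that a sign-extension of a
   zero-extended value collapses to a single zero-extension; in C
   terms, for an unsigned char X,
       (int) (unsigned short) X == (int) X
   holds for every value of X, so the intermediate cast is dropped.  */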
7853 /* Handle (T *)&A.B.C for A being of type T and B and C
7854 living at offset zero. This occurs frequently in
7855 C++ upcasting and then accessing the base. */
7856 if (TREE_CODE (op0) == ADDR_EXPR
7857 && POINTER_TYPE_P (type)
7858 && handled_component_p (TREE_OPERAND (op0, 0)))
7860 HOST_WIDE_INT bitsize, bitpos;
7861 tree offset;
7862 machine_mode mode;
7863 int unsignedp, reversep, volatilep;
7864 tree base
7865 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7866 &offset, &mode, &unsignedp, &reversep,
7867 &volatilep, false);
7868 /* If the reference was to a (constant) zero offset, we can use
7869 the address of the base if it has the same base type
7870 as the result type and the pointer type is unqualified. */
7871 if (! offset && bitpos == 0
7872 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7873 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7874 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7875 return fold_convert_loc (loc, type,
7876 build_fold_addr_expr_loc (loc, base));
7879 if (TREE_CODE (op0) == MODIFY_EXPR
7880 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7881 /* Detect assigning a bitfield. */
7882 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7883 && DECL_BIT_FIELD
7884 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7886 /* Don't leave an assignment inside a conversion
7887 unless assigning a bitfield. */
7888 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7889 /* First do the assignment, then return converted constant. */
7890 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7891 TREE_NO_WARNING (tem) = 1;
7892 TREE_USED (tem) = 1;
7893 return tem;
7896 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7897 constant (if x has signed type, the sign bit cannot be set
7898 in c). This folds extension into the BIT_AND_EXPR.
7899 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7900 very likely don't have maximal range for their precision and this
7901 transformation effectively doesn't preserve non-maximal ranges. */
7902 if (TREE_CODE (type) == INTEGER_TYPE
7903 && TREE_CODE (op0) == BIT_AND_EXPR
7904 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7906 tree and_expr = op0;
7907 tree and0 = TREE_OPERAND (and_expr, 0);
7908 tree and1 = TREE_OPERAND (and_expr, 1);
7909 int change = 0;
7911 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7912 || (TYPE_PRECISION (type)
7913 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7914 change = 1;
7915 else if (TYPE_PRECISION (TREE_TYPE (and1))
7916 <= HOST_BITS_PER_WIDE_INT
7917 && tree_fits_uhwi_p (and1))
7919 unsigned HOST_WIDE_INT cst;
7921 cst = tree_to_uhwi (and1);
7922 cst &= HOST_WIDE_INT_M1U
7923 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7924 change = (cst == 0);
7925 #ifdef LOAD_EXTEND_OP
7926 if (change
7927 && !flag_syntax_only
7928 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7929 == ZERO_EXTEND))
7931 tree uns = unsigned_type_for (TREE_TYPE (and0));
7932 and0 = fold_convert_loc (loc, uns, and0);
7933 and1 = fold_convert_loc (loc, uns, and1);
7935 #endif
7937 if (change)
7939 tem = force_fit_type (type, wi::to_widest (and1), 0,
7940 TREE_OVERFLOW (and1));
7941 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7942 fold_convert_loc (loc, type, and0), tem);
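/* Editor's note (illustrative, not part of the original source): with
   the sign bit of the mask clear, widening commutes with the AND; for
   an unsigned char X,
       (int) (X & 0x3f) == ((int) X) & 0x3f
   for every value of X, which is why the extension can be folded into
   the BIT_AND_EXPR.  */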
7946 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7947 when one of the new casts will fold away. Conservatively we assume
7948 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7949 if (POINTER_TYPE_P (type)
7950 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7951 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7952 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7953 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7954 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7956 tree arg00 = TREE_OPERAND (arg0, 0);
7957 tree arg01 = TREE_OPERAND (arg0, 1);
7959 return fold_build_pointer_plus_loc
7960 (loc, fold_convert_loc (loc, type, arg00), arg01);
7963 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7964 of the same precision, and X is an integer type not narrower than
7965 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7966 if (INTEGRAL_TYPE_P (type)
7967 && TREE_CODE (op0) == BIT_NOT_EXPR
7968 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7969 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7970 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7972 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7973 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7974 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7975 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7976 fold_convert_loc (loc, type, tem));
7979 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7980 type of X and Y (integer types only). */
7981 if (INTEGRAL_TYPE_P (type)
7982 && TREE_CODE (op0) == MULT_EXPR
7983 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7984 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7986 /* Be careful not to introduce new overflows. */
7987 tree mult_type;
7988 if (TYPE_OVERFLOW_WRAPS (type))
7989 mult_type = type;
7990 else
7991 mult_type = unsigned_type_for (type);
7993 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7995 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7996 fold_convert_loc (loc, mult_type,
7997 TREE_OPERAND (op0, 0)),
7998 fold_convert_loc (loc, mult_type,
7999 TREE_OPERAND (op0, 1)));
8000 return fold_convert_loc (loc, type, tem);
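/* Editor's note (illustrative, not part of the original source): the
   unsigned intermediate type makes the narrowed multiply safe; in C
   terms, for int operands x and y,
       (short) (unsigned short) ((unsigned int) x * (unsigned int) y)
   wraps modulo 2^16 without undefined behavior, whereas the signed
   product x * y could overflow.  (The final conversion to short is
   implementation-defined rather than undefined, and GCC defines it as
   modulo reduction.)  */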
8004 tem = fold_convert_const (code, type, arg0);
8005 return tem ? tem : NULL_TREE;
8007 case ADDR_SPACE_CONVERT_EXPR:
8008 if (integer_zerop (arg0))
8009 return fold_convert_const (code, type, arg0);
8010 return NULL_TREE;
8012 case FIXED_CONVERT_EXPR:
8013 tem = fold_convert_const (code, type, arg0);
8014 return tem ? tem : NULL_TREE;
8016 case VIEW_CONVERT_EXPR:
8017 if (TREE_CODE (op0) == MEM_REF)
8019 tem = fold_build2_loc (loc, MEM_REF, type,
8020 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8021 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8022 return tem;
8025 return fold_view_convert_expr (type, op0);
8027 case NEGATE_EXPR:
8028 tem = fold_negate_expr (loc, arg0);
8029 if (tem)
8030 return fold_convert_loc (loc, type, tem);
8031 return NULL_TREE;
8033 case ABS_EXPR:
8034 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8035 return fold_abs_const (arg0, type);
8036 /* Convert fabs((double)float) into (double)fabsf(float). */
8037 else if (TREE_CODE (arg0) == NOP_EXPR
8038 && TREE_CODE (type) == REAL_TYPE)
8040 tree targ0 = strip_float_extensions (arg0);
8041 if (targ0 != arg0)
8042 return fold_convert_loc (loc, type,
8043 fold_build1_loc (loc, ABS_EXPR,
8044 TREE_TYPE (targ0),
8045 targ0));
8047 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8048 else if (TREE_CODE (arg0) == ABS_EXPR)
8049 return arg0;
8051 /* Strip sign ops from argument. */
8052 if (TREE_CODE (type) == REAL_TYPE)
8054 tem = fold_strip_sign_ops (arg0);
8055 if (tem)
8056 return fold_build1_loc (loc, ABS_EXPR, type,
8057 fold_convert_loc (loc, type, tem));
8059 return NULL_TREE;
8061 case CONJ_EXPR:
8062 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8063 return fold_convert_loc (loc, type, arg0);
8064 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8066 tree itype = TREE_TYPE (type);
8067 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8068 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8069 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8070 negate_expr (ipart));
8072 if (TREE_CODE (arg0) == COMPLEX_CST)
8074 tree itype = TREE_TYPE (type);
8075 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8076 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8077 return build_complex (type, rpart, negate_expr (ipart));
8079 if (TREE_CODE (arg0) == CONJ_EXPR)
8080 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8081 return NULL_TREE;
8083 case BIT_NOT_EXPR:
8084 if (TREE_CODE (arg0) == INTEGER_CST)
8085 return fold_not_const (arg0, type);
8086 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8087 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8088 /* Convert ~ (-A) to A - 1. */
8089 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8090 return fold_build2_loc (loc, MINUS_EXPR, type,
8091 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8092 build_int_cst (type, 1));
8093 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8094 else if (INTEGRAL_TYPE_P (type)
8095 && ((TREE_CODE (arg0) == MINUS_EXPR
8096 && integer_onep (TREE_OPERAND (arg0, 1)))
8097 || (TREE_CODE (arg0) == PLUS_EXPR
8098 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8099 return fold_build1_loc (loc, NEGATE_EXPR, type,
8100 fold_convert_loc (loc, type,
8101 TREE_OPERAND (arg0, 0)));
8102 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8103 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8104 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8105 fold_convert_loc (loc, type,
8106 TREE_OPERAND (arg0, 0)))))
8107 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8108 fold_convert_loc (loc, type,
8109 TREE_OPERAND (arg0, 1)));
8110 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8111 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8112 fold_convert_loc (loc, type,
8113 TREE_OPERAND (arg0, 1)))))
8114 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8115 fold_convert_loc (loc, type,
8116 TREE_OPERAND (arg0, 0)), tem);
8117 /* Perform BIT_NOT_EXPR on each element individually. */
8118 else if (TREE_CODE (arg0) == VECTOR_CST)
8120 tree *elements;
8121 tree elem;
8122 unsigned count = VECTOR_CST_NELTS (arg0), i;
8124 elements = XALLOCAVEC (tree, count);
8125 for (i = 0; i < count; i++)
8127 elem = VECTOR_CST_ELT (arg0, i);
8128 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8129 if (elem == NULL_TREE)
8130 break;
8131 elements[i] = elem;
8133 if (i == count)
8134 return build_vector (type, elements);
8136 else if (COMPARISON_CLASS_P (arg0)
8137 && (VECTOR_TYPE_P (type)
8138 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8140 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8141 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8142 HONOR_NANS (TYPE_MODE (op_type)));
8143 if (subcode != ERROR_MARK)
8144 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8145 TREE_OPERAND (arg0, 1));
8149 return NULL_TREE;
8151 case TRUTH_NOT_EXPR:
8152 /* Note that the operand of this must be an int
8153 and its values must be 0 or 1.
8154 ("true" is a fixed value perhaps depending on the language,
8155 but we don't handle values other than 1 correctly yet.) */
8156 tem = fold_truth_not_expr (loc, arg0);
8157 if (!tem)
8158 return NULL_TREE;
8159 return fold_convert_loc (loc, type, tem);
8161 case REALPART_EXPR:
8162 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8163 return fold_convert_loc (loc, type, arg0);
8164 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8165 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8166 TREE_OPERAND (arg0, 1));
8167 if (TREE_CODE (arg0) == COMPLEX_CST)
8168 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8169 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8171 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8172 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8173 fold_build1_loc (loc, REALPART_EXPR, itype,
8174 TREE_OPERAND (arg0, 0)),
8175 fold_build1_loc (loc, REALPART_EXPR, itype,
8176 TREE_OPERAND (arg0, 1)));
8177 return fold_convert_loc (loc, type, tem);
8179 if (TREE_CODE (arg0) == CONJ_EXPR)
8181 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8182 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8183 TREE_OPERAND (arg0, 0));
8184 return fold_convert_loc (loc, type, tem);
8186 if (TREE_CODE (arg0) == CALL_EXPR)
8188 tree fn = get_callee_fndecl (arg0);
8189 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8190 switch (DECL_FUNCTION_CODE (fn))
8192 CASE_FLT_FN (BUILT_IN_CEXPI):
8193 fn = mathfn_built_in (type, BUILT_IN_COS);
8194 if (fn)
8195 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8196 break;
8198 default:
8199 break;
8202 return NULL_TREE;
8204 case IMAGPART_EXPR:
8205 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8206 return build_zero_cst (type);
8207 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8208 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8209 TREE_OPERAND (arg0, 0));
8210 if (TREE_CODE (arg0) == COMPLEX_CST)
8211 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8212 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8214 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8215 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8216 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8217 TREE_OPERAND (arg0, 0)),
8218 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8219 TREE_OPERAND (arg0, 1)));
8220 return fold_convert_loc (loc, type, tem);
8222 if (TREE_CODE (arg0) == CONJ_EXPR)
8224 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8225 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8226 return fold_convert_loc (loc, type, negate_expr (tem));
8228 if (TREE_CODE (arg0) == CALL_EXPR)
8230 tree fn = get_callee_fndecl (arg0);
8231 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8232 switch (DECL_FUNCTION_CODE (fn))
8234 CASE_FLT_FN (BUILT_IN_CEXPI):
8235 fn = mathfn_built_in (type, BUILT_IN_SIN);
8236 if (fn)
8237 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8238 break;
8240 default:
8241 break;
8244 return NULL_TREE;
8246 case INDIRECT_REF:
8247 /* Fold *&X to X if X is an lvalue. */
8248 if (TREE_CODE (op0) == ADDR_EXPR)
8250 tree op00 = TREE_OPERAND (op0, 0);
8251 if ((TREE_CODE (op00) == VAR_DECL
8252 || TREE_CODE (op00) == PARM_DECL
8253 || TREE_CODE (op00) == RESULT_DECL)
8254 && !TREE_READONLY (op00))
8255 return op00;
8257 return NULL_TREE;
8259 case VEC_UNPACK_LO_EXPR:
8260 case VEC_UNPACK_HI_EXPR:
8261 case VEC_UNPACK_FLOAT_LO_EXPR:
8262 case VEC_UNPACK_FLOAT_HI_EXPR:
8264 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8265 tree *elts;
8266 enum tree_code subcode;
8268 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8269 if (TREE_CODE (arg0) != VECTOR_CST)
8270 return NULL_TREE;
8272 elts = XALLOCAVEC (tree, nelts * 2);
8273 if (!vec_cst_ctor_to_array (arg0, elts))
8274 return NULL_TREE;
8276 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8277 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8278 elts += nelts;
8280 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8281 subcode = NOP_EXPR;
8282 else
8283 subcode = FLOAT_EXPR;
8285 for (i = 0; i < nelts; i++)
8287 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8288 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8289 return NULL_TREE;
8292 return build_vector (type, elts);
8295 case REDUC_MIN_EXPR:
8296 case REDUC_MAX_EXPR:
8297 case REDUC_PLUS_EXPR:
8299 unsigned int nelts, i;
8300 tree *elts;
8301 enum tree_code subcode;
8303 if (TREE_CODE (op0) != VECTOR_CST)
8304 return NULL_TREE;
8305 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8307 elts = XALLOCAVEC (tree, nelts);
8308 if (!vec_cst_ctor_to_array (op0, elts))
8309 return NULL_TREE;
8311 switch (code)
8313 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8314 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8315 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8316 default: gcc_unreachable ();
8319 for (i = 1; i < nelts; i++)
8321 elts[0] = const_binop (subcode, elts[0], elts[i]);
8322 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8323 return NULL_TREE;
8326 return elts[0];
8329 default:
8330 return NULL_TREE;
8331 } /* switch (code) */
8335 /* If the operation was a conversion do _not_ mark a resulting constant
8336 with TREE_OVERFLOW if the original constant was not. These conversions
8337 have implementation defined behavior and retaining the TREE_OVERFLOW
8338 flag here would confuse later passes such as VRP. */
8339 tree
8340 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8341 tree type, tree op0)
8343 tree res = fold_unary_loc (loc, code, type, op0);
8344 if (res
8345 && TREE_CODE (res) == INTEGER_CST
8346 && TREE_CODE (op0) == INTEGER_CST
8347 && CONVERT_EXPR_CODE_P (code))
8348 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8350 return res;
8353 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8354 operands OP0 and OP1. LOC is the location of the resulting expression.
8355 ARG0 and ARG1 are the results of STRIP_NOPS on OP0 and OP1.
8356 Return the folded expression if folding is successful. Otherwise,
8357 return NULL_TREE. */
8358 static tree
8359 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8360 tree arg0, tree arg1, tree op0, tree op1)
8362 tree tem;
8364 /* We only do these simplifications if we are optimizing. */
8365 if (!optimize)
8366 return NULL_TREE;
8368 /* Check for things like (A || B) && (A || C). We can convert this
8369 to A || (B && C). Note that either operator can be any of the four
8370 truth and/or operations and the transformation will still be
8371 valid. Also note that we only care about order for the
8372 ANDIF and ORIF operators. If B contains side effects, this
8373 might change the truth-value of A. */
8374 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8375 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8376 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8377 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8378 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8379 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8381 tree a00 = TREE_OPERAND (arg0, 0);
8382 tree a01 = TREE_OPERAND (arg0, 1);
8383 tree a10 = TREE_OPERAND (arg1, 0);
8384 tree a11 = TREE_OPERAND (arg1, 1);
8385 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8386 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8387 && (code == TRUTH_AND_EXPR
8388 || code == TRUTH_OR_EXPR));
8390 if (operand_equal_p (a00, a10, 0))
8391 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8392 fold_build2_loc (loc, code, type, a01, a11));
8393 else if (commutative && operand_equal_p (a00, a11, 0))
8394 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8395 fold_build2_loc (loc, code, type, a01, a10));
8396 else if (commutative && operand_equal_p (a01, a10, 0))
8397 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8398 fold_build2_loc (loc, code, type, a00, a11));
8400 /* This case is tricky because we must either have commutative
8401 operators or else A10 must not have side-effects. */
8403 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8404 && operand_equal_p (a01, a11, 0))
8405 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8406 fold_build2_loc (loc, code, type, a00, a10),
8407 a01);
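/* Editor's note (illustrative, not part of the original source): the
   factoring above can be checked exhaustively over booleans: for all
   a, b, c in {0, 1},
       ((a || b) && (a || c)) == (a || (b && c)),
   so the transformation is valid whenever evaluation order and side
   effects do not matter.  */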
8410 /* See if we can build a range comparison. */
8411 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8412 return tem;
8414 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8415 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8417 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8418 if (tem)
8419 return fold_build2_loc (loc, code, type, tem, arg1);
8422 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8423 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8425 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8426 if (tem)
8427 return fold_build2_loc (loc, code, type, arg0, tem);
8430 /* Check for the possibility of merging component references. If our
8431 lhs is another similar operation, try to merge its rhs with our
8432 rhs. Then try to merge our lhs and rhs. */
8433 if (TREE_CODE (arg0) == code
8434 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8435 TREE_OPERAND (arg0, 1), arg1)))
8436 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8438 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8439 return tem;
8441 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8442 && (code == TRUTH_AND_EXPR
8443 || code == TRUTH_ANDIF_EXPR
8444 || code == TRUTH_OR_EXPR
8445 || code == TRUTH_ORIF_EXPR))
8447 enum tree_code ncode, icode;
8449 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8450 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8451 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8453 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8454 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8455 We don't want to pack more than two leaves into a non-IF AND/OR
8456 expression.
8457 If the tree code of the left-hand operand isn't an AND/OR-IF code
8458 and isn't equal to IF-CODE, then we don't add the right-hand operand.
8459 If the inner right-hand side of the left-hand operand has
8460 side-effects, or isn't simple, then we can't add to it,
8461 as otherwise we might destroy the if-sequence. */
8462 if (TREE_CODE (arg0) == icode
8463 && simple_operand_p_2 (arg1)
8464 /* Needed for sequence points to handle trappings, and
8465 side-effects. */
8466 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8468 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8469 arg1);
8470 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8471 tem);
8473 /* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8474 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8475 else if (TREE_CODE (arg1) == icode
8476 && simple_operand_p_2 (arg0)
8477 /* Needed for sequence points to handle trappings, and
8478 side-effects. */
8479 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8481 tem = fold_build2_loc (loc, ncode, type,
8482 arg0, TREE_OPERAND (arg1, 0));
8483 return fold_build2_loc (loc, icode, type, tem,
8484 TREE_OPERAND (arg1, 1));
8486 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8487 into (A OR B).
8488 For sequence point consistency, we need to check for trapping,
8489 and side-effects. */
8490 else if (code == icode && simple_operand_p_2 (arg0)
8491 && simple_operand_p_2 (arg1))
8492 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8495 return NULL_TREE;
8498 /* Fold a binary expression of code CODE and type TYPE with operands
8499 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8500 Return the folded expression if folding is successful. Otherwise,
8501 return NULL_TREE. */
8503 static tree
8504 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8506 enum tree_code compl_code;
8508 if (code == MIN_EXPR)
8509 compl_code = MAX_EXPR;
8510 else if (code == MAX_EXPR)
8511 compl_code = MIN_EXPR;
8512 else
8513 gcc_unreachable ();
8515 /* MIN (MAX (a, b), b) == b. */
8516 if (TREE_CODE (op0) == compl_code
8517 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8518 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8520 /* MIN (MAX (b, a), b) == b. */
8521 if (TREE_CODE (op0) == compl_code
8522 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8523 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8524 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8526 /* MIN (a, MAX (a, b)) == a. */
8527 if (TREE_CODE (op1) == compl_code
8528 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8529 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8530 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8532 /* MIN (a, MAX (b, a)) == a. */
8533 if (TREE_CODE (op1) == compl_code
8534 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8535 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8536 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8538 return NULL_TREE;
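/* Editor's sketch (illustrative addition, not part of the original
   source): the first identity above, MIN (MAX (a, b), b) == b, checked
   in plain C; it holds for all a and b since MAX (a, b) >= b.  */

static bool
demo_minmax_identity (int a, int b)
{
  int mx = a > b ? a : b;        /* MAX (a, b) */
  int mn = mx < b ? mx : b;      /* MIN (MAX (a, b), b) */
  return mn == b;                /* Always true.  */
}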
8541 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8542 by changing CODE to reduce the magnitude of constants involved in
8543 ARG0 of the comparison.
8544 Returns a canonicalized comparison tree if a simplification was
8545 possible, otherwise returns NULL_TREE.
8546 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8547 valid if signed overflow is undefined. */
8549 static tree
8550 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8551 tree arg0, tree arg1,
8552 bool *strict_overflow_p)
8554 enum tree_code code0 = TREE_CODE (arg0);
8555 tree t, cst0 = NULL_TREE;
8556 int sgn0;
8557 bool swap = false;
8559 /* Match A +- CST code arg1 and CST code arg1. We can change the
8560 first form only if overflow is undefined. */
8561 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8562 /* In principle pointers also have undefined overflow behavior,
8563 but that causes problems elsewhere. */
8564 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8565 && (code0 == MINUS_EXPR
8566 || code0 == PLUS_EXPR)
8567 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8568 || code0 == INTEGER_CST))
8569 return NULL_TREE;
8571 /* Identify the constant in arg0 and its sign. */
8572 if (code0 == INTEGER_CST)
8573 cst0 = arg0;
8574 else
8575 cst0 = TREE_OPERAND (arg0, 1);
8576 sgn0 = tree_int_cst_sgn (cst0);
8578 /* Overflowed constants and zero will cause problems. */
8579 if (integer_zerop (cst0)
8580 || TREE_OVERFLOW (cst0))
8581 return NULL_TREE;
8583 /* See if we can reduce the magnitude of the constant in
8584 arg0 by changing the comparison code. */
8585 if (code0 == INTEGER_CST)
8587 /* CST <= arg1 -> CST-1 < arg1. */
8588 if (code == LE_EXPR && sgn0 == 1)
8589 code = LT_EXPR;
8590 /* -CST < arg1 -> -CST-1 <= arg1. */
8591 else if (code == LT_EXPR && sgn0 == -1)
8592 code = LE_EXPR;
8593 /* CST > arg1 -> CST-1 >= arg1. */
8594 else if (code == GT_EXPR && sgn0 == 1)
8595 code = GE_EXPR;
8596 /* -CST >= arg1 -> -CST-1 > arg1. */
8597 else if (code == GE_EXPR && sgn0 == -1)
8598 code = GT_EXPR;
8599 else
8600 return NULL_TREE;
8601 /* arg1 code' CST' might be more canonical. */
8602 swap = true;
8604 else
8606 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8607 if (code == LT_EXPR
8608 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8609 code = LE_EXPR;
8610 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8611 else if (code == GT_EXPR
8612 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8613 code = GE_EXPR;
8614 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8615 else if (code == LE_EXPR
8616 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8617 code = LT_EXPR;
8618 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8619 else if (code == GE_EXPR
8620 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8621 code = GT_EXPR;
8622 else
8623 return NULL_TREE;
8624 *strict_overflow_p = true;
8627 /* Now build the constant reduced in magnitude. But not if that
8628 would produce one outside of its type's range. */
8629 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8630 && ((sgn0 == 1
8631 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8632 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8633 || (sgn0 == -1
8634 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8635 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8636 /* We cannot swap the comparison here as that would cause us to
8637 endlessly recurse. */
8638 return NULL_TREE;
8640 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8641 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8642 if (code0 != INTEGER_CST)
8643 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8644 t = fold_convert (TREE_TYPE (arg1), t);
8646 /* If swapping might yield a more canonical form, do so. */
8647 if (swap)
8648 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8649 else
8650 return fold_build2_loc (loc, code, type, t, arg1);
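/* Editor's sketch (illustrative addition, not part of the original
   source): a concrete instance of the magnitude reduction above is the
   rewrite CST <= arg1 -> CST-1 < arg1; for example, with CST == 4:  */

static bool
demo_reduce_constant_magnitude (int arg1)
{
  return (4 <= arg1) == (3 < arg1);   /* Always true.  */
}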
8653 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8654 overflow further. Try to decrease the magnitude of constants involved
8655 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8656 and put sole constants at the second argument position.
8657 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8659 static tree
8660 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8661 tree arg0, tree arg1)
8663 tree t;
8664 bool strict_overflow_p;
8665 const char * const warnmsg = G_("assuming signed overflow does not occur "
8666 "when reducing constant in comparison");
8668 /* Try canonicalization by simplifying arg0. */
8669 strict_overflow_p = false;
8670 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8671 &strict_overflow_p);
8672 if (t)
8674 if (strict_overflow_p)
8675 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8676 return t;
8679 /* Try canonicalization by simplifying arg1 using the swapped
8680 comparison. */
8681 code = swap_tree_comparison (code);
8682 strict_overflow_p = false;
8683 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8684 &strict_overflow_p);
8685 if (t && strict_overflow_p)
8686 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8687 return t;
8690 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8691 space. This is used to avoid issuing overflow warnings for
8692 expressions like &p->x, which cannot wrap. */
8694 static bool
8695 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8697 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8698 return true;
8700 if (bitpos < 0)
8701 return true;
8703 wide_int wi_offset;
8704 int precision = TYPE_PRECISION (TREE_TYPE (base));
8705 if (offset == NULL_TREE)
8706 wi_offset = wi::zero (precision);
8707 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8708 return true;
8709 else
8710 wi_offset = offset;
8712 bool overflow;
8713 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8714 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8715 if (overflow)
8716 return true;
8718 if (!wi::fits_uhwi_p (total))
8719 return true;
8721 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8722 if (size <= 0)
8723 return true;
8725 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8726 array. */
8727 if (TREE_CODE (base) == ADDR_EXPR)
8729 HOST_WIDE_INT base_size;
8731 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8732 if (base_size > 0 && size < base_size)
8733 size = base_size;
8736 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8739 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8740 kind INTEGER_CST. This makes sure to properly sign-extend the
8741 constant. */
8743 static HOST_WIDE_INT
8744 size_low_cst (const_tree t)
8746 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8747 int prec = TYPE_PRECISION (TREE_TYPE (t));
8748 if (prec < HOST_BITS_PER_WIDE_INT)
8749 return sext_hwi (w, prec);
8750 return w;
8753 /* Subroutine of fold_binary. This routine performs all of the
8754 transformations that are common to the equality/inequality
8755 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8756 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8757 fold_binary should call fold_binary instead. Fold a comparison with
8758 tree code CODE and type TYPE with operands OP0 and OP1. Return
8759 the folded comparison or NULL_TREE. */
8761 static tree
8762 fold_comparison (location_t loc, enum tree_code code, tree type,
8763 tree op0, tree op1)
8765 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8766 tree arg0, arg1, tem;
8768 arg0 = op0;
8769 arg1 = op1;
8771 STRIP_SIGN_NOPS (arg0);
8772 STRIP_SIGN_NOPS (arg1);
8774 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8775 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8776 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8777 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8778 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8779 && TREE_CODE (arg1) == INTEGER_CST
8780 && !TREE_OVERFLOW (arg1))
8782 const enum tree_code
8783 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8784 tree const1 = TREE_OPERAND (arg0, 1);
8785 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8786 tree variable = TREE_OPERAND (arg0, 0);
8787 tree new_const = int_const_binop (reverse_op, const2, const1);
8789 /* If the constant operation overflowed this can be
8790 simplified as a comparison against INT_MAX/INT_MIN. */
8791 if (TREE_OVERFLOW (new_const)
8792 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8794 int const1_sgn = tree_int_cst_sgn (const1);
8795 enum tree_code code2 = code;
8797 /* Get the sign of the constant on the lhs if the
8798 operation were VARIABLE + CONST1. */
8799 if (TREE_CODE (arg0) == MINUS_EXPR)
8800 const1_sgn = -const1_sgn;
8802 /* The sign of the constant determines if we overflowed
8803 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8804 Canonicalize to the INT_MIN overflow by swapping the comparison
8805 if necessary. */
8806 if (const1_sgn == -1)
8807 code2 = swap_tree_comparison (code);
8809 /* We can now look at the canonicalized case
8810 VARIABLE + 1 CODE2 INT_MIN
8811 and decide on the result. */
8812 switch (code2)
8814 case EQ_EXPR:
8815 case LT_EXPR:
8816 case LE_EXPR:
8817 return
8818 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8820 case NE_EXPR:
8821 case GE_EXPR:
8822 case GT_EXPR:
8823 return
8824 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8826 default:
8827 gcc_unreachable ();
8830 else
8832 if (!equality_code)
8833 fold_overflow_warning ("assuming signed overflow does not occur "
8834 "when changing X +- C1 cmp C2 to "
8835 "X cmp C2 -+ C1",
8836 WARN_STRICT_OVERFLOW_COMPARISON);
8837 return fold_build2_loc (loc, code, type, variable, new_const);
8841 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8842 if (TREE_CODE (arg0) == MINUS_EXPR
8843 && equality_code
8844 && integer_zerop (arg1))
8846 /* ??? The transformation is valid for the other operators if overflow
8847 is undefined for the type, but performing it here badly interacts
8848 with the transformation in fold_cond_expr_with_comparison which
8849 attempts to synthesize ABS_EXPR. */
8850 if (!equality_code)
8851 fold_overflow_warning ("assuming signed overflow does not occur "
8852 "when changing X - Y cmp 0 to X cmp Y",
8853 WARN_STRICT_OVERFLOW_COMPARISON);
8854 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8855 TREE_OPERAND (arg0, 1));
8858 /* For comparisons of pointers we can decompose it to a compile time
8859 comparison of the base objects and the offsets into the object.
8860 This requires at least one operand being an ADDR_EXPR or a
8861 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8862 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8863 && (TREE_CODE (arg0) == ADDR_EXPR
8864 || TREE_CODE (arg1) == ADDR_EXPR
8865 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8866 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8868 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8869 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8870 machine_mode mode;
8871 int volatilep, reversep, unsignedp;
8872 bool indirect_base0 = false, indirect_base1 = false;
8874 /* Get base and offset for the access. Strip ADDR_EXPR for
8875 get_inner_reference, but put it back by stripping INDIRECT_REF
8876 off the base object if possible. indirect_baseN will be true
8877 if baseN is not an address but refers to the object itself. */
8878 base0 = arg0;
8879 if (TREE_CODE (arg0) == ADDR_EXPR)
8881 base0
8882 = get_inner_reference (TREE_OPERAND (arg0, 0),
8883 &bitsize, &bitpos0, &offset0, &mode,
8884 &unsignedp, &reversep, &volatilep, false);
8885 if (TREE_CODE (base0) == INDIRECT_REF)
8886 base0 = TREE_OPERAND (base0, 0);
8887 else
8888 indirect_base0 = true;
8890 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8892 base0 = TREE_OPERAND (arg0, 0);
8893 STRIP_SIGN_NOPS (base0);
8894 if (TREE_CODE (base0) == ADDR_EXPR)
8896 base0 = TREE_OPERAND (base0, 0);
8897 indirect_base0 = true;
8899 offset0 = TREE_OPERAND (arg0, 1);
8900 if (tree_fits_shwi_p (offset0))
8902 HOST_WIDE_INT off = size_low_cst (offset0);
8903 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8904 * BITS_PER_UNIT)
8905 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8907 bitpos0 = off * BITS_PER_UNIT;
8908 offset0 = NULL_TREE;
8913 base1 = arg1;
8914 if (TREE_CODE (arg1) == ADDR_EXPR)
8916 base1
8917 = get_inner_reference (TREE_OPERAND (arg1, 0),
8918 &bitsize, &bitpos1, &offset1, &mode,
8919 &unsignedp, &reversep, &volatilep, false);
8920 if (TREE_CODE (base1) == INDIRECT_REF)
8921 base1 = TREE_OPERAND (base1, 0);
8922 else
8923 indirect_base1 = true;
8925 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8927 base1 = TREE_OPERAND (arg1, 0);
8928 STRIP_SIGN_NOPS (base1);
8929 if (TREE_CODE (base1) == ADDR_EXPR)
8931 base1 = TREE_OPERAND (base1, 0);
8932 indirect_base1 = true;
8934 offset1 = TREE_OPERAND (arg1, 1);
8935 if (tree_fits_shwi_p (offset1))
8937 HOST_WIDE_INT off = size_low_cst (offset1);
8938 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8939 * BITS_PER_UNIT)
8940 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8942 bitpos1 = off * BITS_PER_UNIT;
8943 offset1 = NULL_TREE;
8948 /* A local variable can never be pointed to by
8949 the default SSA name of an incoming parameter. */
8950 if ((TREE_CODE (arg0) == ADDR_EXPR
8951 && indirect_base0
8952 && TREE_CODE (base0) == VAR_DECL
8953 && auto_var_in_fn_p (base0, current_function_decl)
8954 && !indirect_base1
8955 && TREE_CODE (base1) == SSA_NAME
8956 && SSA_NAME_IS_DEFAULT_DEF (base1)
8957 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8958 || (TREE_CODE (arg1) == ADDR_EXPR
8959 && indirect_base1
8960 && TREE_CODE (base1) == VAR_DECL
8961 && auto_var_in_fn_p (base1, current_function_decl)
8962 && !indirect_base0
8963 && TREE_CODE (base0) == SSA_NAME
8964 && SSA_NAME_IS_DEFAULT_DEF (base0)
8965 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8967 if (code == NE_EXPR)
8968 return constant_boolean_node (1, type);
8969 else if (code == EQ_EXPR)
8970 return constant_boolean_node (0, type);
8972 /* If we have equivalent bases we might be able to simplify. */
8973 else if (indirect_base0 == indirect_base1
8974 && operand_equal_p (base0, base1, 0))
8976 /* We can fold this expression to a constant if the non-constant
8977 offset parts are equal. */
8978 if ((offset0 == offset1
8979 || (offset0 && offset1
8980 && operand_equal_p (offset0, offset1, 0)))
8981 && (code == EQ_EXPR
8982 || code == NE_EXPR
8983 || (indirect_base0 && DECL_P (base0))
8984 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8987 if (!equality_code
8988 && bitpos0 != bitpos1
8989 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8990 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8991 fold_overflow_warning (("assuming pointer wraparound does not "
8992 "occur when comparing P +- C1 with "
8993 "P +- C2"),
8994 WARN_STRICT_OVERFLOW_CONDITIONAL);
8996 switch (code)
8998 case EQ_EXPR:
8999 return constant_boolean_node (bitpos0 == bitpos1, type);
9000 case NE_EXPR:
9001 return constant_boolean_node (bitpos0 != bitpos1, type);
9002 case LT_EXPR:
9003 return constant_boolean_node (bitpos0 < bitpos1, type);
9004 case LE_EXPR:
9005 return constant_boolean_node (bitpos0 <= bitpos1, type);
9006 case GE_EXPR:
9007 return constant_boolean_node (bitpos0 >= bitpos1, type);
9008 case GT_EXPR:
9009 return constant_boolean_node (bitpos0 > bitpos1, type);
9010 default:;
9013 /* We can simplify the comparison to a comparison of the variable
9014 offset parts if the constant offset parts are equal.
9015 Be careful to use signed sizetype here because otherwise we
9016 mess with array offsets in the wrong way. This is possible
9017 because pointer arithmetic is restricted to remain within an
9018 object and overflow on pointer differences is undefined as of
9019 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9020 else if (bitpos0 == bitpos1
9021 && (equality_code
9022 || (indirect_base0 && DECL_P (base0))
9023 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9025 /* By converting to signed sizetype we cover middle-end pointer
9026 arithmetic which operates on unsigned pointer types of size
9027 type size and ARRAY_REF offsets which are properly sign or
9028 zero extended from their type in case it is narrower than
9029 sizetype. */
9030 if (offset0 == NULL_TREE)
9031 offset0 = build_int_cst (ssizetype, 0);
9032 else
9033 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9034 if (offset1 == NULL_TREE)
9035 offset1 = build_int_cst (ssizetype, 0);
9036 else
9037 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9039 if (!equality_code
9040 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9041 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9042 fold_overflow_warning (("assuming pointer wraparound does not "
9043 "occur when comparing P +- C1 with "
9044 "P +- C2"),
9045 WARN_STRICT_OVERFLOW_COMPARISON);
9047 return fold_build2_loc (loc, code, type, offset0, offset1);
9050 /* For non-equal bases we can simplify if they are addresses
9051 of decls that bind locally or of constants. */
9052 else if (indirect_base0 && indirect_base1
9053 /* We know that !operand_equal_p (base0, base1, 0)
9054 because the if condition was false. But make
9055 sure the two decls are not the same. */
9056 && base0 != base1
9057 && TREE_CODE (arg0) == ADDR_EXPR
9058 && TREE_CODE (arg1) == ADDR_EXPR
9059 && (((TREE_CODE (base0) == VAR_DECL
9060 || TREE_CODE (base0) == PARM_DECL)
9061 && (targetm.binds_local_p (base0)
9062 || CONSTANT_CLASS_P (base1)))
9063 || CONSTANT_CLASS_P (base0))
9064 && (((TREE_CODE (base1) == VAR_DECL
9065 || TREE_CODE (base1) == PARM_DECL)
9066 && (targetm.binds_local_p (base1)
9067 || CONSTANT_CLASS_P (base0)))
9068 || CONSTANT_CLASS_P (base1)))
9070 if (code == EQ_EXPR)
9071 return omit_two_operands_loc (loc, type, boolean_false_node,
9072 arg0, arg1);
9073 else if (code == NE_EXPR)
9074 return omit_two_operands_loc (loc, type, boolean_true_node,
9075 arg0, arg1);
9077 /* For equal offsets we can simplify to a comparison of the
9078 base addresses. */
9079 else if (bitpos0 == bitpos1
9080 && (indirect_base0
9081 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9082 && (indirect_base1
9083 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9084 && ((offset0 == offset1)
9085 || (offset0 && offset1
9086 && operand_equal_p (offset0, offset1, 0))))
9088 if (indirect_base0)
9089 base0 = build_fold_addr_expr_loc (loc, base0);
9090 if (indirect_base1)
9091 base1 = build_fold_addr_expr_loc (loc, base1);
9092 return fold_build2_loc (loc, code, type, base0, base1);
9096 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9097 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9098 the resulting offset is smaller in absolute value than the
9099 original one and has the same sign. */
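/* Illustrative example (annotation, not in the original source):
   for "x + 3 < y + 5" the combined constant is cst = 5 - 3 = 2;
   since |2| < |5| and the signs agree, the comparison folds to
   "x < y + 2".  For "x + 3 < y - 5" the combined constant 8 is
   larger in absolute value than either original constant, so
   neither direction applies and the expression is left alone.  */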
9100 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9101 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9102 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9103 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9104 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9105 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9106 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9108 tree const1 = TREE_OPERAND (arg0, 1);
9109 tree const2 = TREE_OPERAND (arg1, 1);
9110 tree variable1 = TREE_OPERAND (arg0, 0);
9111 tree variable2 = TREE_OPERAND (arg1, 0);
9112 tree cst;
9113 const char * const warnmsg = G_("assuming signed overflow does not "
9114 "occur when combining constants around "
9115 "a comparison");
9117 /* Put the constant on the side where it doesn't overflow and is
9118 of lower absolute value than before and of the same sign. */
9119 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9120 ? MINUS_EXPR : PLUS_EXPR,
9121 const2, const1);
9122 if (!TREE_OVERFLOW (cst)
9123 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9124 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9126 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9127 return fold_build2_loc (loc, code, type,
9128 variable1,
9129 fold_build2_loc (loc, TREE_CODE (arg1),
9130 TREE_TYPE (arg1),
9131 variable2, cst));
9134 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9135 ? MINUS_EXPR : PLUS_EXPR,
9136 const1, const2);
9137 if (!TREE_OVERFLOW (cst)
9138 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9139 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9141 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9142 return fold_build2_loc (loc, code, type,
9143 fold_build2_loc (loc, TREE_CODE (arg0),
9144 TREE_TYPE (arg0),
9145 variable1, cst),
9146 variable2);
9150 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9151 signed arithmetic case. That form is created by the compiler
9152 often enough for folding it to be of value. One example is in
9153 computing loop trip counts after Operator Strength Reduction. */
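/* Illustrative example (annotation, not in the original source):
   "i * 4 > 0" folds to "i > 0", while "i * -4 > 0" swaps the
   comparison below and folds to "i < 0".  */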
9154 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9155 && TREE_CODE (arg0) == MULT_EXPR
9156 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9157 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9158 && integer_zerop (arg1))
9160 tree const1 = TREE_OPERAND (arg0, 1);
9161 tree const2 = arg1; /* zero */
9162 tree variable1 = TREE_OPERAND (arg0, 0);
9163 enum tree_code cmp_code = code;
9165 /* Handle unfolded multiplication by zero. */
9166 if (integer_zerop (const1))
9167 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9169 fold_overflow_warning (("assuming signed overflow does not occur when "
9170 "eliminating multiplication in comparison "
9171 "with zero"),
9172 WARN_STRICT_OVERFLOW_COMPARISON);
9174 /* If const1 is negative we swap the sense of the comparison. */
9175 if (tree_int_cst_sgn (const1) < 0)
9176 cmp_code = swap_tree_comparison (cmp_code);
9178 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9181 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9182 if (tem)
9183 return tem;
9185 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9187 tree targ0 = strip_float_extensions (arg0);
9188 tree targ1 = strip_float_extensions (arg1);
9189 tree newtype = TREE_TYPE (targ0);
9191 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9192 newtype = TREE_TYPE (targ1);
9194 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9195 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9196 return fold_build2_loc (loc, code, type,
9197 fold_convert_loc (loc, newtype, targ0),
9198 fold_convert_loc (loc, newtype, targ1));
9200 /* (-a) CMP (-b) -> b CMP a */
9201 if (TREE_CODE (arg0) == NEGATE_EXPR
9202 && TREE_CODE (arg1) == NEGATE_EXPR)
9203 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9204 TREE_OPERAND (arg0, 0));
9206 if (TREE_CODE (arg1) == REAL_CST)
9208 REAL_VALUE_TYPE cst;
9209 cst = TREE_REAL_CST (arg1);
9211 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9212 if (TREE_CODE (arg0) == NEGATE_EXPR)
9213 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9214 TREE_OPERAND (arg0, 0),
9215 build_real (TREE_TYPE (arg1),
9216 real_value_negate (&cst)));
9218 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9219 /* a CMP (-0) -> a CMP 0 */
9220 if (REAL_VALUE_MINUS_ZERO (cst))
9221 return fold_build2_loc (loc, code, type, arg0,
9222 build_real (TREE_TYPE (arg1), dconst0));
9224 /* x != NaN is always true, other ops are always false. */
9225 if (REAL_VALUE_ISNAN (cst)
9226 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9228 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9229 return omit_one_operand_loc (loc, type, tem, arg0);
9232 /* Fold comparisons against infinity. */
9233 if (REAL_VALUE_ISINF (cst)
9234 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9236 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9237 if (tem != NULL_TREE)
9238 return tem;
9242 /* If this is a comparison of a real constant with a PLUS_EXPR
9243 or a MINUS_EXPR of a real constant, we can convert it into a
9244 comparison with a revised real constant, provided that no overflow
9245 occurs and unsafe math optimizations are enabled. */
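/* Illustrative example (annotation, not in the original source):
   under -funsafe-math-optimizations, "x + 2.0 < 10.0" folds to
   "x < 8.0" because const_binop (MINUS_EXPR, 10.0, 2.0) produces
   the revised constant without overflow.  */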
9246 if (flag_unsafe_math_optimizations
9247 && TREE_CODE (arg1) == REAL_CST
9248 && (TREE_CODE (arg0) == PLUS_EXPR
9249 || TREE_CODE (arg0) == MINUS_EXPR)
9250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9251 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9252 ? MINUS_EXPR : PLUS_EXPR,
9253 arg1, TREE_OPERAND (arg0, 1)))
9254 && !TREE_OVERFLOW (tem))
9255 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9257 /* Likewise, we can simplify a comparison of a real constant with
9258 a MINUS_EXPR whose first operand is also a real constant, i.e.
9259 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9260 floating-point types only if -fassociative-math is set. */
9261 if (flag_associative_math
9262 && TREE_CODE (arg1) == REAL_CST
9263 && TREE_CODE (arg0) == MINUS_EXPR
9264 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9265 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9266 arg1))
9267 && !TREE_OVERFLOW (tem))
9268 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9269 TREE_OPERAND (arg0, 1), tem);
9271 /* Fold comparisons against built-in math functions. */
9272 if (TREE_CODE (arg1) == REAL_CST
9273 && flag_unsafe_math_optimizations
9274 && ! flag_errno_math)
9276 enum built_in_function fcode = builtin_mathfn_code (arg0);
9278 if (fcode != END_BUILTINS)
9280 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9281 if (tem != NULL_TREE)
9282 return tem;
9287 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9288 && CONVERT_EXPR_P (arg0))
9290 /* If we are widening one operand of an integer comparison,
9291 see if the other operand is similarly being widened. Perhaps we
9292 can do the comparison in the narrower type. */
9293 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9294 if (tem)
9295 return tem;
9297 /* Or if we are changing signedness. */
9298 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9299 if (tem)
9300 return tem;
9303 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9304 constant, we can simplify it. */
9305 if (TREE_CODE (arg1) == INTEGER_CST
9306 && (TREE_CODE (arg0) == MIN_EXPR
9307 || TREE_CODE (arg0) == MAX_EXPR)
9308 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9310 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9311 if (tem)
9312 return tem;
9315 /* Simplify comparison of something with itself. (For IEEE
9316 floating-point, we can only do some of these simplifications.) */
9317 if (operand_equal_p (arg0, arg1, 0))
9319 switch (code)
9321 case EQ_EXPR:
9322 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9323 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9324 return constant_boolean_node (1, type);
9325 break;
9327 case GE_EXPR:
9328 case LE_EXPR:
9329 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9330 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9331 return constant_boolean_node (1, type);
9332 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9334 case NE_EXPR:
9335 /* For NE, we can only do this simplification if the operands are
9336 integral or we don't honor IEEE floating point NaNs. */
9337 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9338 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9339 break;
9340 /* ... fall through ... */
9341 case GT_EXPR:
9342 case LT_EXPR:
9343 return constant_boolean_node (0, type);
9344 default:
9345 gcc_unreachable ();
9349 /* If we are comparing an expression that just has comparisons
9350 of two integer values, arithmetic expressions of those comparisons,
9351 and constants, we can simplify it. There are only three cases
9352 to check: the two values can either be equal, the first can be
9353 greater, or the second can be greater. Fold the expression for
9354 those three values. Since each value must be 0 or 1, we have
9355 eight possibilities, each of which corresponds to the constant 0
9356 or 1 or one of the six possible comparisons.
9358 This handles common cases like (a > b) == 0 but also handles
9359 expressions like ((x > y) - (y > x)) > 0, which supposedly
9360 occur in macroized code. */
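/* Worked example (illustrative annotation, not in the original
   source): for "(a > b) == 0", substituting (max, min), (max, max)
   and (min, max) for (a, b) gives high_result = 0, equal_result = 1
   and low_result = 1, i.e. mask 0b011 = 3 in the switch below, so
   the expression folds to "a <= b".  */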
9362 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9364 tree cval1 = 0, cval2 = 0;
9365 int save_p = 0;
9367 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9368 /* Don't handle degenerate cases here; they should already
9369 have been handled anyway. */
9370 && cval1 != 0 && cval2 != 0
9371 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9372 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9373 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9374 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9375 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9376 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9377 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9379 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9380 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9382 /* We can't just pass T to eval_subst in case cval1 or cval2
9383 was the same as ARG1. */
9385 tree high_result
9386 = fold_build2_loc (loc, code, type,
9387 eval_subst (loc, arg0, cval1, maxval,
9388 cval2, minval),
9389 arg1);
9390 tree equal_result
9391 = fold_build2_loc (loc, code, type,
9392 eval_subst (loc, arg0, cval1, maxval,
9393 cval2, maxval),
9394 arg1);
9395 tree low_result
9396 = fold_build2_loc (loc, code, type,
9397 eval_subst (loc, arg0, cval1, minval,
9398 cval2, maxval),
9399 arg1);
9401 /* All three of these results should be 0 or 1. Confirm they are.
9402 Then use those values to select the proper code to use. */
9404 if (TREE_CODE (high_result) == INTEGER_CST
9405 && TREE_CODE (equal_result) == INTEGER_CST
9406 && TREE_CODE (low_result) == INTEGER_CST)
9408 /* Make a 3-bit mask with the high-order bit being the
9409 value for `>', the next for '=', and the low for '<'. */
9410 switch ((integer_onep (high_result) * 4)
9411 + (integer_onep (equal_result) * 2)
9412 + integer_onep (low_result))
9414 case 0:
9415 /* Always false. */
9416 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9417 case 1:
9418 code = LT_EXPR;
9419 break;
9420 case 2:
9421 code = EQ_EXPR;
9422 break;
9423 case 3:
9424 code = LE_EXPR;
9425 break;
9426 case 4:
9427 code = GT_EXPR;
9428 break;
9429 case 5:
9430 code = NE_EXPR;
9431 break;
9432 case 6:
9433 code = GE_EXPR;
9434 break;
9435 case 7:
9436 /* Always true. */
9437 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9440 if (save_p)
9442 tem = save_expr (build2 (code, type, cval1, cval2));
9443 SET_EXPR_LOCATION (tem, loc);
9444 return tem;
9446 return fold_build2_loc (loc, code, type, cval1, cval2);
9451 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9452 into a single range test. */
9453 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9454 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9455 && TREE_CODE (arg1) == INTEGER_CST
9456 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9457 && !integer_zerop (TREE_OPERAND (arg0, 1))
9458 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9459 && !TREE_OVERFLOW (arg1))
9461 tem = fold_div_compare (loc, code, type, arg0, arg1);
9462 if (tem != NULL_TREE)
9463 return tem;
9466 /* Fold ~X op ~Y as Y op X. */
9467 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9468 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9470 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9471 return fold_build2_loc (loc, code, type,
9472 fold_convert_loc (loc, cmp_type,
9473 TREE_OPERAND (arg1, 0)),
9474 TREE_OPERAND (arg0, 0));
9477 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9478 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9479 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9481 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9482 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9483 TREE_OPERAND (arg0, 0),
9484 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9485 fold_convert_loc (loc, cmp_type, arg1)));
9488 return NULL_TREE;
9492 /* Subroutine of fold_binary. Optimize complex multiplications of the
9493 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9494 argument EXPR represents the expression "z" of type TYPE. */
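/* Illustrative example (annotation, not in the original source):
   for z = 3 + 4i, z * conj(z) = (3*3 + 4*4) + 0i = 25 + 0i, which
   is exactly the COMPLEX_EXPR of rpart*rpart + ipart*ipart and a
   zero imaginary part built below.  */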
9496 static tree
9497 fold_mult_zconjz (location_t loc, tree type, tree expr)
9499 tree itype = TREE_TYPE (type);
9500 tree rpart, ipart, tem;
9502 if (TREE_CODE (expr) == COMPLEX_EXPR)
9504 rpart = TREE_OPERAND (expr, 0);
9505 ipart = TREE_OPERAND (expr, 1);
9507 else if (TREE_CODE (expr) == COMPLEX_CST)
9509 rpart = TREE_REALPART (expr);
9510 ipart = TREE_IMAGPART (expr);
9512 else
9514 expr = save_expr (expr);
9515 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9516 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9519 rpart = save_expr (rpart);
9520 ipart = save_expr (ipart);
9521 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9522 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9523 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9524 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9525 build_zero_cst (itype));
9529 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9530 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9531 guarantees that P and N have the same least significant log2(M) bits.
9532 N is not otherwise constrained. In particular, N is not normalized to
9533 0 <= N < M as is common. In general, the precise value of P is unknown.
9534 M is chosen as large as possible such that constant N can be determined.
9536 Returns M and sets *RESIDUE to N.
9538 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9539 account. This is not always possible due to PR 35705. */
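/* Illustrative example (annotation, not in the original source):
   if EXPR is "&obj p+ 6" and obj is known to be 16-byte aligned,
   the ADDR_EXPR arm yields modulus 16 with residue 0 and the
   INTEGER_CST arm then adds 6, so the pointer value is known to be
   congruent to 6 modulo 16.  */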
9542 static unsigned HOST_WIDE_INT
9543 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9544 bool allow_func_align)
9546 enum tree_code code;
9548 *residue = 0;
9550 code = TREE_CODE (expr);
9551 if (code == ADDR_EXPR)
9553 unsigned int bitalign;
9554 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9555 *residue /= BITS_PER_UNIT;
9556 return bitalign / BITS_PER_UNIT;
9558 else if (code == POINTER_PLUS_EXPR)
9560 tree op0, op1;
9561 unsigned HOST_WIDE_INT modulus;
9562 enum tree_code inner_code;
9564 op0 = TREE_OPERAND (expr, 0);
9565 STRIP_NOPS (op0);
9566 modulus = get_pointer_modulus_and_residue (op0, residue,
9567 allow_func_align);
9569 op1 = TREE_OPERAND (expr, 1);
9570 STRIP_NOPS (op1);
9571 inner_code = TREE_CODE (op1);
9572 if (inner_code == INTEGER_CST)
9574 *residue += TREE_INT_CST_LOW (op1);
9575 return modulus;
9577 else if (inner_code == MULT_EXPR)
9579 op1 = TREE_OPERAND (op1, 1);
9580 if (TREE_CODE (op1) == INTEGER_CST)
9582 unsigned HOST_WIDE_INT align;
9584 /* Compute the greatest power-of-2 divisor of op1. */
9585 align = TREE_INT_CST_LOW (op1);
9586 align &= -align;
9588 /* If align is non-zero and less than modulus, replace
9589 modulus with align. If align is 0, then either op1 is 0
9590 or the greatest power-of-2 divisor of op1 doesn't fit in an
9591 unsigned HOST_WIDE_INT. In either case, no additional
9592 constraint is imposed. */
9593 if (align)
9594 modulus = MIN (modulus, align);
9596 return modulus;
9601 /* If we get here, we were unable to determine anything useful about the
9602 expression. */
9603 return 1;
9606 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9607 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9609 static bool
9610 vec_cst_ctor_to_array (tree arg, tree *elts)
9612 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9614 if (TREE_CODE (arg) == VECTOR_CST)
9616 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9617 elts[i] = VECTOR_CST_ELT (arg, i);
9619 else if (TREE_CODE (arg) == CONSTRUCTOR)
9621 constructor_elt *elt;
9623 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9624 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9625 return false;
9626 else
9627 elts[i] = elt->value;
9629 else
9630 return false;
9631 for (; i < nelts; i++)
9632 elts[i]
9633 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9634 return true;
9637 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9638 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9639 NULL_TREE otherwise. */
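/* Illustrative example (annotation, not in the original source):
   for four-element vectors arg0 = {10,11,12,13} and
   arg1 = {20,21,22,23} with sel = {0,5,2,7}, each selector indexes
   the concatenation of the two inputs, giving {10,21,12,23}.  */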
9641 static tree
9642 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9644 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9645 tree *elts;
9646 bool need_ctor = false;
9648 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9649 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9650 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9651 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9652 return NULL_TREE;
9654 elts = XALLOCAVEC (tree, nelts * 3);
9655 if (!vec_cst_ctor_to_array (arg0, elts)
9656 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9657 return NULL_TREE;
9659 for (i = 0; i < nelts; i++)
9661 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9662 need_ctor = true;
9663 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9666 if (need_ctor)
9668 vec<constructor_elt, va_gc> *v;
9669 vec_alloc (v, nelts);
9670 for (i = 0; i < nelts; i++)
9671 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9672 return build_constructor (type, v);
9674 else
9675 return build_vector (type, &elts[2 * nelts]);
9678 /* Try to fold a pointer difference of type TYPE between two address expressions of
9679 array references AREF0 and AREF1 using location LOC. Return a
9680 simplified expression for the difference or NULL_TREE. */
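/* Illustrative example (annotation, not in the original source):
   for "&a[i] - &a[j]" the bases are equal, so the result is
   0 + (i - j) * sizeof (a[0]); for "&(*p)[i] - &(*q)[j]" the
   INDIRECT_REF arm instead derives the base offset from the folded
   pointer difference of p and q.  */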
9682 static tree
9683 fold_addr_of_array_ref_difference (location_t loc, tree type,
9684 tree aref0, tree aref1)
9686 tree base0 = TREE_OPERAND (aref0, 0);
9687 tree base1 = TREE_OPERAND (aref1, 0);
9688 tree base_offset = build_int_cst (type, 0);
9690 /* If the bases are array references as well, recurse. If the bases
9691 are pointer indirections compute the difference of the pointers.
9692 If the bases are equal, we are set. */
9693 if ((TREE_CODE (base0) == ARRAY_REF
9694 && TREE_CODE (base1) == ARRAY_REF
9695 && (base_offset
9696 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9697 || (INDIRECT_REF_P (base0)
9698 && INDIRECT_REF_P (base1)
9699 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9700 TREE_OPERAND (base0, 0),
9701 TREE_OPERAND (base1, 0))))
9702 || operand_equal_p (base0, base1, 0))
9704 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9705 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9706 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9707 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9708 return fold_build2_loc (loc, PLUS_EXPR, type,
9709 base_offset,
9710 fold_build2_loc (loc, MULT_EXPR, type,
9711 diff, esz));
9713 return NULL_TREE;
9716 /* If the real or vector real constant CST of type TYPE has an exact
9717 inverse, return it, else return NULL. */
9719 static tree
9720 exact_inverse (tree type, tree cst)
9722 REAL_VALUE_TYPE r;
9723 tree unit_type, *elts;
9724 machine_mode mode;
9725 unsigned vec_nelts, i;
9727 switch (TREE_CODE (cst))
9729 case REAL_CST:
9730 r = TREE_REAL_CST (cst);
9732 if (exact_real_inverse (TYPE_MODE (type), &r))
9733 return build_real (type, r);
9735 return NULL_TREE;
9737 case VECTOR_CST:
9738 vec_nelts = VECTOR_CST_NELTS (cst);
9739 elts = XALLOCAVEC (tree, vec_nelts);
9740 unit_type = TREE_TYPE (type);
9741 mode = TYPE_MODE (unit_type);
9743 for (i = 0; i < vec_nelts; i++)
9745 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9746 if (!exact_real_inverse (mode, &r))
9747 return NULL_TREE;
9748 elts[i] = build_real (unit_type, r);
9751 return build_vector (type, elts);
9753 default:
9754 return NULL_TREE;
9758 /* Mask out the tz least significant bits of X of type TYPE where
9759 tz is the number of trailing zeroes in Y. */
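/* Illustrative example (annotation, not in the original source):
   for y = 24, which has three trailing zero bits, the result is
   x & ~7; e.g. x = 45 yields 40.  */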
9760 static wide_int
9761 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9763 int tz = wi::ctz (y);
9764 if (tz > 0)
9765 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9766 return x;
9769 /* Return true when T is an address and is known to be nonzero.
9770 For floating point we further ensure that T is not denormal.
9771 Similar logic is present in nonzero_address in rtlanal.h.
9773 If the return value is based on the assumption that signed overflow
9774 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9775 change *STRICT_OVERFLOW_P. */
9777 static bool
9778 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9780 tree type = TREE_TYPE (t);
9781 enum tree_code code;
9783 /* Doing something useful for floating point would need more work. */
9784 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9785 return false;
9787 code = TREE_CODE (t);
9788 switch (TREE_CODE_CLASS (code))
9790 case tcc_unary:
9791 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9792 strict_overflow_p);
9793 case tcc_binary:
9794 case tcc_comparison:
9795 return tree_binary_nonzero_warnv_p (code, type,
9796 TREE_OPERAND (t, 0),
9797 TREE_OPERAND (t, 1),
9798 strict_overflow_p);
9799 case tcc_constant:
9800 case tcc_declaration:
9801 case tcc_reference:
9802 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9804 default:
9805 break;
9808 switch (code)
9810 case TRUTH_NOT_EXPR:
9811 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9812 strict_overflow_p);
9814 case TRUTH_AND_EXPR:
9815 case TRUTH_OR_EXPR:
9816 case TRUTH_XOR_EXPR:
9817 return tree_binary_nonzero_warnv_p (code, type,
9818 TREE_OPERAND (t, 0),
9819 TREE_OPERAND (t, 1),
9820 strict_overflow_p);
9822 case COND_EXPR:
9823 case CONSTRUCTOR:
9824 case OBJ_TYPE_REF:
9825 case ASSERT_EXPR:
9826 case ADDR_EXPR:
9827 case WITH_SIZE_EXPR:
9828 case SSA_NAME:
9829 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9831 case COMPOUND_EXPR:
9832 case MODIFY_EXPR:
9833 case BIND_EXPR:
9834 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9835 strict_overflow_p);
9837 case SAVE_EXPR:
9838 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9839 strict_overflow_p);
9841 case CALL_EXPR:
9843 tree fndecl = get_callee_fndecl (t);
9844 if (!fndecl) return false;
9845 if (flag_delete_null_pointer_checks && !flag_check_new
9846 && DECL_IS_OPERATOR_NEW (fndecl)
9847 && !TREE_NOTHROW (fndecl))
9848 return true;
9849 if (flag_delete_null_pointer_checks
9850 && lookup_attribute ("returns_nonnull",
9851 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9852 return true;
9853 return alloca_call_p (t);
9856 default:
9857 break;
9859 return false;
9862 /* Return true when T is an address and is known to be nonzero.
9863 Handle warnings about undefined signed overflow. */
9865 static bool
9866 tree_expr_nonzero_p (tree t)
9868 bool ret, strict_overflow_p;
9870 strict_overflow_p = false;
9871 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9872 if (strict_overflow_p)
9873 fold_overflow_warning (("assuming signed overflow does not occur when "
9874 "determining that expression is always "
9875 "non-zero"),
9876 WARN_STRICT_OVERFLOW_MISC);
9877 return ret;
9880 /* Fold a binary expression of code CODE and type TYPE with operands
9881 OP0 and OP1. LOC is the location of the resulting expression.
9882 Return the folded expression if folding is successful. Otherwise,
9883 return NULL_TREE. */
9885 tree
9886 fold_binary_loc (location_t loc,
9887 enum tree_code code, tree type, tree op0, tree op1)
9889 enum tree_code_class kind = TREE_CODE_CLASS (code);
9890 tree arg0, arg1, tem;
9891 tree t1 = NULL_TREE;
9892 bool strict_overflow_p;
9893 unsigned int prec;
9895 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9896 && TREE_CODE_LENGTH (code) == 2
9897 && op0 != NULL_TREE
9898 && op1 != NULL_TREE);
9900 arg0 = op0;
9901 arg1 = op1;
9903 /* Strip any conversions that don't change the mode. This is
9904 safe for every expression, except for a comparison expression
9905 because its signedness is derived from its operands. So, in
9906 the latter case, only strip conversions that don't change the
9907 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9908 preserved.
9910 Note that this is done as an internal manipulation within the
9911 constant folder, in order to find the simplest representation
9912 of the arguments so that their form can be studied. In any
9913 case, the appropriate type conversions should be put back in
9914 the tree that will get out of the constant folder. */
9916 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9918 STRIP_SIGN_NOPS (arg0);
9919 STRIP_SIGN_NOPS (arg1);
9921 else
9923 STRIP_NOPS (arg0);
9924 STRIP_NOPS (arg1);
9927 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9928 constant but we can't do arithmetic on them. */
9929 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9930 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9931 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9932 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9933 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9934 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9935 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9937 if (kind == tcc_binary)
9939 /* Make sure type and arg0 have the same saturating flag. */
9940 gcc_assert (TYPE_SATURATING (type)
9941 == TYPE_SATURATING (TREE_TYPE (arg0)));
9942 tem = const_binop (code, arg0, arg1);
9944 else if (kind == tcc_comparison)
9945 tem = fold_relational_const (code, type, arg0, arg1);
9946 else
9947 tem = NULL_TREE;
9949 if (tem != NULL_TREE)
9951 if (TREE_TYPE (tem) != type)
9952 tem = fold_convert_loc (loc, type, tem);
9953 return tem;
9957 /* If this is a commutative operation, and ARG0 is a constant, move it
9958 to ARG1 to reduce the number of tests below. */
9959 if (commutative_tree_code (code)
9960 && tree_swap_operands_p (arg0, arg1, true))
9961 return fold_build2_loc (loc, code, type, op1, op0);
9963 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9964 to ARG1 to reduce the number of tests below. */
9965 if (kind == tcc_comparison
9966 && tree_swap_operands_p (arg0, arg1, true))
9967 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9969 tem = generic_simplify (loc, code, type, op0, op1);
9970 if (tem)
9971 return tem;
9973 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9975 First check for cases where an arithmetic operation is applied to a
9976 compound, conditional, or comparison operation. Push the arithmetic
9977 operation inside the compound or conditional to see if any folding
9978 can then be done. Convert comparison to conditional for this purpose.
9979 This also optimizes non-constant cases that used to be done in
9980 expand_expr.
9982 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9983 one of the operands is a comparison and the other is a comparison, a
9984 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9985 code below would make the expression more complex. Change it to a
9986 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9987 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9989 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9990 || code == EQ_EXPR || code == NE_EXPR)
9991 && TREE_CODE (type) != VECTOR_TYPE
9992 && ((truth_value_p (TREE_CODE (arg0))
9993 && (truth_value_p (TREE_CODE (arg1))
9994 || (TREE_CODE (arg1) == BIT_AND_EXPR
9995 && integer_onep (TREE_OPERAND (arg1, 1)))))
9996 || (truth_value_p (TREE_CODE (arg1))
9997 && (truth_value_p (TREE_CODE (arg0))
9998 || (TREE_CODE (arg0) == BIT_AND_EXPR
9999 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10001 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10002 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10003 : TRUTH_XOR_EXPR,
10004 boolean_type_node,
10005 fold_convert_loc (loc, boolean_type_node, arg0),
10006 fold_convert_loc (loc, boolean_type_node, arg1));
10008 if (code == EQ_EXPR)
10009 tem = invert_truthvalue_loc (loc, tem);
10011 return fold_convert_loc (loc, type, tem);
10014 if (TREE_CODE_CLASS (code) == tcc_binary
10015 || TREE_CODE_CLASS (code) == tcc_comparison)
10017 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10019 tem = fold_build2_loc (loc, code, type,
10020 fold_convert_loc (loc, TREE_TYPE (op0),
10021 TREE_OPERAND (arg0, 1)), op1);
10022 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10023 tem);
10025 if (TREE_CODE (arg1) == COMPOUND_EXPR
10026 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10028 tem = fold_build2_loc (loc, code, type, op0,
10029 fold_convert_loc (loc, TREE_TYPE (op1),
10030 TREE_OPERAND (arg1, 1)));
10031 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10032 tem);
10035 if (TREE_CODE (arg0) == COND_EXPR
10036 || TREE_CODE (arg0) == VEC_COND_EXPR
10037 || COMPARISON_CLASS_P (arg0))
10039 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10040 arg0, arg1,
10041 /*cond_first_p=*/1);
10042 if (tem != NULL_TREE)
10043 return tem;
10046 if (TREE_CODE (arg1) == COND_EXPR
10047 || TREE_CODE (arg1) == VEC_COND_EXPR
10048 || COMPARISON_CLASS_P (arg1))
10050 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10051 arg1, arg0,
10052 /*cond_first_p=*/0);
10053 if (tem != NULL_TREE)
10054 return tem;
10058 switch (code)
10060 case MEM_REF:
10061 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10062 if (TREE_CODE (arg0) == ADDR_EXPR
10063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10065 tree iref = TREE_OPERAND (arg0, 0);
10066 return fold_build2 (MEM_REF, type,
10067 TREE_OPERAND (iref, 0),
10068 int_const_binop (PLUS_EXPR, arg1,
10069 TREE_OPERAND (iref, 1)));
10072 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10073 if (TREE_CODE (arg0) == ADDR_EXPR
10074 && handled_component_p (TREE_OPERAND (arg0, 0)))
10076 tree base;
10077 HOST_WIDE_INT coffset;
10078 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10079 &coffset);
10080 if (!base)
10081 return NULL_TREE;
10082 return fold_build2 (MEM_REF, type,
10083 build_fold_addr_expr (base),
10084 int_const_binop (PLUS_EXPR, arg1,
10085 size_int (coffset)));
10088 return NULL_TREE;
10090 case POINTER_PLUS_EXPR:
10091 /* 0 +p index -> (type)index */
10092 if (integer_zerop (arg0))
10093 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10095 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10096 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10097 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10098 return fold_convert_loc (loc, type,
10099 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10100 fold_convert_loc (loc, sizetype,
10101 arg1),
10102 fold_convert_loc (loc, sizetype,
10103 arg0)));
10105 /* (PTR +p B) +p A -> PTR +p (B + A) */
10106 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10108 tree inner;
10109 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10110 tree arg00 = TREE_OPERAND (arg0, 0);
10111 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10112 arg01, fold_convert_loc (loc, sizetype, arg1));
10113 return fold_convert_loc (loc, type,
10114 fold_build_pointer_plus_loc (loc,
10115 arg00, inner));
10118 /* PTR_CST +p CST -> CST1 */
10119 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10120 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10121 fold_convert_loc (loc, type, arg1));
10123 return NULL_TREE;
10125 case PLUS_EXPR:
10126 /* A + (-B) -> A - B */
10127 if (TREE_CODE (arg1) == NEGATE_EXPR
10128 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10129 return fold_build2_loc (loc, MINUS_EXPR, type,
10130 fold_convert_loc (loc, type, arg0),
10131 fold_convert_loc (loc, type,
10132 TREE_OPERAND (arg1, 0)));
10133 /* (-A) + B -> B - A */
10134 if (TREE_CODE (arg0) == NEGATE_EXPR
10135 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10136 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10137 return fold_build2_loc (loc, MINUS_EXPR, type,
10138 fold_convert_loc (loc, type, arg1),
10139 fold_convert_loc (loc, type,
10140 TREE_OPERAND (arg0, 0)));
10142 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10144 /* Convert ~A + 1 to -A. */
10145 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10146 && integer_each_onep (arg1))
10147 return fold_build1_loc (loc, NEGATE_EXPR, type,
10148 fold_convert_loc (loc, type,
10149 TREE_OPERAND (arg0, 0)));
10151 /* ~X + X is -1. */
10152 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10153 && !TYPE_OVERFLOW_TRAPS (type))
10155 tree tem = TREE_OPERAND (arg0, 0);
10157 STRIP_NOPS (tem);
10158 if (operand_equal_p (tem, arg1, 0))
10160 t1 = build_all_ones_cst (type);
10161 return omit_one_operand_loc (loc, type, t1, arg1);
10165 /* X + ~X is -1. */
10166 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10167 && !TYPE_OVERFLOW_TRAPS (type))
10169 tree tem = TREE_OPERAND (arg1, 0);
10171 STRIP_NOPS (tem);
10172 if (operand_equal_p (arg0, tem, 0))
10174 t1 = build_all_ones_cst (type);
10175 return omit_one_operand_loc (loc, type, t1, arg0);
10179 /* X + (X / CST) * -CST is X % CST. */
10180 if (TREE_CODE (arg1) == MULT_EXPR
10181 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10182 && operand_equal_p (arg0,
10183 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10185 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10186 tree cst1 = TREE_OPERAND (arg1, 1);
10187 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10188 cst1, cst0);
10189 if (sum && integer_zerop (sum))
10190 return fold_convert_loc (loc, type,
10191 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10192 TREE_TYPE (arg0), arg0,
10193 cst0));
10197 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10198 one. Make sure the type is not saturating and has the signedness of
10199 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10200 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10201 if ((TREE_CODE (arg0) == MULT_EXPR
10202 || TREE_CODE (arg1) == MULT_EXPR)
10203 && !TYPE_SATURATING (type)
10204 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10205 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10206 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10208 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10209 if (tem)
10210 return tem;
10213 if (! FLOAT_TYPE_P (type))
10215 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10216 with a constant, and the two constants have no bits in common,
10217 we should treat this as a BIT_IOR_EXPR since this may produce more
10218 simplifications. */
10219 if (TREE_CODE (arg0) == BIT_AND_EXPR
10220 && TREE_CODE (arg1) == BIT_AND_EXPR
10221 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10222 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10223 && wi::bit_and (TREE_OPERAND (arg0, 1),
10224 TREE_OPERAND (arg1, 1)) == 0)
10226 code = BIT_IOR_EXPR;
10227 goto bit_ior;
10230 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10231 (plus (plus (mult) (mult)) (foo)) so that we can
10232 take advantage of the factoring cases below. */
10233 if (TYPE_OVERFLOW_WRAPS (type)
10234 && (((TREE_CODE (arg0) == PLUS_EXPR
10235 || TREE_CODE (arg0) == MINUS_EXPR)
10236 && TREE_CODE (arg1) == MULT_EXPR)
10237 || ((TREE_CODE (arg1) == PLUS_EXPR
10238 || TREE_CODE (arg1) == MINUS_EXPR)
10239 && TREE_CODE (arg0) == MULT_EXPR)))
10241 tree parg0, parg1, parg, marg;
10242 enum tree_code pcode;
10244 if (TREE_CODE (arg1) == MULT_EXPR)
10245 parg = arg0, marg = arg1;
10246 else
10247 parg = arg1, marg = arg0;
10248 pcode = TREE_CODE (parg);
10249 parg0 = TREE_OPERAND (parg, 0);
10250 parg1 = TREE_OPERAND (parg, 1);
10251 STRIP_NOPS (parg0);
10252 STRIP_NOPS (parg1);
10254 if (TREE_CODE (parg0) == MULT_EXPR
10255 && TREE_CODE (parg1) != MULT_EXPR)
10256 return fold_build2_loc (loc, pcode, type,
10257 fold_build2_loc (loc, PLUS_EXPR, type,
10258 fold_convert_loc (loc, type,
10259 parg0),
10260 fold_convert_loc (loc, type,
10261 marg)),
10262 fold_convert_loc (loc, type, parg1));
10263 if (TREE_CODE (parg0) != MULT_EXPR
10264 && TREE_CODE (parg1) == MULT_EXPR)
10265 return
10266 fold_build2_loc (loc, PLUS_EXPR, type,
10267 fold_convert_loc (loc, type, parg0),
10268 fold_build2_loc (loc, pcode, type,
10269 fold_convert_loc (loc, type, marg),
10270 fold_convert_loc (loc, type,
10271 parg1)));
10274 else
10276 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10277 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10278 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10280 /* Likewise if the operands are reversed. */
10281 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10282 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10284 /* Convert X + -C into X - C. */
10285 if (TREE_CODE (arg1) == REAL_CST
10286 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10288 tem = fold_negate_const (arg1, type);
10289 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10290 return fold_build2_loc (loc, MINUS_EXPR, type,
10291 fold_convert_loc (loc, type, arg0),
10292 fold_convert_loc (loc, type, tem));
10295 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10296 to __complex__ ( x, y ). This is not the same for SNaNs or
10297 if signed zeros are involved. */
10298 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10299 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10300 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10302 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10303 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10304 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10305 bool arg0rz = false, arg0iz = false;
10306 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10307 || (arg0i && (arg0iz = real_zerop (arg0i))))
10309 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10310 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10311 if (arg0rz && arg1i && real_zerop (arg1i))
10313 tree rp = arg1r ? arg1r
10314 : build1 (REALPART_EXPR, rtype, arg1);
10315 tree ip = arg0i ? arg0i
10316 : build1 (IMAGPART_EXPR, rtype, arg0);
10317 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10319 else if (arg0iz && arg1r && real_zerop (arg1r))
10321 tree rp = arg0r ? arg0r
10322 : build1 (REALPART_EXPR, rtype, arg0);
10323 tree ip = arg1i ? arg1i
10324 : build1 (IMAGPART_EXPR, rtype, arg1);
10325 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10330 if (flag_unsafe_math_optimizations
10331 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10332 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10333 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10334 return tem;
10336 /* Convert x+x into x*2.0. */
10337 if (operand_equal_p (arg0, arg1, 0)
10338 && SCALAR_FLOAT_TYPE_P (type))
10339 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10340 build_real (type, dconst2));
10342 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10343 We associate floats only if the user has specified
10344 -fassociative-math. */
10345 if (flag_associative_math
10346 && TREE_CODE (arg1) == PLUS_EXPR
10347 && TREE_CODE (arg0) != MULT_EXPR)
10349 tree tree10 = TREE_OPERAND (arg1, 0);
10350 tree tree11 = TREE_OPERAND (arg1, 1);
10351 if (TREE_CODE (tree11) == MULT_EXPR
10352 && TREE_CODE (tree10) == MULT_EXPR)
10354 tree tree0;
10355 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10356 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10359 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10360 We associate floats only if the user has specified
10361 -fassociative-math. */
10362 if (flag_associative_math
10363 && TREE_CODE (arg0) == PLUS_EXPR
10364 && TREE_CODE (arg1) != MULT_EXPR)
10366 tree tree00 = TREE_OPERAND (arg0, 0);
10367 tree tree01 = TREE_OPERAND (arg0, 1);
10368 if (TREE_CODE (tree01) == MULT_EXPR
10369 && TREE_CODE (tree00) == MULT_EXPR)
10371 tree tree0;
10372 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10373 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10378 bit_rotate:
10379 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10380 is a rotate of A by C1 bits. */
10381 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10382 is a rotate of A by B bits. */
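/* Illustrative example (annotation, not in the original source):
   for 32-bit unsigned x, "(x << 3) + (x >> 29)" satisfies
   C1 + C2 == 32 and becomes a left rotate of x by 3, and
   "(x << b) + (x >> (32 - b))" matches the MINUS_EXPR patterns
   handled below.  */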
10384 enum tree_code code0, code1;
10385 tree rtype;
10386 code0 = TREE_CODE (arg0);
10387 code1 = TREE_CODE (arg1);
10388 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10389 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10390 && operand_equal_p (TREE_OPERAND (arg0, 0),
10391 TREE_OPERAND (arg1, 0), 0)
10392 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10393 TYPE_UNSIGNED (rtype))
10394 /* Only create rotates in complete modes. Other cases are not
10395 expanded properly. */
10396 && (element_precision (rtype)
10397 == element_precision (TYPE_MODE (rtype))))
10399 tree tree01, tree11;
10400 enum tree_code code01, code11;
10402 tree01 = TREE_OPERAND (arg0, 1);
10403 tree11 = TREE_OPERAND (arg1, 1);
10404 STRIP_NOPS (tree01);
10405 STRIP_NOPS (tree11);
10406 code01 = TREE_CODE (tree01);
10407 code11 = TREE_CODE (tree11);
10408 if (code01 == INTEGER_CST
10409 && code11 == INTEGER_CST
10410 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10411 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10413 tem = build2_loc (loc, LROTATE_EXPR,
10414 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10415 TREE_OPERAND (arg0, 0),
10416 code0 == LSHIFT_EXPR ? tree01 : tree11);
10417 return fold_convert_loc (loc, type, tem);
10419 else if (code11 == MINUS_EXPR)
10421 tree tree110, tree111;
10422 tree110 = TREE_OPERAND (tree11, 0);
10423 tree111 = TREE_OPERAND (tree11, 1);
10424 STRIP_NOPS (tree110);
10425 STRIP_NOPS (tree111);
10426 if (TREE_CODE (tree110) == INTEGER_CST
10427 && 0 == compare_tree_int (tree110,
10428 element_precision
10429 (TREE_TYPE (TREE_OPERAND
10430 (arg0, 0))))
10431 && operand_equal_p (tree01, tree111, 0))
10432 return
10433 fold_convert_loc (loc, type,
10434 build2 ((code0 == LSHIFT_EXPR
10435 ? LROTATE_EXPR
10436 : RROTATE_EXPR),
10437 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10438 TREE_OPERAND (arg0, 0), tree01));
10440 else if (code01 == MINUS_EXPR)
10442 tree tree010, tree011;
10443 tree010 = TREE_OPERAND (tree01, 0);
10444 tree011 = TREE_OPERAND (tree01, 1);
10445 STRIP_NOPS (tree010);
10446 STRIP_NOPS (tree011);
10447 if (TREE_CODE (tree010) == INTEGER_CST
10448 && 0 == compare_tree_int (tree010,
10449 element_precision
10450 (TREE_TYPE (TREE_OPERAND
10451 (arg0, 0))))
10452 && operand_equal_p (tree11, tree011, 0))
10453 return fold_convert_loc
10454 (loc, type,
10455 build2 ((code0 != LSHIFT_EXPR
10456 ? LROTATE_EXPR
10457 : RROTATE_EXPR),
10458 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10459 TREE_OPERAND (arg0, 0), tree11));
10464 associate:
10465 /* In most languages, we can't associate operations on floats through
10466 parentheses. Rather than remember where the parentheses were, we
10467 don't associate floats at all, unless the user has specified
10468 -fassociative-math.
10469 And we need to make sure the type is not saturating. */
10471 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10472 && !TYPE_SATURATING (type))
10474 tree var0, con0, lit0, minus_lit0;
10475 tree var1, con1, lit1, minus_lit1;
10476 tree atype = type;
10477 bool ok = true;
10479 /* Split both trees into variables, constants, and literals. Then
10480 associate each group together, the constants with literals,
10481 then the result with variables. This increases the chances of
10482 literals being recombined later and of generating relocatable
10483 expressions for the sum of a constant and literal. */
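/* Illustrative example (annotation, not in the original source):
   for "(x + &s) + 5" with s a static variable, split_tree yields
   var0 = x, con0 = &s and lit1 = 5; combining constants with
   literals first allows "&s + 5" to be emitted as a single
   relocatable expression.  */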
10484 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10485 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10486 code == MINUS_EXPR);
10488 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10489 if (code == MINUS_EXPR)
10490 code = PLUS_EXPR;
10492 /* With undefined overflow prefer doing association in a type
10493 which wraps on overflow, if that is one of the operand types. */
10494 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10495 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10497 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10498 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10499 atype = TREE_TYPE (arg0);
10500 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10501 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10502 atype = TREE_TYPE (arg1);
10503 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10506 /* With undefined overflow we can only associate constants with one
10507 variable, and constants whose association doesn't overflow. */
10508 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10509 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10511 if (var0 && var1)
10513 tree tmp0 = var0;
10514 tree tmp1 = var1;
10516 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10517 tmp0 = TREE_OPERAND (tmp0, 0);
10518 if (CONVERT_EXPR_P (tmp0)
10519 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10520 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10521 <= TYPE_PRECISION (atype)))
10522 tmp0 = TREE_OPERAND (tmp0, 0);
10523 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10524 tmp1 = TREE_OPERAND (tmp1, 0);
10525 if (CONVERT_EXPR_P (tmp1)
10526 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10527 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10528 <= TYPE_PRECISION (atype)))
10529 tmp1 = TREE_OPERAND (tmp1, 0);
10530 /* The only case we can still associate with two variables
10531 is if they are the same, modulo negation and bit-pattern
10532 preserving conversions. */
10533 if (!operand_equal_p (tmp0, tmp1, 0))
10534 ok = false;
10538 /* Only do something if we found more than two objects. Otherwise,
10539 nothing has changed and we risk infinite recursion. */
10540 if (ok
10541 && (2 < ((var0 != 0) + (var1 != 0)
10542 + (con0 != 0) + (con1 != 0)
10543 + (lit0 != 0) + (lit1 != 0)
10544 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10546 bool any_overflows = false;
10547 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10548 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10549 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10550 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10551 var0 = associate_trees (loc, var0, var1, code, atype);
10552 con0 = associate_trees (loc, con0, con1, code, atype);
10553 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10554 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10555 code, atype);
10557 /* Preserve the MINUS_EXPR if the negative part of the literal is
10558 greater than the positive part. Otherwise, the multiplicative
10559 folding code (i.e. extract_muldiv) may be fooled when
10560 unsigned constants are subtracted, like in the following
10561 example: ((X*2 + 4) - 8U)/2. */
10562 if (minus_lit0 && lit0)
10564 if (TREE_CODE (lit0) == INTEGER_CST
10565 && TREE_CODE (minus_lit0) == INTEGER_CST
10566 && tree_int_cst_lt (lit0, minus_lit0))
10568 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10569 MINUS_EXPR, atype);
10570 lit0 = 0;
10572 else
10574 lit0 = associate_trees (loc, lit0, minus_lit0,
10575 MINUS_EXPR, atype);
10576 minus_lit0 = 0;
10580 /* Don't introduce overflows through reassociation. */
10581 if (!any_overflows
10582 && ((lit0 && TREE_OVERFLOW (lit0))
10583 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10584 return NULL_TREE;
10586 if (minus_lit0)
10588 if (con0 == 0)
10589 return
10590 fold_convert_loc (loc, type,
10591 associate_trees (loc, var0, minus_lit0,
10592 MINUS_EXPR, atype));
10593 else
10595 con0 = associate_trees (loc, con0, minus_lit0,
10596 MINUS_EXPR, atype);
10597 return
10598 fold_convert_loc (loc, type,
10599 associate_trees (loc, var0, con0,
10600 PLUS_EXPR, atype));
10604 con0 = associate_trees (loc, con0, lit0, code, atype);
10605 return
10606 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10607 code, atype));
10611 return NULL_TREE;
10613 case MINUS_EXPR:
10614 /* Pointer simplifications for subtraction, simple reassociations. */
10615 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10617 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10618 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10619 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10621 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10622 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10623 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10624 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10625 return fold_build2_loc (loc, PLUS_EXPR, type,
10626 fold_build2_loc (loc, MINUS_EXPR, type,
10627 arg00, arg10),
10628 fold_build2_loc (loc, MINUS_EXPR, type,
10629 arg01, arg11));
10631 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10632 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10634 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10635 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10636 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10637 fold_convert_loc (loc, type, arg1));
10638 if (tmp)
10639 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10641 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10642 simplifies. */
10643 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10645 tree arg10 = fold_convert_loc (loc, type,
10646 TREE_OPERAND (arg1, 0));
10647 tree arg11 = fold_convert_loc (loc, type,
10648 TREE_OPERAND (arg1, 1));
10649 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10650 fold_convert_loc (loc, type, arg0),
10651 arg10);
10652 if (tmp)
10653 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10656 /* A - (-B) -> A + B */
10657 if (TREE_CODE (arg1) == NEGATE_EXPR)
10658 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10659 fold_convert_loc (loc, type,
10660 TREE_OPERAND (arg1, 0)));
10661 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10662 if (TREE_CODE (arg0) == NEGATE_EXPR
10663 && negate_expr_p (arg1)
10664 && reorder_operands_p (arg0, arg1))
10665 return fold_build2_loc (loc, MINUS_EXPR, type,
10666 fold_convert_loc (loc, type,
10667 negate_expr (arg1)),
10668 fold_convert_loc (loc, type,
10669 TREE_OPERAND (arg0, 0)));
10670 /* Convert -A - 1 to ~A. */
10671 if (TREE_CODE (arg0) == NEGATE_EXPR
10672 && integer_each_onep (arg1)
10673 && !TYPE_OVERFLOW_TRAPS (type))
10674 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10675 fold_convert_loc (loc, type,
10676 TREE_OPERAND (arg0, 0)));
10678 /* Convert -1 - A to ~A. */
10679 if (TREE_CODE (type) != COMPLEX_TYPE
10680 && integer_all_onesp (arg0))
10681 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10684 /* X - (X / Y) * Y is X % Y. */
10685 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10686 && TREE_CODE (arg1) == MULT_EXPR
10687 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10688 && operand_equal_p (arg0,
10689 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10690 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10691 TREE_OPERAND (arg1, 1), 0))
10692 return
10693 fold_convert_loc (loc, type,
10694 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10695 arg0, TREE_OPERAND (arg1, 1)));
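/* A worked instance of the fold above (illustrative values): for
   integral X and Y with truncating division,

     X - (X / Y) * Y  ==  X % Y

   e.g. X = 7, Y = 3 gives 7 - (7/3)*3 = 7 - 6 = 1, which is 7 % 3.  */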
10697 if (! FLOAT_TYPE_P (type))
10699 if (integer_zerop (arg0))
10700 return negate_expr (fold_convert_loc (loc, type, arg1));
10702 /* Fold A - (A & B) into ~B & A. */
10703 if (!TREE_SIDE_EFFECTS (arg0)
10704 && TREE_CODE (arg1) == BIT_AND_EXPR)
10706 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10708 tree arg10 = fold_convert_loc (loc, type,
10709 TREE_OPERAND (arg1, 0));
10710 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10711 fold_build1_loc (loc, BIT_NOT_EXPR,
10712 type, arg10),
10713 fold_convert_loc (loc, type, arg0));
10715 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10717 tree arg11 = fold_convert_loc (loc,
10718 type, TREE_OPERAND (arg1, 1));
10719 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10720 fold_build1_loc (loc, BIT_NOT_EXPR,
10721 type, arg11),
10722 fold_convert_loc (loc, type, arg0));
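/* Illustration of A - (A & B) -> ~B & A (hypothetical bit patterns):
   with A = 0b1100 and B = 0b1010 we get A & B = 0b1000, so
   A - (A & B) = 0b0100, and likewise ~B & A = 0b0101 & 0b1100 =
   0b0100. The subtraction never borrows, because A & B only contains
   bits that are already set in A.  */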
10726 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10727 any power of 2 minus 1. */
10728 if (TREE_CODE (arg0) == BIT_AND_EXPR
10729 && TREE_CODE (arg1) == BIT_AND_EXPR
10730 && operand_equal_p (TREE_OPERAND (arg0, 0),
10731 TREE_OPERAND (arg1, 0), 0))
10733 tree mask0 = TREE_OPERAND (arg0, 1);
10734 tree mask1 = TREE_OPERAND (arg1, 1);
10735 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10737 if (operand_equal_p (tem, mask1, 0))
10739 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10740 TREE_OPERAND (arg0, 0), mask1);
10741 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10746 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10747 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10748 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10750 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10751 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10752 (-ARG1 + ARG0) reduces to -ARG1. */
10753 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10754 return negate_expr (fold_convert_loc (loc, type, arg1));
10756 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10757 __complex__ ( x, -y ). This is not the same for SNaNs or if
10758 signed zeros are involved. */
10759 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10760 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10761 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10763 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10764 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10765 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10766 bool arg0rz = false, arg0iz = false;
10767 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10768 || (arg0i && (arg0iz = real_zerop (arg0i))))
10770 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10771 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10772 if (arg0rz && arg1i && real_zerop (arg1i))
10774 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10775 arg1r ? arg1r
10776 : build1 (REALPART_EXPR, rtype, arg1));
10777 tree ip = arg0i ? arg0i
10778 : build1 (IMAGPART_EXPR, rtype, arg0);
10779 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10781 else if (arg0iz && arg1r && real_zerop (arg1r))
10783 tree rp = arg0r ? arg0r
10784 : build1 (REALPART_EXPR, rtype, arg0);
10785 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10786 arg1i ? arg1i
10787 : build1 (IMAGPART_EXPR, rtype, arg1));
10788 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10793 /* A - B -> A + (-B) if B is easily negatable. */
10794 if (negate_expr_p (arg1)
10795 && ((FLOAT_TYPE_P (type)
10796 /* Avoid this transformation if B is a positive REAL_CST. */
10797 && (TREE_CODE (arg1) != REAL_CST
10798 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10799 || INTEGRAL_TYPE_P (type)))
10800 return fold_build2_loc (loc, PLUS_EXPR, type,
10801 fold_convert_loc (loc, type, arg0),
10802 fold_convert_loc (loc, type,
10803 negate_expr (arg1)));
10805 /* Try folding difference of addresses. */
10807 HOST_WIDE_INT diff;
10809 if ((TREE_CODE (arg0) == ADDR_EXPR
10810 || TREE_CODE (arg1) == ADDR_EXPR)
10811 && ptr_difference_const (arg0, arg1, &diff))
10812 return build_int_cst_type (type, diff);
10815 /* Fold &a[i] - &a[j] to i-j. */
10816 if (TREE_CODE (arg0) == ADDR_EXPR
10817 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10818 && TREE_CODE (arg1) == ADDR_EXPR
10819 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10821 tree tem = fold_addr_of_array_ref_difference (loc, type,
10822 TREE_OPERAND (arg0, 0),
10823 TREE_OPERAND (arg1, 0));
10824 if (tem)
10825 return tem;
10828 if (FLOAT_TYPE_P (type)
10829 && flag_unsafe_math_optimizations
10830 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10831 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10832 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10833 return tem;
10835 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10836 one. Make sure the type is not saturating and has the signedness of
10837 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10838 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10839 if ((TREE_CODE (arg0) == MULT_EXPR
10840 || TREE_CODE (arg1) == MULT_EXPR)
10841 && !TYPE_SATURATING (type)
10842 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10843 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10844 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10846 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10847 if (tem)
10848 return tem;
10851 goto associate;
10853 case MULT_EXPR:
10854 /* (-A) * (-B) -> A * B */
10855 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10856 return fold_build2_loc (loc, MULT_EXPR, type,
10857 fold_convert_loc (loc, type,
10858 TREE_OPERAND (arg0, 0)),
10859 fold_convert_loc (loc, type,
10860 negate_expr (arg1)));
10861 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10862 return fold_build2_loc (loc, MULT_EXPR, type,
10863 fold_convert_loc (loc, type,
10864 negate_expr (arg0)),
10865 fold_convert_loc (loc, type,
10866 TREE_OPERAND (arg1, 0)));
10868 if (! FLOAT_TYPE_P (type))
10870 /* Transform x * -1 into -x. Make sure to do the negation
10871 on the original operand with conversions not stripped,
10872 because we can only strip non-sign-changing conversions. */
10873 if (integer_minus_onep (arg1))
10874 return fold_convert_loc (loc, type, negate_expr (op0));
10875 /* Transform x * -C into -x * C if x is easily negatable. */
10876 if (TREE_CODE (arg1) == INTEGER_CST
10877 && tree_int_cst_sgn (arg1) == -1
10878 && negate_expr_p (arg0)
10879 && (tem = negate_expr (arg1)) != arg1
10880 && !TREE_OVERFLOW (tem))
10881 return fold_build2_loc (loc, MULT_EXPR, type,
10882 fold_convert_loc (loc, type,
10883 negate_expr (arg0)),
10884 tem);
10886 /* (a * (1 << b)) is (a << b) */
10887 if (TREE_CODE (arg1) == LSHIFT_EXPR
10888 && integer_onep (TREE_OPERAND (arg1, 0)))
10889 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10890 TREE_OPERAND (arg1, 1));
10891 if (TREE_CODE (arg0) == LSHIFT_EXPR
10892 && integer_onep (TREE_OPERAND (arg0, 0)))
10893 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10894 TREE_OPERAND (arg0, 1));
10896 /* (A + A) * C -> A * 2 * C */
10897 if (TREE_CODE (arg0) == PLUS_EXPR
10898 && TREE_CODE (arg1) == INTEGER_CST
10899 && operand_equal_p (TREE_OPERAND (arg0, 0),
10900 TREE_OPERAND (arg0, 1), 0))
10901 return fold_build2_loc (loc, MULT_EXPR, type,
10902 omit_one_operand_loc (loc, type,
10903 TREE_OPERAND (arg0, 0),
10904 TREE_OPERAND (arg0, 1)),
10905 fold_build2_loc (loc, MULT_EXPR, type,
10906 build_int_cst (type, 2) , arg1));
10908 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10909 sign-changing only. */
10910 if (TREE_CODE (arg1) == INTEGER_CST
10911 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10912 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10913 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10915 strict_overflow_p = false;
10916 if (TREE_CODE (arg1) == INTEGER_CST
10917 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10918 &strict_overflow_p)))
10920 if (strict_overflow_p)
10921 fold_overflow_warning (("assuming signed overflow does not "
10922 "occur when simplifying "
10923 "multiplication"),
10924 WARN_STRICT_OVERFLOW_MISC);
10925 return fold_convert_loc (loc, type, tem);
10928 /* Optimize z * conj(z) for integer complex numbers. */
10929 if (TREE_CODE (arg0) == CONJ_EXPR
10930 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10931 return fold_mult_zconjz (loc, type, arg1);
10932 if (TREE_CODE (arg1) == CONJ_EXPR
10933 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10934 return fold_mult_zconjz (loc, type, arg0);
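/* The identity behind fold_mult_zconjz, sketched for z = a + b*i:

     z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   The product is purely real, and for integer complex types it is
   always exact, which is why this instance of the fold needs no
   flag guard.  */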
10936 else
10938 /* Maybe fold x * 0 to 0. The expressions aren't the same
10939 when x is NaN, since x * 0 is also NaN. Nor are they the
10940 same in modes with signed zeros, since multiplying a
10941 negative value by 0 gives -0, not +0. */
10942 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10943 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10944 && real_zerop (arg1))
10945 return omit_one_operand_loc (loc, type, arg1, arg0);
10946 /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
10947 Likewise for complex arithmetic with signed zeros. */
10948 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10949 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10950 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10951 && real_onep (arg1))
10952 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10954 /* Transform x * -1.0 into -x. */
10955 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10956 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10957 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10958 && real_minus_onep (arg1))
10959 return fold_convert_loc (loc, type, negate_expr (arg0));
10961 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10962 the result for floating-point types due to rounding, so it is applied
10963 only if -fassociative-math was specified. */
10964 if (flag_associative_math
10965 && TREE_CODE (arg0) == RDIV_EXPR
10966 && TREE_CODE (arg1) == REAL_CST
10967 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10969 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10970 arg1);
10971 if (tem)
10972 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10973 TREE_OPERAND (arg0, 1));
10976 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10977 if (operand_equal_p (arg0, arg1, 0))
10979 tree tem = fold_strip_sign_ops (arg0);
10980 if (tem != NULL_TREE)
10982 tem = fold_convert_loc (loc, type, tem);
10983 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10987 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10988 This is not the same for NaNs or if signed zeros are
10989 involved. */
10990 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10991 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10992 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10993 && TREE_CODE (arg1) == COMPLEX_CST
10994 && real_zerop (TREE_REALPART (arg1)))
10996 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10997 if (real_onep (TREE_IMAGPART (arg1)))
10998 return
10999 fold_build2_loc (loc, COMPLEX_EXPR, type,
11000 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11001 rtype, arg0)),
11002 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11003 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11004 return
11005 fold_build2_loc (loc, COMPLEX_EXPR, type,
11006 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11007 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11008 rtype, arg0)));
11011 /* Optimize z * conj(z) for floating point complex numbers.
11012 Guarded by flag_unsafe_math_optimizations as non-finite
11013 imaginary components don't produce scalar results. */
11014 if (flag_unsafe_math_optimizations
11015 && TREE_CODE (arg0) == CONJ_EXPR
11016 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11017 return fold_mult_zconjz (loc, type, arg1);
11018 if (flag_unsafe_math_optimizations
11019 && TREE_CODE (arg1) == CONJ_EXPR
11020 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11021 return fold_mult_zconjz (loc, type, arg0);
11023 if (flag_unsafe_math_optimizations)
11025 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11026 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11028 /* Optimizations of root(...)*root(...). */
11029 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11031 tree rootfn, arg;
11032 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11033 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11035 /* Optimize sqrt(x)*sqrt(x) as x. */
11036 if (BUILTIN_SQRT_P (fcode0)
11037 && operand_equal_p (arg00, arg10, 0)
11038 && ! HONOR_SNANS (TYPE_MODE (type)))
11039 return arg00;
11041 /* Optimize root(x)*root(y) as root(x*y). */
11042 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11043 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11044 return build_call_expr_loc (loc, rootfn, 1, arg);
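/* A numeric sketch of the root(x)*root(y) fold (illustrative
   constants): sqrt (2.0) * sqrt (8.0) becomes sqrt (16.0) = 4.0.
   The rewrite can change rounding and is wrong for negative
   arguments (sqrt(-2)*sqrt(-8) is NaN while sqrt(16) is not), hence
   the flag_unsafe_math_optimizations guard on this whole block.  */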
11047 /* Optimize expN(x)*expN(y) as expN(x+y). */
11048 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11050 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11051 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11052 CALL_EXPR_ARG (arg0, 0),
11053 CALL_EXPR_ARG (arg1, 0));
11054 return build_call_expr_loc (loc, expfn, 1, arg);
11057 /* Optimizations of pow(...)*pow(...). */
11058 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11059 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11060 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11062 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11063 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11064 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11065 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11067 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11068 if (operand_equal_p (arg01, arg11, 0))
11070 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11071 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11072 arg00, arg10);
11073 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11076 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11077 if (operand_equal_p (arg00, arg10, 0))
11079 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11080 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11081 arg01, arg11);
11082 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
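/* Worked instances of the two pow folds above (illustrative
   constants):

     pow (x, c) * pow (z, c)  ->  pow (x * z, c)
     pow (x, c) * pow (x, d)  ->  pow (x, c + d)

   e.g. pow (2, 3) * pow (2, 4) = 8 * 16 = 128 = pow (2, 7).  */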
11086 /* Optimize tan(x)*cos(x) as sin(x). */
11087 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11088 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11089 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11090 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11091 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11092 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11093 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11094 CALL_EXPR_ARG (arg1, 0), 0))
11096 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11098 if (sinfn != NULL_TREE)
11099 return build_call_expr_loc (loc, sinfn, 1,
11100 CALL_EXPR_ARG (arg0, 0));
11103 /* Optimize x*pow(x,c) as pow(x,c+1). */
11104 if (fcode1 == BUILT_IN_POW
11105 || fcode1 == BUILT_IN_POWF
11106 || fcode1 == BUILT_IN_POWL)
11108 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11109 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11110 if (TREE_CODE (arg11) == REAL_CST
11111 && !TREE_OVERFLOW (arg11)
11112 && operand_equal_p (arg0, arg10, 0))
11114 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11115 REAL_VALUE_TYPE c;
11116 tree arg;
11118 c = TREE_REAL_CST (arg11);
11119 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11120 arg = build_real (type, c);
11121 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11125 /* Optimize pow(x,c)*x as pow(x,c+1). */
11126 if (fcode0 == BUILT_IN_POW
11127 || fcode0 == BUILT_IN_POWF
11128 || fcode0 == BUILT_IN_POWL)
11130 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11131 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11132 if (TREE_CODE (arg01) == REAL_CST
11133 && !TREE_OVERFLOW (arg01)
11134 && operand_equal_p (arg1, arg00, 0))
11136 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11137 REAL_VALUE_TYPE c;
11138 tree arg;
11140 c = TREE_REAL_CST (arg01);
11141 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11142 arg = build_real (type, c);
11143 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11147 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11148 if (!in_gimple_form
11149 && optimize
11150 && operand_equal_p (arg0, arg1, 0))
11152 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11154 if (powfn)
11156 tree arg = build_real (type, dconst2);
11157 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11162 goto associate;
11164 case BIT_IOR_EXPR:
11165 bit_ior:
11166 /* ~X | X is -1. */
11167 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11168 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11170 t1 = build_zero_cst (type);
11171 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11172 return omit_one_operand_loc (loc, type, t1, arg1);
11175 /* X | ~X is -1. */
11176 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11177 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11179 t1 = build_zero_cst (type);
11180 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11181 return omit_one_operand_loc (loc, type, t1, arg0);
11184 /* Canonicalize (X & C1) | C2. */
11185 if (TREE_CODE (arg0) == BIT_AND_EXPR
11186 && TREE_CODE (arg1) == INTEGER_CST
11187 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11189 int width = TYPE_PRECISION (type), w;
11190 wide_int c1 = TREE_OPERAND (arg0, 1);
11191 wide_int c2 = arg1;
11193 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11194 if ((c1 & c2) == c1)
11195 return omit_one_operand_loc (loc, type, arg1,
11196 TREE_OPERAND (arg0, 0));
11198 wide_int msk = wi::mask (width, false,
11199 TYPE_PRECISION (TREE_TYPE (arg1)));
11201 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11202 if (msk.and_not (c1 | c2) == 0)
11203 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11204 TREE_OPERAND (arg0, 0), arg1);
11206 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11207 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11208 mode which allows further optimizations. */
11209 c1 &= msk;
11210 c2 &= msk;
11211 wide_int c3 = c1.and_not (c2);
11212 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11214 wide_int mask = wi::mask (w, false,
11215 TYPE_PRECISION (type));
11216 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11218 c3 = mask;
11219 break;
11223 if (c3 != c1)
11224 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11225 fold_build2_loc (loc, BIT_AND_EXPR, type,
11226 TREE_OPERAND (arg0, 0),
11227 wide_int_to_tree (type,
11228 c3)),
11229 arg1);
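/* A concrete instance of the (X & C1) | C2 canonicalization
   (hypothetical constants): with C1 = 0xf0 and C2 = 0x3c, the bits
   of C1 that C2 already covers are redundant, so C1 shrinks to
   C1 & ~C2 = 0xc0 and the result is (X & 0xc0) | 0x3c -- unless
   widening C1 back to a whole power-of-two mask (the C3 loop above)
   would enable further folds.  */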
11232 /* (X & Y) | Y is (X, Y). */
11233 if (TREE_CODE (arg0) == BIT_AND_EXPR
11234 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11235 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11236 /* (X & Y) | X is (Y, X). */
11237 if (TREE_CODE (arg0) == BIT_AND_EXPR
11238 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11239 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11240 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11241 /* X | (X & Y) is (Y, X). */
11242 if (TREE_CODE (arg1) == BIT_AND_EXPR
11243 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11244 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11245 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11246 /* X | (Y & X) is (Y, X). */
11247 if (TREE_CODE (arg1) == BIT_AND_EXPR
11248 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11249 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11250 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11252 /* (X & ~Y) | (~X & Y) is X ^ Y */
11253 if (TREE_CODE (arg0) == BIT_AND_EXPR
11254 && TREE_CODE (arg1) == BIT_AND_EXPR)
11256 tree a0, a1, l0, l1, n0, n1;
11258 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11259 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11261 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11262 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11264 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11265 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11267 if ((operand_equal_p (n0, a0, 0)
11268 && operand_equal_p (n1, a1, 0))
11269 || (operand_equal_p (n0, a1, 0)
11270 && operand_equal_p (n1, a0, 0)))
11271 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
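/* Why (X & ~Y) | (~X & Y) is X ^ Y: the first term selects the bit
   positions where X is set and Y is not, the second the converse,
   and their union is exactly the positions where X and Y differ.
   E.g. X = 0b1100, Y = 0b1010:
   (0b1100 & 0b0101) | (0b0011 & 0b1010) = 0b0100 | 0b0010 = 0b0110
   = 0b1100 ^ 0b1010.  */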
11274 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11275 if (t1 != NULL_TREE)
11276 return t1;
11278 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11280 This results in more efficient code for machines without a NAND
11281 instruction. Combine will canonicalize to the first form
11282 which will allow use of NAND instructions provided by the
11283 backend if they exist. */
11284 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11285 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11287 return
11288 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11289 build2 (BIT_AND_EXPR, type,
11290 fold_convert_loc (loc, type,
11291 TREE_OPERAND (arg0, 0)),
11292 fold_convert_loc (loc, type,
11293 TREE_OPERAND (arg1, 0))));
11296 /* See if this can be simplified into a rotate first. If that
11297 is unsuccessful continue in the association code. */
11298 goto bit_rotate;
11300 case BIT_XOR_EXPR:
11301 /* ~X ^ X is -1. */
11302 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11303 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11305 t1 = build_zero_cst (type);
11306 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11307 return omit_one_operand_loc (loc, type, t1, arg1);
11310 /* X ^ ~X is -1. */
11311 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11312 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11314 t1 = build_zero_cst (type);
11315 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11316 return omit_one_operand_loc (loc, type, t1, arg0);
11319 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11320 with a constant, and the two constants have no bits in common,
11321 we should treat this as a BIT_IOR_EXPR since this may produce more
11322 simplifications. */
11323 if (TREE_CODE (arg0) == BIT_AND_EXPR
11324 && TREE_CODE (arg1) == BIT_AND_EXPR
11325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11326 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11327 && wi::bit_and (TREE_OPERAND (arg0, 1),
11328 TREE_OPERAND (arg1, 1)) == 0)
11330 code = BIT_IOR_EXPR;
11331 goto bit_ior;
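/* Example of the disjoint-mask rewrite (hypothetical constants): in
   (X & 0xf0) ^ (Y & 0x0f) the two operands can never both have the
   same bit set, so XOR and IOR agree on every bit; treating the
   expression as BIT_IOR_EXPR lets the IOR simplifications above
   apply as well.  */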
11334 /* (X | Y) ^ X -> Y & ~X. */
11335 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11336 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11338 tree t2 = TREE_OPERAND (arg0, 1);
11339 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11340 arg1);
11341 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11342 fold_convert_loc (loc, type, t2),
11343 fold_convert_loc (loc, type, t1));
11344 return t1;
11347 /* (Y | X) ^ X -> Y & ~X. */
11348 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11349 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11351 tree t2 = TREE_OPERAND (arg0, 0);
11352 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11353 arg1);
11354 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11355 fold_convert_loc (loc, type, t2),
11356 fold_convert_loc (loc, type, t1));
11357 return t1;
11360 /* X ^ (X | Y) -> Y & ~X. */
11361 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11362 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11364 tree t2 = TREE_OPERAND (arg1, 1);
11365 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11366 arg0);
11367 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11368 fold_convert_loc (loc, type, t2),
11369 fold_convert_loc (loc, type, t1));
11370 return t1;
11373 /* X ^ (Y | X) -> Y & ~X. */
11374 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11375 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11377 tree t2 = TREE_OPERAND (arg1, 0);
11378 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11379 arg0);
11380 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11381 fold_convert_loc (loc, type, t2),
11382 fold_convert_loc (loc, type, t1));
11383 return t1;
11386 /* Convert ~X ^ ~Y to X ^ Y. */
11387 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11388 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11389 return fold_build2_loc (loc, code, type,
11390 fold_convert_loc (loc, type,
11391 TREE_OPERAND (arg0, 0)),
11392 fold_convert_loc (loc, type,
11393 TREE_OPERAND (arg1, 0)));
11395 /* Convert ~X ^ C to X ^ ~C. */
11396 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11397 && TREE_CODE (arg1) == INTEGER_CST)
11398 return fold_build2_loc (loc, code, type,
11399 fold_convert_loc (loc, type,
11400 TREE_OPERAND (arg0, 0)),
11401 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11403 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11404 if (TREE_CODE (arg0) == BIT_AND_EXPR
11405 && INTEGRAL_TYPE_P (type)
11406 && integer_onep (TREE_OPERAND (arg0, 1))
11407 && integer_onep (arg1))
11408 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11409 build_zero_cst (TREE_TYPE (arg0)));
11411 /* Fold (X & Y) ^ Y as ~X & Y. */
11412 if (TREE_CODE (arg0) == BIT_AND_EXPR
11413 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11415 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11416 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11417 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11418 fold_convert_loc (loc, type, arg1));
11420 /* Fold (X & Y) ^ X as ~Y & X. */
11421 if (TREE_CODE (arg0) == BIT_AND_EXPR
11422 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11423 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11425 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11426 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11427 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11428 fold_convert_loc (loc, type, arg1));
11430 /* Fold X ^ (X & Y) as X & ~Y. */
11431 if (TREE_CODE (arg1) == BIT_AND_EXPR
11432 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11434 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11435 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11436 fold_convert_loc (loc, type, arg0),
11437 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11439 /* Fold X ^ (Y & X) as ~Y & X. */
11440 if (TREE_CODE (arg1) == BIT_AND_EXPR
11441 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11442 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11444 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11445 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11446 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11447 fold_convert_loc (loc, type, arg0));
11450 /* See if this can be simplified into a rotate first. If that
11451 is unsuccessful continue in the association code. */
11452 goto bit_rotate;
11454 case BIT_AND_EXPR:
11455 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11456 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11457 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11458 || (TREE_CODE (arg0) == EQ_EXPR
11459 && integer_zerop (TREE_OPERAND (arg0, 1))))
11460 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11461 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11463 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11464 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11465 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11466 || (TREE_CODE (arg1) == EQ_EXPR
11467 && integer_zerop (TREE_OPERAND (arg1, 1))))
11468 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11471 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11472 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11473 && TREE_CODE (arg1) == INTEGER_CST
11474 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11476 tree tmp1 = fold_convert_loc (loc, type, arg1);
11477 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11478 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11479 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11480 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11481 return
11482 fold_convert_loc (loc, type,
11483 fold_build2_loc (loc, BIT_IOR_EXPR,
11484 type, tmp2, tmp3));
11487 /* (X | Y) & Y is (X, Y). */
11488 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11489 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11490 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11491 /* (X | Y) & X is (Y, X). */
11492 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11493 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11494 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11495 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11496 /* X & (X | Y) is (Y, X). */
11497 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11498 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11499 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11500 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11501 /* X & (Y | X) is (Y, X). */
11502 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11503 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11504 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11505 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11507 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11508 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11509 && INTEGRAL_TYPE_P (type)
11510 && integer_onep (TREE_OPERAND (arg0, 1))
11511 && integer_onep (arg1))
11513 tree tem2;
11514 tem = TREE_OPERAND (arg0, 0);
11515 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11516 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11517 tem, tem2);
11518 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11519 build_zero_cst (TREE_TYPE (tem)));
11521 /* Fold ~X & 1 as (X & 1) == 0. */
11522 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11523 && INTEGRAL_TYPE_P (type)
11524 && integer_onep (arg1))
11526 tree tem2;
11527 tem = TREE_OPERAND (arg0, 0);
11528 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11529 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11530 tem, tem2);
11531 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11532 build_zero_cst (TREE_TYPE (tem)));
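/* Both folds above extract the complement of X's low bit; a short
   truth table (illustrative):

     low bit of X = 0:  (X ^ 1) & 1 = 1,  ~X & 1 = 1,  (X & 1) == 0 true
     low bit of X = 1:  (X ^ 1) & 1 = 0,  ~X & 1 = 0,  (X & 1) == 0 false

   so each is equivalent to the comparison form built here.  */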
11534 /* Fold !X & 1 as X == 0. */
11535 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11536 && integer_onep (arg1))
11538 tem = TREE_OPERAND (arg0, 0);
11539 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11540 build_zero_cst (TREE_TYPE (tem)));
11543 /* Fold (X ^ Y) & Y as ~X & Y. */
11544 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11545 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11547 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11549 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11550 fold_convert_loc (loc, type, arg1));
11552 /* Fold (X ^ Y) & X as ~Y & X. */
11553 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11555 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11557 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11558 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11559 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11560 fold_convert_loc (loc, type, arg1));
11562 /* Fold X & (X ^ Y) as X & ~Y. */
11563 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11564 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11566 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11567 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11568 fold_convert_loc (loc, type, arg0),
11569 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11571 /* Fold X & (Y ^ X) as ~Y & X. */
11572 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11573 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11574 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11576 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11577 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11578 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11579 fold_convert_loc (loc, type, arg0));
11582 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11583 multiple of 1 << CST. */
11584 if (TREE_CODE (arg1) == INTEGER_CST)
11586 wide_int cst1 = arg1;
11587 wide_int ncst1 = -cst1;
11588 if ((cst1 & ncst1) == ncst1
11589 && multiple_of_p (type, arg0,
11590 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11591 return fold_convert_loc (loc, type, arg0);
11594 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11595 bits from CST2. */
11596 if (TREE_CODE (arg1) == INTEGER_CST
11597 && TREE_CODE (arg0) == MULT_EXPR
11598 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11600 wide_int warg1 = arg1;
11601 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11603 if (masked == 0)
11604 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11605 arg0, arg1);
11606 else if (masked != warg1)
11608 /* Avoid the transform if arg1 is a mask of some
11609 mode which allows further optimizations. */
11610 int pop = wi::popcount (warg1);
11611 if (!(pop >= BITS_PER_UNIT
11612 && exact_log2 (pop) != -1
11613 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11614 return fold_build2_loc (loc, code, type, op0,
11615 wide_int_to_tree (type, masked));
11619 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11620 ((A & N) + B) & M -> (A + B) & M
11621 Similarly if (N & M) == 0,
11622 ((A | N) + B) & M -> (A + B) & M
11623 and for - instead of + (or unary - instead of +)
11624 and/or ^ instead of |.
11625 If B is constant and (B & M) == 0, fold into A & M. */
11626 if (TREE_CODE (arg1) == INTEGER_CST)
11628 wide_int cst1 = arg1;
11629 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11630 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11631 && (TREE_CODE (arg0) == PLUS_EXPR
11632 || TREE_CODE (arg0) == MINUS_EXPR
11633 || TREE_CODE (arg0) == NEGATE_EXPR)
11634 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11635 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11637 tree pmop[2];
11638 int which = 0;
11639 wide_int cst0;
11641 /* Now we know that arg0 is (C + D) or (C - D) or
11642 -C, and arg1 (M) equals (1LL << cst) - 1.
11643 Store C into PMOP[0] and D into PMOP[1]. */
11644 pmop[0] = TREE_OPERAND (arg0, 0);
11645 pmop[1] = NULL;
11646 if (TREE_CODE (arg0) != NEGATE_EXPR)
11648 pmop[1] = TREE_OPERAND (arg0, 1);
11649 which = 1;
11652 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11653 which = -1;
11655 for (; which >= 0; which--)
11656 switch (TREE_CODE (pmop[which]))
11658 case BIT_AND_EXPR:
11659 case BIT_IOR_EXPR:
11660 case BIT_XOR_EXPR:
11661 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11662 != INTEGER_CST)
11663 break;
11664 cst0 = TREE_OPERAND (pmop[which], 1);
11665 cst0 &= cst1;
11666 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11668 if (cst0 != cst1)
11669 break;
11671 else if (cst0 != 0)
11672 break;
11673 /* If C or D is of the form (A & N) where
11674 (N & M) == M, or of the form (A | N) or
11675 (A ^ N) where (N & M) == 0, replace it with A. */
11676 pmop[which] = TREE_OPERAND (pmop[which], 0);
11677 break;
11678 case INTEGER_CST:
11679 /* If C or D is a constant N where (N & M) == 0, it can be
11680 omitted (assumed 0). */
11681 if ((TREE_CODE (arg0) == PLUS_EXPR
11682 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11683 && (cst1 & pmop[which]) == 0)
11684 pmop[which] = NULL;
11685 break;
11686 default:
11687 break;
11690 /* Only build anything new if we optimized one or both arguments
11691 above. */
11692 if (pmop[0] != TREE_OPERAND (arg0, 0)
11693 || (TREE_CODE (arg0) != NEGATE_EXPR
11694 && pmop[1] != TREE_OPERAND (arg0, 1)))
11696 tree utype = TREE_TYPE (arg0);
11697 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11699 /* Perform the operations in a type that has defined
11700 overflow behavior. */
11701 utype = unsigned_type_for (TREE_TYPE (arg0));
11702 if (pmop[0] != NULL)
11703 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11704 if (pmop[1] != NULL)
11705 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11708 if (TREE_CODE (arg0) == NEGATE_EXPR)
11709 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11710 else if (TREE_CODE (arg0) == PLUS_EXPR)
11712 if (pmop[0] != NULL && pmop[1] != NULL)
11713 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11714 pmop[0], pmop[1]);
11715 else if (pmop[0] != NULL)
11716 tem = pmop[0];
11717 else if (pmop[1] != NULL)
11718 tem = pmop[1];
11719 else
11720 return build_int_cst (type, 0);
11722 else if (pmop[0] == NULL)
11723 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11724 else
11725 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11726 pmop[0], pmop[1]);
11727 /* TEM is now the new binary +, - or unary - replacement. */
11728 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11729 fold_convert_loc (loc, utype, arg1));
11730 return fold_convert_loc (loc, type, tem);
11735 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11736 if (t1 != NULL_TREE)
11737 return t1;
11738 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11739 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11740 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11742 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11744 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11745 if (mask == -1)
11746 return
11747 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11750 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11752 This results in more efficient code for machines without a NOR
11753 instruction. Combine will canonicalize to the first form
11754 which will allow use of NOR instructions provided by the
11755 backend if they exist. */
11756 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11757 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11759 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11760 build2 (BIT_IOR_EXPR, type,
11761 fold_convert_loc (loc, type,
11762 TREE_OPERAND (arg0, 0)),
11763 fold_convert_loc (loc, type,
11764 TREE_OPERAND (arg1, 0))));
11767 /* If arg0 is derived from the address of an object or function, we may
11768 be able to fold this expression using the object or function's
11769 alignment. */
11770 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11772 unsigned HOST_WIDE_INT modulus, residue;
11773 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11775 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11776 integer_onep (arg1));
11778 /* This works because modulus is a power of 2. If this weren't the
11779 case, we'd have to replace it by its greatest power-of-2
11780 divisor: modulus & -modulus. */
11781 if (low < modulus)
11782 return build_int_cst (type, residue & low);
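/* Sketch of the alignment fold (hypothetical object): if arg0 is the
   address of a variable known to be 8-byte aligned,
   get_pointer_modulus_and_residue reports modulus = 8 and
   residue = 0, so an expression like ((uintptr_t) &var) & 7 folds to
   the constant 0 -- the masked bits are fully determined whenever the
   mask value is below the modulus.  */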
11785 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11786 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11787 if the new mask might be further optimized. */
11788 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11789 || TREE_CODE (arg0) == RSHIFT_EXPR)
11790 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11791 && TREE_CODE (arg1) == INTEGER_CST
11792 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11793 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11794 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11795 < TYPE_PRECISION (TREE_TYPE (arg0))))
11797 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11798 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11799 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11800 tree shift_type = TREE_TYPE (arg0);
11802 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11803 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11804 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11805 && TYPE_PRECISION (TREE_TYPE (arg0))
11806 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11808 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11809 tree arg00 = TREE_OPERAND (arg0, 0);
11810 /* See if more bits can be proven to be zero because of
11811 zero extension. */
11812 if (TREE_CODE (arg00) == NOP_EXPR
11813 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11815 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11816 if (TYPE_PRECISION (inner_type)
11817 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11818 && TYPE_PRECISION (inner_type) < prec)
11820 prec = TYPE_PRECISION (inner_type);
11821 /* See if we can shorten the right shift. */
11822 if (shiftc < prec)
11823 shift_type = inner_type;
11824 /* Otherwise X >> C1 is all zeros, so we'll optimize
11825 it into (X, 0) later on by making sure zerobits
11826 is all ones. */
11829 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11830 if (shiftc < prec)
11832 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11833 zerobits <<= prec - shiftc;
11835 /* For an arithmetic shift, if the sign bit could be set, zerobits
11836 can actually contain sign bits, so no transformation is
11837 possible, unless MASK masks them all away. In that
11838 case the shift needs to be converted into a logical shift. */
11839 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11840 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11842 if ((mask & zerobits) == 0)
11843 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11844 else
11845 zerobits = 0;
11849 /* ((X << 16) & 0xff00) is (X, 0). */
11850 if ((mask & zerobits) == mask)
11851 return omit_one_operand_loc (loc, type,
11852 build_int_cst (type, 0), arg0);
11854 newmask = mask | zerobits;
11855 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11857 /* Only do the transformation if NEWMASK is some integer
11858 mode's mask. */
11859 for (prec = BITS_PER_UNIT;
11860 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11861 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11862 break;
11863 if (prec < HOST_BITS_PER_WIDE_INT
11864 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11866 tree newmaskt;
11868 if (shift_type != TREE_TYPE (arg0))
11870 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11871 fold_convert_loc (loc, shift_type,
11872 TREE_OPERAND (arg0, 0)),
11873 TREE_OPERAND (arg0, 1));
11874 tem = fold_convert_loc (loc, type, tem);
11876 else
11877 tem = op0;
11878 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11879 if (!tree_int_cst_equal (newmaskt, arg1))
11880 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11885 goto associate;
11887 case RDIV_EXPR:
11888 /* Don't touch a floating-point divide by zero unless the mode
11889 of the constant can represent infinity. */
11890 if (TREE_CODE (arg1) == REAL_CST
11891 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11892 && real_zerop (arg1))
11893 return NULL_TREE;
11895 /* Optimize A / A to 1.0 if we don't care about
11896 NaNs or Infinities. Skip the transformation
11897 for non-real operands. */
11898 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11899 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11900 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11901 && operand_equal_p (arg0, arg1, 0))
11903 tree r = build_real (TREE_TYPE (arg0), dconst1);
11905 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11908 /* The complex version of the above A / A optimization. */
11909 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11910 && operand_equal_p (arg0, arg1, 0))
11912 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11913 if (! HONOR_NANS (TYPE_MODE (elem_type))
11914 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11916 tree r = build_real (elem_type, dconst1);
11917 /* omit_two_operands will call fold_convert for us. */
11918 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11922 /* (-A) / (-B) -> A / B */
11923 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11924 return fold_build2_loc (loc, RDIV_EXPR, type,
11925 TREE_OPERAND (arg0, 0),
11926 negate_expr (arg1));
11927 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11928 return fold_build2_loc (loc, RDIV_EXPR, type,
11929 negate_expr (arg0),
11930 TREE_OPERAND (arg1, 0));
11932 /* In IEEE floating point, x/1 is not equivalent to x for SNaNs. */
11933 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11934 && real_onep (arg1))
11935 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11937 /* In IEEE floating point, x/-1 is not equivalent to -x for SNaNs. */
11938 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11939 && real_minus_onep (arg1))
11940 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11941 negate_expr (arg0)));
11943 /* If ARG1 is a constant, we can convert this to a multiply by the
11944 reciprocal. This does not have the same rounding properties,
11945 so only do this if -freciprocal-math. We can actually
11946 always safely do it if ARG1 is a power of two, but it is hard to
11947 tell whether it is in a portable manner. */
11948 if (optimize
11949 && (TREE_CODE (arg1) == REAL_CST
11950 || (TREE_CODE (arg1) == COMPLEX_CST
11951 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11952 || (TREE_CODE (arg1) == VECTOR_CST
11953 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11955 if (flag_reciprocal_math
11956 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11957 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11958 /* Find the reciprocal if optimizing and the result is exact.
11959 TODO: Complex reciprocal not implemented. */
11960 if (TREE_CODE (arg1) != COMPLEX_CST)
11962 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11964 if (inverse)
11965 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
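/* Examples for the reciprocal paths above (illustrative constants):
   under -freciprocal-math, x / 3.0 becomes x * (1.0 / 3.0), which
   may round differently; x / 2.0 -> x * 0.5 is exact because 2.0 is
   a power of two, and exact_inverse accepts that case even without
   the flag.  */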
11968 /* Convert A/B/C to A/(B*C). */
11969 if (flag_reciprocal_math
11970 && TREE_CODE (arg0) == RDIV_EXPR)
11971 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11972 fold_build2_loc (loc, MULT_EXPR, type,
11973 TREE_OPERAND (arg0, 1), arg1));
11975 /* Convert A/(B/C) to (A/B)*C. */
11976 if (flag_reciprocal_math
11977 && TREE_CODE (arg1) == RDIV_EXPR)
11978 return fold_build2_loc (loc, MULT_EXPR, type,
11979 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11980 TREE_OPERAND (arg1, 0)),
11981 TREE_OPERAND (arg1, 1));
11983 /* Convert C1/(X*C2) into (C1/C2)/X. */
11984 if (flag_reciprocal_math
11985 && TREE_CODE (arg1) == MULT_EXPR
11986 && TREE_CODE (arg0) == REAL_CST
11987 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11989 tree tem = const_binop (RDIV_EXPR, arg0,
11990 TREE_OPERAND (arg1, 1));
11991 if (tem)
11992 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11993 TREE_OPERAND (arg1, 0));
11996 if (flag_unsafe_math_optimizations)
11998 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11999 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12001 /* Optimize sin(x)/cos(x) as tan(x). */
12002 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12003 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12004 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12005 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12006 CALL_EXPR_ARG (arg1, 0), 0))
12008 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12010 if (tanfn != NULL_TREE)
12011 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12014 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12015 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12016 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12017 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12018 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12019 CALL_EXPR_ARG (arg1, 0), 0))
12021 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12023 if (tanfn != NULL_TREE)
12025 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12026 CALL_EXPR_ARG (arg0, 0));
12027 return fold_build2_loc (loc, RDIV_EXPR, type,
12028 build_real (type, dconst1), tmp);
12032 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12033 NaNs or Infinities. */
12034 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12035 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12036 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12038 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12039 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12041 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12042 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12043 && operand_equal_p (arg00, arg01, 0))
12045 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12047 if (cosfn != NULL_TREE)
12048 return build_call_expr_loc (loc, cosfn, 1, arg00);
12052 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12053 NaNs or Infinities. */
12054 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12055 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12056 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12058 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12059 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12061 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12062 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12063 && operand_equal_p (arg00, arg01, 0))
12065 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12067 if (cosfn != NULL_TREE)
12069 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12070 return fold_build2_loc (loc, RDIV_EXPR, type,
12071 build_real (type, dconst1),
12072 tmp);
12077 /* Optimize pow(x,c)/x as pow(x,c-1). */
12078 if (fcode0 == BUILT_IN_POW
12079 || fcode0 == BUILT_IN_POWF
12080 || fcode0 == BUILT_IN_POWL)
12082 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12083 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12084 if (TREE_CODE (arg01) == REAL_CST
12085 && !TREE_OVERFLOW (arg01)
12086 && operand_equal_p (arg1, arg00, 0))
12088 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12089 REAL_VALUE_TYPE c;
12090 tree arg;
12092 c = TREE_REAL_CST (arg01);
12093 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12094 arg = build_real (type, c);
12095 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12099 /* Optimize a/root(b/c) into a*root(c/b). */
12100 if (BUILTIN_ROOT_P (fcode1))
12102 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12104 if (TREE_CODE (rootarg) == RDIV_EXPR)
12106 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12107 tree b = TREE_OPERAND (rootarg, 0);
12108 tree c = TREE_OPERAND (rootarg, 1);
12110 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12112 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12113 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12117 /* Optimize x/expN(y) into x*expN(-y). */
12118 if (BUILTIN_EXPONENT_P (fcode1))
12120 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12121 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12122 arg1 = build_call_expr_loc (loc,
12123 expfn, 1,
12124 fold_convert_loc (loc, type, arg));
12125 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12128 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12129 if (fcode1 == BUILT_IN_POW
12130 || fcode1 == BUILT_IN_POWF
12131 || fcode1 == BUILT_IN_POWL)
12133 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12134 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12135 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12136 tree neg11 = fold_convert_loc (loc, type,
12137 negate_expr (arg11));
12138 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12139 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12142 return NULL_TREE;
12144 case TRUNC_DIV_EXPR:
12145 /* Optimize (X & (-A)) / A where A is a power of 2,
12146 to X >> log2(A). */
12147 if (TREE_CODE (arg0) == BIT_AND_EXPR
12148 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12149 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12151 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12152 arg1, TREE_OPERAND (arg0, 1));
12153 if (sum && integer_zerop (sum))
12154 tree pow2 = build_int_cst (integer_type_node,
12155 wi::exact_log2 (arg1));
12156 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12157 TREE_OPERAND (arg0, 0), pow2);
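/* Worked instance of (X & -A) / A -> X >> log2(A) (illustrative,
   A = 8): X & -8 rounds X down to a multiple of 8, so the exact
   quotient (X & -8) / 8 equals the arithmetic shift X >> 3; the sum
   check above verifies that the mask and divisor really are -A
   and A.  */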
12161 /* Fall through */
12163 case FLOOR_DIV_EXPR:
12164 /* Simplify A / (B << N) where A and B are positive and B is
12165 a power of 2, to A >> (N + log2(B)). */
12166 strict_overflow_p = false;
12167 if (TREE_CODE (arg1) == LSHIFT_EXPR
12168 && (TYPE_UNSIGNED (type)
12169 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12171 tree sval = TREE_OPERAND (arg1, 0);
12172 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12174 tree sh_cnt = TREE_OPERAND (arg1, 1);
12175 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12176 wi::exact_log2 (sval));
12178 if (strict_overflow_p)
12179 fold_overflow_warning (("assuming signed overflow does not "
12180 "occur when simplifying A / (B << N)"),
12181 WARN_STRICT_OVERFLOW_MISC);
12183 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12184 sh_cnt, pow2);
12185 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12186 fold_convert_loc (loc, type, arg0), sh_cnt);
12190 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12191 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12192 if (INTEGRAL_TYPE_P (type)
12193 && TYPE_UNSIGNED (type)
12194 && code == FLOOR_DIV_EXPR)
12195 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12197 /* Fall through */
12199 case ROUND_DIV_EXPR:
12200 case CEIL_DIV_EXPR:
12201 case EXACT_DIV_EXPR:
12202 if (integer_zerop (arg1))
12203 return NULL_TREE;
12204 /* X / -1 is -X. */
12205 if (!TYPE_UNSIGNED (type)
12206 && TREE_CODE (arg1) == INTEGER_CST
12207 && wi::eq_p (arg1, -1))
12208 return fold_convert_loc (loc, type, negate_expr (arg0));
12210 /* Convert -A / -B to A / B when the type is signed and overflow is
12211 undefined. */
12212 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12213 && TREE_CODE (arg0) == NEGATE_EXPR
12214 && negate_expr_p (arg1))
12216 if (INTEGRAL_TYPE_P (type))
12217 fold_overflow_warning (("assuming signed overflow does not occur "
12218 "when distributing negation across "
12219 "division"),
12220 WARN_STRICT_OVERFLOW_MISC);
12221 return fold_build2_loc (loc, code, type,
12222 fold_convert_loc (loc, type,
12223 TREE_OPERAND (arg0, 0)),
12224 fold_convert_loc (loc, type,
12225 negate_expr (arg1)));
12227 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12228 && TREE_CODE (arg1) == NEGATE_EXPR
12229 && negate_expr_p (arg0))
12231 if (INTEGRAL_TYPE_P (type))
12232 fold_overflow_warning (("assuming signed overflow does not occur "
12233 "when distributing negation across "
12234 "division"),
12235 WARN_STRICT_OVERFLOW_MISC);
12236 return fold_build2_loc (loc, code, type,
12237 fold_convert_loc (loc, type,
12238 negate_expr (arg0)),
12239 fold_convert_loc (loc, type,
12240 TREE_OPERAND (arg1, 0)));
12243 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12244 operation, EXACT_DIV_EXPR.
12246 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12247 At one time others generated faster code; it's not clear whether they do
12248 after the last round of changes to the DIV code in expmed.c. */
12249 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12250 && multiple_of_p (type, arg0, arg1))
12251 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12253 strict_overflow_p = false;
12254 if (TREE_CODE (arg1) == INTEGER_CST
12255 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12256 &strict_overflow_p)))
12258 if (strict_overflow_p)
12259 fold_overflow_warning (("assuming signed overflow does not occur "
12260 "when simplifying division"),
12261 WARN_STRICT_OVERFLOW_MISC);
12262 return fold_convert_loc (loc, type, tem);
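/* As an illustration of the kind of fold extract_muldiv can
   perform here (a sketch, assuming signed overflow is undefined,
   hence the warning above): for a signed int x,
     (x * 8) / 4   may fold to   x * 2
   because the constant divisor divides the constant factor.  */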
12265 return NULL_TREE;
12267 case CEIL_MOD_EXPR:
12268 case FLOOR_MOD_EXPR:
12269 case ROUND_MOD_EXPR:
12270 case TRUNC_MOD_EXPR:
12271 /* X % -1 is zero. */
12272 if (!TYPE_UNSIGNED (type)
12273 && TREE_CODE (arg1) == INTEGER_CST
12274 && wi::eq_p (arg1, -1))
12275 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12277 /* X % -C is the same as X % C. */
12278 if (code == TRUNC_MOD_EXPR
12279 && TYPE_SIGN (type) == SIGNED
12280 && TREE_CODE (arg1) == INTEGER_CST
12281 && !TREE_OVERFLOW (arg1)
12282 && wi::neg_p (arg1)
12283 && !TYPE_OVERFLOW_TRAPS (type)
12284 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12285 && !sign_bit_p (arg1, arg1))
12286 return fold_build2_loc (loc, code, type,
12287 fold_convert_loc (loc, type, arg0),
12288 fold_convert_loc (loc, type,
12289 negate_expr (arg1)));
12291 /* X % -Y is the same as X % Y. */
12292 if (code == TRUNC_MOD_EXPR
12293 && !TYPE_UNSIGNED (type)
12294 && TREE_CODE (arg1) == NEGATE_EXPR
12295 && !TYPE_OVERFLOW_TRAPS (type))
12296 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12297 fold_convert_loc (loc, type,
12298 TREE_OPERAND (arg1, 0)));
12300 strict_overflow_p = false;
12301 if (TREE_CODE (arg1) == INTEGER_CST
12302 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12303 &strict_overflow_p)))
12305 if (strict_overflow_p)
12306 fold_overflow_warning (("assuming signed overflow does not occur "
12307 "when simplifying modulus"),
12308 WARN_STRICT_OVERFLOW_MISC);
12309 return fold_convert_loc (loc, type, tem);
12312 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12313 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12314 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12315 && (TYPE_UNSIGNED (type)
12316 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12318 tree c = arg1;
12319 /* Also optimize A % (C << N) where C is a power of 2,
12320 to A & ((C << N) - 1). */
12321 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12322 c = TREE_OPERAND (arg1, 0);
12324 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12326 tree mask
12327 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12328 build_int_cst (TREE_TYPE (arg1), 1));
12329 if (strict_overflow_p)
12330 fold_overflow_warning (("assuming signed overflow does not "
12331 "occur when simplifying "
12332 "X % (power of two)"),
12333 WARN_STRICT_OVERFLOW_MISC);
12334 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12335 fold_convert_loc (loc, type, arg0),
12336 fold_convert_loc (loc, type, mask));
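/* Illustrative examples, assuming an unsigned int x:
     x % 16          folds to   x & 15
     x % (2U << n)   folds to   x & ((2U << n) - 1)
   The mask keeps exactly the bits below the power-of-two
   modulus.  */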
12340 return NULL_TREE;
12342 case LROTATE_EXPR:
12343 case RROTATE_EXPR:
12344 if (integer_all_onesp (arg0))
12345 return omit_one_operand_loc (loc, type, arg0, arg1);
12346 goto shift;
12348 case RSHIFT_EXPR:
12349 /* Optimize -1 >> x for arithmetic right shifts. */
12350 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12351 && tree_expr_nonnegative_p (arg1))
12352 return omit_one_operand_loc (loc, type, arg0, arg1);
12353 /* ... fall through ... */
12355 case LSHIFT_EXPR:
12356 shift:
12357 if (integer_zerop (arg1))
12358 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12359 if (integer_zerop (arg0))
12360 return omit_one_operand_loc (loc, type, arg0, arg1);
12362 /* Prefer vector1 << scalar to vector1 << vector2
12363 if vector2 is uniform. */
12364 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12365 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12366 return fold_build2_loc (loc, code, type, op0, tem);
12368 /* Since a negative shift count is not well-defined,
12369 don't try to compute it in the compiler. */
12370 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12371 return NULL_TREE;
12373 prec = element_precision (type);
12375 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12376 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12377 && tree_to_uhwi (arg1) < prec
12378 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12379 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12381 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12382 + tree_to_uhwi (arg1));
12384 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12385 being well defined. */
12386 if (low >= prec)
12388 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12389 low = low % prec;
12390 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12391 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12392 TREE_OPERAND (arg0, 0));
12393 else
12394 low = prec - 1;
12397 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12398 build_int_cst (TREE_TYPE (arg1), low));
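/* Illustrative examples on a 32-bit type:
     (x << 3) << 5     folds to   x << 8
     (x >> 20) >> 20   folds to   0         for unsigned x
     (x >> 20) >> 20   folds to   x >> 31   for signed x
   In the last two cases the combined count 40 is >= the precision,
   so the result is either all zeros or a smear of the sign bit.  */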
12401 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12402 into x & ((unsigned)-1 >> c) for unsigned types. */
12403 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12404 || (TYPE_UNSIGNED (type)
12405 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12406 && tree_fits_uhwi_p (arg1)
12407 && tree_to_uhwi (arg1) < prec
12408 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12409 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12411 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12412 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12413 tree lshift;
12414 tree arg00;
12416 if (low0 == low1)
12418 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12420 lshift = build_minus_one_cst (type);
12421 lshift = const_binop (code, lshift, arg1);
12423 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
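/* Illustrative example, assuming a 32-bit unsigned int x:
     (x >> 4) << 4   folds to   x & 0xfffffff0
     (x << 4) >> 4   folds to   x & 0x0fffffff
   The round trip merely clears the bits that the first shift
   discarded.  */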
12427 /* Rewrite an LROTATE_EXPR by a constant into an
12428 RROTATE_EXPR by a new constant. */
12429 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12431 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12432 tem = const_binop (MINUS_EXPR, tem, arg1);
12433 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
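/* Illustrative example on a 32-bit type: a left rotate by 3
   becomes a right rotate by 32 - 3 == 29, so the cases below only
   need to handle one rotate direction.  */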
12436 /* If we have a rotate of a bit operation with the rotate count and
12437 the second operand of the bit operation both constant,
12438 permute the two operations. */
12439 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12440 && (TREE_CODE (arg0) == BIT_AND_EXPR
12441 || TREE_CODE (arg0) == BIT_IOR_EXPR
12442 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12444 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12445 fold_build2_loc (loc, code, type,
12446 TREE_OPERAND (arg0, 0), arg1),
12447 fold_build2_loc (loc, code, type,
12448 TREE_OPERAND (arg0, 1), arg1));
12450 /* Two consecutive rotates adding up to some integer
12451 multiple of the precision of the type can be ignored. */
12452 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12453 && TREE_CODE (arg0) == RROTATE_EXPR
12454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12455 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12456 prec) == 0)
12457 return TREE_OPERAND (arg0, 0);
12459 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12460 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12461 if the latter can be further optimized. */
12462 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12463 && TREE_CODE (arg0) == BIT_AND_EXPR
12464 && TREE_CODE (arg1) == INTEGER_CST
12465 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12467 tree mask = fold_build2_loc (loc, code, type,
12468 fold_convert_loc (loc, type,
12469 TREE_OPERAND (arg0, 1)),
12470 arg1);
12471 tree shift = fold_build2_loc (loc, code, type,
12472 fold_convert_loc (loc, type,
12473 TREE_OPERAND (arg0, 0)),
12474 arg1);
12475 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12476 if (tem)
12477 return tem;
12480 return NULL_TREE;
12482 case MIN_EXPR:
12483 if (operand_equal_p (arg0, arg1, 0))
12484 return omit_one_operand_loc (loc, type, arg0, arg1);
12485 if (INTEGRAL_TYPE_P (type)
12486 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12487 return omit_one_operand_loc (loc, type, arg1, arg0);
12488 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12489 if (tem)
12490 return tem;
12491 goto associate;
12493 case MAX_EXPR:
12494 if (operand_equal_p (arg0, arg1, 0))
12495 return omit_one_operand_loc (loc, type, arg0, arg1);
12496 if (INTEGRAL_TYPE_P (type)
12497 && TYPE_MAX_VALUE (type)
12498 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12499 return omit_one_operand_loc (loc, type, arg1, arg0);
12500 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12501 if (tem)
12502 return tem;
12503 goto associate;
12505 case TRUTH_ANDIF_EXPR:
12506 /* Note that the operands of this must be ints
12507 and their values must be 0 or 1.
12508 ("true" is a fixed value perhaps depending on the language.) */
12509 /* If first arg is constant zero, return it. */
12510 if (integer_zerop (arg0))
12511 return fold_convert_loc (loc, type, arg0);
12512 case TRUTH_AND_EXPR:
12513 /* If either arg is constant true, drop it. */
12514 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12515 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12516 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12517 /* Preserve sequence points. */
12518 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12519 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12520 /* If second arg is constant zero, result is zero, but first arg
12521 must be evaluated. */
12522 if (integer_zerop (arg1))
12523 return omit_one_operand_loc (loc, type, arg1, arg0);
12524 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12525 case will be handled here. */
12526 if (integer_zerop (arg0))
12527 return omit_one_operand_loc (loc, type, arg0, arg1);
12529 /* !X && X is always false. */
12530 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12531 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12532 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12533 /* X && !X is always false. */
12534 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12535 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12536 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12538 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12539 means A >= Y && A != MAX, but in this case we know that
12540 A < X <= MAX. */
12542 if (!TREE_SIDE_EFFECTS (arg0)
12543 && !TREE_SIDE_EFFECTS (arg1))
12545 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12546 if (tem && !operand_equal_p (tem, arg0, 0))
12547 return fold_build2_loc (loc, code, type, tem, arg1);
12549 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12550 if (tem && !operand_equal_p (tem, arg1, 0))
12551 return fold_build2_loc (loc, code, type, arg0, tem);
12554 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12555 != NULL_TREE)
12556 return tem;
12558 return NULL_TREE;
12560 case TRUTH_ORIF_EXPR:
12561 /* Note that the operands of this must be ints
12562 and their values must be 0 or true.
12563 ("true" is a fixed value perhaps depending on the language.) */
12564 /* If first arg is constant true, return it. */
12565 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12566 return fold_convert_loc (loc, type, arg0);
12567 case TRUTH_OR_EXPR:
12568 /* If either arg is constant zero, drop it. */
12569 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12570 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12571 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12572 /* Preserve sequence points. */
12573 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12574 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12575 /* If second arg is constant true, result is true, but we must
12576 evaluate first arg. */
12577 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12578 return omit_one_operand_loc (loc, type, arg1, arg0);
12579 /* Likewise for first arg, but note this only occurs here for
12580 TRUTH_OR_EXPR. */
12581 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12582 return omit_one_operand_loc (loc, type, arg0, arg1);
12584 /* !X || X is always true. */
12585 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12586 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12587 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12588 /* X || !X is always true. */
12589 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12590 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12591 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12593 /* (X && !Y) || (!X && Y) is X ^ Y */
12594 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12595 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12597 tree a0, a1, l0, l1, n0, n1;
12599 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12600 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12602 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12603 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12605 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12606 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12608 if ((operand_equal_p (n0, a0, 0)
12609 && operand_equal_p (n1, a1, 0))
12610 || (operand_equal_p (n0, a1, 0)
12611 && operand_equal_p (n1, a0, 0)))
12612 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
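/* Illustrative example, for truth operands a and b:
     (a && !b) || (!a && b)   folds to   a ^ b
   The negations built above are matched pairwise against the
   operands of the other TRUTH_AND_EXPR.  */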
12615 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12616 != NULL_TREE)
12617 return tem;
12619 return NULL_TREE;
12621 case TRUTH_XOR_EXPR:
12622 /* If the second arg is constant zero, drop it. */
12623 if (integer_zerop (arg1))
12624 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12625 /* If the second arg is constant true, this is a logical inversion. */
12626 if (integer_onep (arg1))
12628 tem = invert_truthvalue_loc (loc, arg0);
12629 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12631 /* Identical arguments cancel to zero. */
12632 if (operand_equal_p (arg0, arg1, 0))
12633 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12635 /* !X ^ X is always true. */
12636 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12637 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12638 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12640 /* X ^ !X is always true. */
12641 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12642 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12643 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12645 return NULL_TREE;
12647 case EQ_EXPR:
12648 case NE_EXPR:
12649 STRIP_NOPS (arg0);
12650 STRIP_NOPS (arg1);
12652 tem = fold_comparison (loc, code, type, op0, op1);
12653 if (tem != NULL_TREE)
12654 return tem;
12656 /* bool_var != 0 becomes bool_var. */
12657 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12658 && code == NE_EXPR)
12659 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12661 /* bool_var == 1 becomes bool_var. */
12662 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12663 && code == EQ_EXPR)
12664 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12666 /* bool_var != 1 becomes !bool_var. */
12667 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12668 && code == NE_EXPR)
12669 return fold_convert_loc (loc, type,
12670 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12671 TREE_TYPE (arg0), arg0));
12673 /* bool_var == 0 becomes !bool_var. */
12674 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12675 && code == EQ_EXPR)
12676 return fold_convert_loc (loc, type,
12677 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12678 TREE_TYPE (arg0), arg0));
12680 /* !exp != 0 becomes !exp */
12681 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12682 && code == NE_EXPR)
12683 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12685 /* If this is an equality comparison of the address of two non-weak,
12686 unaliased symbols, neither of which is extern (since we do not
12687 have access to attributes for externs), then we know the result. */
12688 if (TREE_CODE (arg0) == ADDR_EXPR
12689 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12690 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12691 && ! lookup_attribute ("alias",
12692 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12693 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12694 && TREE_CODE (arg1) == ADDR_EXPR
12695 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12696 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12697 && ! lookup_attribute ("alias",
12698 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12699 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12701 /* We know that we're looking at the address of two
12702 non-weak, unaliased, static _DECL nodes.
12704 It is both wasteful and incorrect to call operand_equal_p
12705 to compare the two ADDR_EXPR nodes. It is wasteful in that
12706 all we need to do is test pointer equality for the arguments
12707 to the two ADDR_EXPR nodes. It is incorrect to use
12708 operand_equal_p as that function is NOT equivalent to a
12709 C equality test. It can in fact return false for two
12710 objects which would test as equal using the C equality
12711 operator. */
12712 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12713 return constant_boolean_node (equal
12714 ? code == EQ_EXPR : code != EQ_EXPR,
12715 type);
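/* Illustrative example, assuming two file-scope definitions
   "static int u, v;" (non-weak, unaliased, not extern):
     &u == &v   folds to   false
     &u == &u   folds to   true
   since such decls are known to have distinct, fixed addresses.  */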
12718 /* Similarly for a NEGATE_EXPR. */
12719 if (TREE_CODE (arg0) == NEGATE_EXPR
12720 && TREE_CODE (arg1) == INTEGER_CST
12721 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12722 arg1)))
12723 && TREE_CODE (tem) == INTEGER_CST
12724 && !TREE_OVERFLOW (tem))
12725 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12727 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12728 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12729 && TREE_CODE (arg1) == INTEGER_CST
12730 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12731 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12732 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12733 fold_convert_loc (loc,
12734 TREE_TYPE (arg0),
12735 arg1),
12736 TREE_OPERAND (arg0, 1)));
12738 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12739 if ((TREE_CODE (arg0) == PLUS_EXPR
12740 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12741 || TREE_CODE (arg0) == MINUS_EXPR)
12742 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12743 0)),
12744 arg1, 0)
12745 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12746 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12748 tree val = TREE_OPERAND (arg0, 1);
12749 return omit_two_operands_loc (loc, type,
12750 fold_build2_loc (loc, code, type,
12751 val,
12752 build_int_cst (TREE_TYPE (val),
12753 0)),
12754 TREE_OPERAND (arg0, 0), arg1);
12757 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12758 if (TREE_CODE (arg0) == MINUS_EXPR
12759 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12760 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12761 1)),
12762 arg1, 0)
12763 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12765 return omit_two_operands_loc (loc, type,
12766 code == NE_EXPR
12767 ? boolean_true_node : boolean_false_node,
12768 TREE_OPERAND (arg0, 1), arg1);
12771 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12772 if (TREE_CODE (arg0) == ABS_EXPR
12773 && (integer_zerop (arg1) || real_zerop (arg1)))
12774 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12776 /* If this is an EQ or NE comparison with zero and ARG0 is
12777 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12778 two operations, but the latter can be done in one less insn
12779 on machines that have only two-operand insns or on which a
12780 constant cannot be the first operand. */
12781 if (TREE_CODE (arg0) == BIT_AND_EXPR
12782 && integer_zerop (arg1))
12784 tree arg00 = TREE_OPERAND (arg0, 0);
12785 tree arg01 = TREE_OPERAND (arg0, 1);
12786 if (TREE_CODE (arg00) == LSHIFT_EXPR
12787 && integer_onep (TREE_OPERAND (arg00, 0)))
12789 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12790 arg01, TREE_OPERAND (arg00, 1));
12791 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12792 build_int_cst (TREE_TYPE (arg0), 1));
12793 return fold_build2_loc (loc, code, type,
12794 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12795 arg1);
12797 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12798 && integer_onep (TREE_OPERAND (arg01, 0)))
12800 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12801 arg00, TREE_OPERAND (arg01, 1));
12802 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12803 build_int_cst (TREE_TYPE (arg0), 1));
12804 return fold_build2_loc (loc, code, type,
12805 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12806 arg1);
12810 /* If this is an NE or EQ comparison of zero against the result of a
12811 signed MOD operation whose second operand is a power of 2, make
12812 the MOD operation unsigned since it is simpler and equivalent. */
12813 if (integer_zerop (arg1)
12814 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12815 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12816 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12817 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12818 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12819 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12821 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12822 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12823 fold_convert_loc (loc, newtype,
12824 TREE_OPERAND (arg0, 0)),
12825 fold_convert_loc (loc, newtype,
12826 TREE_OPERAND (arg0, 1)));
12828 return fold_build2_loc (loc, code, type, newmod,
12829 fold_convert_loc (loc, newtype, arg1));
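/* Illustrative example, assuming a 32-bit signed int x:
     x % 4 == 0   folds to   (unsigned int) x % 4 == 0
   which the TRUNC_MOD_EXPR case can further reduce to
     ((unsigned int) x & 3) == 0
   The two tests agree because 2^32 is itself a multiple of 4.  */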
12832 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12833 C1 is a valid shift constant, and C2 is a power of two, i.e.
12834 a single bit. */
12835 if (TREE_CODE (arg0) == BIT_AND_EXPR
12836 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12837 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12838 == INTEGER_CST
12839 && integer_pow2p (TREE_OPERAND (arg0, 1))
12840 && integer_zerop (arg1))
12842 tree itype = TREE_TYPE (arg0);
12843 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12844 prec = TYPE_PRECISION (itype);
12846 /* Check for a valid shift count. */
12847 if (wi::ltu_p (arg001, prec))
12849 tree arg01 = TREE_OPERAND (arg0, 1);
12850 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12851 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12852 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12853 can be rewritten as (X & (C2 << C1)) != 0. */
12854 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12856 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12857 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12858 return fold_build2_loc (loc, code, type, tem,
12859 fold_convert_loc (loc, itype, arg1));
12861 /* Otherwise, for signed (arithmetic) shifts,
12862 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12863 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12864 else if (!TYPE_UNSIGNED (itype))
12865 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12866 arg000, build_int_cst (itype, 0));
12867 /* Otherwise, for unsigned (logical) shifts,
12868 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12869 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12870 else
12871 return omit_one_operand_loc (loc, type,
12872 code == EQ_EXPR ? integer_one_node
12873 : integer_zero_node,
12874 arg000);
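/* Illustrative examples, assuming a 32-bit signed int x:
     ((x >> 2) & 8) != 0    folds to   (x & 32) != 0
   since 8 << 2 still fits in the precision, while
     ((x >> 28) & 16) != 0  folds to   x < 0
   since bit 4 of the arithmetic x >> 28 is a copy of the sign
   bit.  */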
12878 /* If we have (A & C) == C where C is a power of 2, convert this into
12879 (A & C) != 0. Similarly for NE_EXPR. */
12880 if (TREE_CODE (arg0) == BIT_AND_EXPR
12881 && integer_pow2p (TREE_OPERAND (arg0, 1))
12882 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12883 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12884 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12885 integer_zero_node));
12887 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12888 bit, then fold the expression into A < 0 or A >= 0. */
12889 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12890 if (tem)
12891 return tem;
12893 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12894 Similarly for NE_EXPR. */
12895 if (TREE_CODE (arg0) == BIT_AND_EXPR
12896 && TREE_CODE (arg1) == INTEGER_CST
12897 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12899 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12900 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12901 TREE_OPERAND (arg0, 1));
12902 tree dandnotc
12903 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12904 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12905 notc);
12906 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12907 if (integer_nonzerop (dandnotc))
12908 return omit_one_operand_loc (loc, type, rslt, arg0);
12911 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12912 Similarly for NE_EXPR. */
12913 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12914 && TREE_CODE (arg1) == INTEGER_CST
12915 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12917 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12918 tree candnotd
12919 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12920 TREE_OPERAND (arg0, 1),
12921 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12922 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12923 if (integer_nonzerop (candnotd))
12924 return omit_one_operand_loc (loc, type, rslt, arg0);
12927 /* If this is a comparison of a field, we may be able to simplify it. */
12928 if ((TREE_CODE (arg0) == COMPONENT_REF
12929 || TREE_CODE (arg0) == BIT_FIELD_REF)
12930 /* Handle the constant case even without -O
12931 to make sure the warnings are given. */
12932 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12934 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12935 if (t1)
12936 return t1;
12939 /* Optimize comparisons of strlen vs zero to a compare of the
12940 first character of the string vs zero. To wit,
12941 strlen(ptr) == 0 => *ptr == 0
12942 strlen(ptr) != 0 => *ptr != 0
12943 Other cases should reduce to one of these two (or a constant)
12944 due to the return value of strlen being unsigned. */
12945 if (TREE_CODE (arg0) == CALL_EXPR
12946 && integer_zerop (arg1))
12948 tree fndecl = get_callee_fndecl (arg0);
12950 if (fndecl
12951 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12952 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12953 && call_expr_nargs (arg0) == 1
12954 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12956 tree iref = build_fold_indirect_ref_loc (loc,
12957 CALL_EXPR_ARG (arg0, 0));
12958 return fold_build2_loc (loc, code, type, iref,
12959 build_int_cst (TREE_TYPE (iref), 0));
12963 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12964 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12965 if (TREE_CODE (arg0) == RSHIFT_EXPR
12966 && integer_zerop (arg1)
12967 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12969 tree arg00 = TREE_OPERAND (arg0, 0);
12970 tree arg01 = TREE_OPERAND (arg0, 1);
12971 tree itype = TREE_TYPE (arg00);
12972 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
12974 if (TYPE_UNSIGNED (itype))
12976 itype = signed_type_for (itype);
12977 arg00 = fold_convert_loc (loc, itype, arg00);
12979 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12980 type, arg00, build_zero_cst (itype));
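/* Illustrative example, assuming a 32-bit signed int x:
     (x >> 31) != 0   folds to   x < 0
     (x >> 31) == 0   folds to   x >= 0
   An unsigned x is first converted to the signed type so that the
   comparison against zero tests the sign bit.  */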
12984 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12985 if (integer_zerop (arg1)
12986 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12987 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12988 TREE_OPERAND (arg0, 1));
12990 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12991 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12992 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12993 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12994 build_zero_cst (TREE_TYPE (arg0)));
12995 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12996 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12997 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12998 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12999 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13000 build_zero_cst (TREE_TYPE (arg0)));
13002 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13003 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13004 && TREE_CODE (arg1) == INTEGER_CST
13005 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13006 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13007 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13008 TREE_OPERAND (arg0, 1), arg1));
13010 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13011 (X & C) == 0 when C is a single bit. */
13012 if (TREE_CODE (arg0) == BIT_AND_EXPR
13013 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13014 && integer_zerop (arg1)
13015 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13017 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13018 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13019 TREE_OPERAND (arg0, 1));
13020 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13021 type, tem,
13022 fold_convert_loc (loc, TREE_TYPE (arg0),
13023 arg1));
13026 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13027 constant C is a power of two, i.e. a single bit. */
13028 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13029 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13030 && integer_zerop (arg1)
13031 && integer_pow2p (TREE_OPERAND (arg0, 1))
13032 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13033 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13035 tree arg00 = TREE_OPERAND (arg0, 0);
13036 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13037 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13040 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13041 when C is a power of two, i.e. a single bit. */
13042 if (TREE_CODE (arg0) == BIT_AND_EXPR
13043 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13044 && integer_zerop (arg1)
13045 && integer_pow2p (TREE_OPERAND (arg0, 1))
13046 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13047 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13049 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13050 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13051 arg000, TREE_OPERAND (arg0, 1));
13052 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13053 tem, build_int_cst (TREE_TYPE (tem), 0));
13056 if (integer_zerop (arg1)
13057 && tree_expr_nonzero_p (arg0))
13059 tree res = constant_boolean_node (code == NE_EXPR, type);
13060 return omit_one_operand_loc (loc, type, res, arg0);
13063 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13064 if (TREE_CODE (arg0) == NEGATE_EXPR
13065 && TREE_CODE (arg1) == NEGATE_EXPR)
13066 return fold_build2_loc (loc, code, type,
13067 TREE_OPERAND (arg0, 0),
13068 fold_convert_loc (loc, TREE_TYPE (arg0),
13069 TREE_OPERAND (arg1, 0)));
13071 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13072 if (TREE_CODE (arg0) == BIT_AND_EXPR
13073 && TREE_CODE (arg1) == BIT_AND_EXPR)
13075 tree arg00 = TREE_OPERAND (arg0, 0);
13076 tree arg01 = TREE_OPERAND (arg0, 1);
13077 tree arg10 = TREE_OPERAND (arg1, 0);
13078 tree arg11 = TREE_OPERAND (arg1, 1);
13079 tree itype = TREE_TYPE (arg0);
13081 if (operand_equal_p (arg01, arg11, 0))
13082 return fold_build2_loc (loc, code, type,
13083 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13084 fold_build2_loc (loc,
13085 BIT_XOR_EXPR, itype,
13086 arg00, arg10),
13087 arg01),
13088 build_zero_cst (itype));
13090 if (operand_equal_p (arg01, arg10, 0))
13091 return fold_build2_loc (loc, code, type,
13092 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13093 fold_build2_loc (loc,
13094 BIT_XOR_EXPR, itype,
13095 arg00, arg11),
13096 arg01),
13097 build_zero_cst (itype));
13099 if (operand_equal_p (arg00, arg11, 0))
13100 return fold_build2_loc (loc, code, type,
13101 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13102 fold_build2_loc (loc,
13103 BIT_XOR_EXPR, itype,
13104 arg01, arg10),
13105 arg00),
13106 build_zero_cst (itype));
13108 if (operand_equal_p (arg00, arg10, 0))
13109 return fold_build2_loc (loc, code, type,
13110 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13111 fold_build2_loc (loc,
13112 BIT_XOR_EXPR, itype,
13113 arg01, arg11),
13114 arg00),
13115 build_zero_cst (itype));
13118 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13119 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13121 tree arg00 = TREE_OPERAND (arg0, 0);
13122 tree arg01 = TREE_OPERAND (arg0, 1);
13123 tree arg10 = TREE_OPERAND (arg1, 0);
13124 tree arg11 = TREE_OPERAND (arg1, 1);
13125 tree itype = TREE_TYPE (arg0);
13127 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13128 operand_equal_p guarantees no side-effects so we don't need
13129 to use omit_one_operand on Z. */
13130 if (operand_equal_p (arg01, arg11, 0))
13131 return fold_build2_loc (loc, code, type, arg00,
13132 fold_convert_loc (loc, TREE_TYPE (arg00),
13133 arg10));
13134 if (operand_equal_p (arg01, arg10, 0))
13135 return fold_build2_loc (loc, code, type, arg00,
13136 fold_convert_loc (loc, TREE_TYPE (arg00),
13137 arg11));
13138 if (operand_equal_p (arg00, arg11, 0))
13139 return fold_build2_loc (loc, code, type, arg01,
13140 fold_convert_loc (loc, TREE_TYPE (arg01),
13141 arg10));
13142 if (operand_equal_p (arg00, arg10, 0))
13143 return fold_build2_loc (loc, code, type, arg01,
13144 fold_convert_loc (loc, TREE_TYPE (arg01),
13145 arg11));
13147 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13148 if (TREE_CODE (arg01) == INTEGER_CST
13149 && TREE_CODE (arg11) == INTEGER_CST)
13151 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13152 fold_convert_loc (loc, itype, arg11));
13153 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13154 return fold_build2_loc (loc, code, type, tem,
13155 fold_convert_loc (loc, itype, arg10));
13159 /* Attempt to simplify equality/inequality comparisons of complex
13160 values. Only lower the comparison if the result is known or
13161 can be simplified to a single scalar comparison. */
13162 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13163 || TREE_CODE (arg0) == COMPLEX_CST)
13164 && (TREE_CODE (arg1) == COMPLEX_EXPR
13165 || TREE_CODE (arg1) == COMPLEX_CST))
13167 tree real0, imag0, real1, imag1;
13168 tree rcond, icond;
13170 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13172 real0 = TREE_OPERAND (arg0, 0);
13173 imag0 = TREE_OPERAND (arg0, 1);
13175 else
13177 real0 = TREE_REALPART (arg0);
13178 imag0 = TREE_IMAGPART (arg0);
13181 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13183 real1 = TREE_OPERAND (arg1, 0);
13184 imag1 = TREE_OPERAND (arg1, 1);
13186 else
13188 real1 = TREE_REALPART (arg1);
13189 imag1 = TREE_IMAGPART (arg1);
13192 rcond = fold_binary_loc (loc, code, type, real0, real1);
13193 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13195 if (integer_zerop (rcond))
13197 if (code == EQ_EXPR)
13198 return omit_two_operands_loc (loc, type, boolean_false_node,
13199 imag0, imag1);
13200 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13202 else
13204 if (code == NE_EXPR)
13205 return omit_two_operands_loc (loc, type, boolean_true_node,
13206 imag0, imag1);
13207 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13211 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13212 if (icond && TREE_CODE (icond) == INTEGER_CST)
13214 if (integer_zerop (icond))
13216 if (code == EQ_EXPR)
13217 return omit_two_operands_loc (loc, type, boolean_false_node,
13218 real0, real1);
13219 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13221 else
13223 if (code == NE_EXPR)
13224 return omit_two_operands_loc (loc, type, boolean_true_node,
13225 real0, real1);
13226 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13231 return NULL_TREE;
13233 case LT_EXPR:
13234 case GT_EXPR:
13235 case LE_EXPR:
13236 case GE_EXPR:
13237 tem = fold_comparison (loc, code, type, op0, op1);
13238 if (tem != NULL_TREE)
13239 return tem;
13241 /* Transform comparisons of the form X +- C CMP X. */
13242 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13243 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13244 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13245 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13246 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13247 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13249 tree arg01 = TREE_OPERAND (arg0, 1);
13250 enum tree_code code0 = TREE_CODE (arg0);
13251 int is_positive;
13253 if (TREE_CODE (arg01) == REAL_CST)
13254 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13255 else
13256 is_positive = tree_int_cst_sgn (arg01);
13258 /* (X - c) > X becomes false. */
13259 if (code == GT_EXPR
13260 && ((code0 == MINUS_EXPR && is_positive >= 0)
13261 || (code0 == PLUS_EXPR && is_positive <= 0)))
13263 if (TREE_CODE (arg01) == INTEGER_CST
13264 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13265 fold_overflow_warning (("assuming signed overflow does not "
13266 "occur when assuming that (X - c) > X "
13267 "is always false"),
13268 WARN_STRICT_OVERFLOW_ALL);
13269 return constant_boolean_node (0, type);
13272 /* Likewise (X + c) < X becomes false. */
13273 if (code == LT_EXPR
13274 && ((code0 == PLUS_EXPR && is_positive >= 0)
13275 || (code0 == MINUS_EXPR && is_positive <= 0)))
13277 if (TREE_CODE (arg01) == INTEGER_CST
13278 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13279 fold_overflow_warning (("assuming signed overflow does not "
13280 "occur when assuming that "
13281 "(X + c) < X is always false"),
13282 WARN_STRICT_OVERFLOW_ALL);
13283 return constant_boolean_node (0, type);
13286 /* Convert (X - c) <= X to true. */
13287 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13288 && code == LE_EXPR
13289 && ((code0 == MINUS_EXPR && is_positive >= 0)
13290 || (code0 == PLUS_EXPR && is_positive <= 0)))
13292 if (TREE_CODE (arg01) == INTEGER_CST
13293 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13294 fold_overflow_warning (("assuming signed overflow does not "
13295 "occur when assuming that "
13296 "(X - c) <= X is always true"),
13297 WARN_STRICT_OVERFLOW_ALL);
13298 return constant_boolean_node (1, type);
13301 /* Convert (X + c) >= X to true. */
13302 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13303 && code == GE_EXPR
13304 && ((code0 == PLUS_EXPR && is_positive >= 0)
13305 || (code0 == MINUS_EXPR && is_positive <= 0)))
13307 if (TREE_CODE (arg01) == INTEGER_CST
13308 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13309 fold_overflow_warning (("assuming signed overflow does not "
13310 "occur when assuming that "
13311 "(X + c) >= X is always true"),
13312 WARN_STRICT_OVERFLOW_ALL);
13313 return constant_boolean_node (1, type);
13316 if (TREE_CODE (arg01) == INTEGER_CST)
13318 /* Convert X + c > X and X - c < X to true for integers. */
13319 if (code == GT_EXPR
13320 && ((code0 == PLUS_EXPR && is_positive > 0)
13321 || (code0 == MINUS_EXPR && is_positive < 0)))
13323 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13324 fold_overflow_warning (("assuming signed overflow does "
13325 "not occur when assuming that "
13326 "(X + c) > X is always true"),
13327 WARN_STRICT_OVERFLOW_ALL);
13328 return constant_boolean_node (1, type);
13331 if (code == LT_EXPR
13332 && ((code0 == MINUS_EXPR && is_positive > 0)
13333 || (code0 == PLUS_EXPR && is_positive < 0)))
13335 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13336 fold_overflow_warning (("assuming signed overflow does "
13337 "not occur when assuming that "
13338 "(X - c) < X is always true"),
13339 WARN_STRICT_OVERFLOW_ALL);
13340 return constant_boolean_node (1, type);
13343 /* Convert X + c <= X and X - c >= X to false for integers. */
13344 if (code == LE_EXPR
13345 && ((code0 == PLUS_EXPR && is_positive > 0)
13346 || (code0 == MINUS_EXPR && is_positive < 0)))
13348 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13349 fold_overflow_warning (("assuming signed overflow does "
13350 "not occur when assuming that "
13351 "(X + c) <= X is always false"),
13352 WARN_STRICT_OVERFLOW_ALL);
13353 return constant_boolean_node (0, type);
13356 if (code == GE_EXPR
13357 && ((code0 == MINUS_EXPR && is_positive > 0)
13358 || (code0 == PLUS_EXPR && is_positive < 0)))
13360 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13361 fold_overflow_warning (("assuming signed overflow does "
13362 "not occur when assuming that "
13363 "(X - c) >= X is always false"),
13364 WARN_STRICT_OVERFLOW_ALL);
13365 return constant_boolean_node (0, type);
13370 /* Comparisons with the highest or lowest possible integer of
13371 the specified precision will have known values. */
13373 tree arg1_type = TREE_TYPE (arg1);
13374 unsigned int prec = TYPE_PRECISION (arg1_type);
13376 if (TREE_CODE (arg1) == INTEGER_CST
13377 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13379 wide_int max = wi::max_value (arg1_type);
13380 wide_int signed_max = wi::max_value (prec, SIGNED);
13381 wide_int min = wi::min_value (arg1_type);
13383 if (wi::eq_p (arg1, max))
13384 switch (code)
13386 case GT_EXPR:
13387 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13389 case GE_EXPR:
13390 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13392 case LE_EXPR:
13393 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13395 case LT_EXPR:
13396 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13398 /* The GE_EXPR and LT_EXPR cases above are not normally
13399 reached because of previous transformations. */
13401 default:
13402 break;
13404 else if (wi::eq_p (arg1, max - 1))
13405 switch (code)
13407 case GT_EXPR:
13408 arg1 = const_binop (PLUS_EXPR, arg1,
13409 build_int_cst (TREE_TYPE (arg1), 1));
13410 return fold_build2_loc (loc, EQ_EXPR, type,
13411 fold_convert_loc (loc,
13412 TREE_TYPE (arg1), arg0),
13413 arg1);
13414 case LE_EXPR:
13415 arg1 = const_binop (PLUS_EXPR, arg1,
13416 build_int_cst (TREE_TYPE (arg1), 1));
13417 return fold_build2_loc (loc, NE_EXPR, type,
13418 fold_convert_loc (loc, TREE_TYPE (arg1),
13419 arg0),
13420 arg1);
13421 default:
13422 break;
13424 else if (wi::eq_p (arg1, min))
13425 switch (code)
13427 case LT_EXPR:
13428 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13430 case LE_EXPR:
13431 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13433 case GE_EXPR:
13434 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13436 case GT_EXPR:
13437 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13439 default:
13440 break;
13442 else if (wi::eq_p (arg1, min + 1))
13443 switch (code)
13445 case GE_EXPR:
13446 arg1 = const_binop (MINUS_EXPR, arg1,
13447 build_int_cst (TREE_TYPE (arg1), 1));
13448 return fold_build2_loc (loc, NE_EXPR, type,
13449 fold_convert_loc (loc,
13450 TREE_TYPE (arg1), arg0),
13451 arg1);
13452 case LT_EXPR:
13453 arg1 = const_binop (MINUS_EXPR, arg1,
13454 build_int_cst (TREE_TYPE (arg1), 1));
13455 return fold_build2_loc (loc, EQ_EXPR, type,
13456 fold_convert_loc (loc, TREE_TYPE (arg1),
13457 arg0),
13458 arg1);
13459 default:
13460 break;
13463 else if (wi::eq_p (arg1, signed_max)
13464 && TYPE_UNSIGNED (arg1_type)
13465 /* We will flip the signedness of the comparison operator
13466 associated with the mode of arg1, so the sign bit is
13467 specified by this mode. Check that arg1 is the signed
13468 max associated with this sign bit. */
13469 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13470 /* signed_type does not work on pointer types. */
13471 && INTEGRAL_TYPE_P (arg1_type))
13473 /* The following case also applies to X < signed_max+1
13474 and X >= signed_max+1 because of previous transformations. */
13475 if (code == LE_EXPR || code == GT_EXPR)
13477 tree st = signed_type_for (arg1_type);
13478 return fold_build2_loc (loc,
13479 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13480 type, fold_convert_loc (loc, st, arg0),
13481 build_int_cst (st, 0));
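/* Illustrative example, assuming a 32-bit unsigned int x:
     x > 0x7fffffff    folds to   (int) x < 0
     x <= 0x7fffffff   folds to   (int) x >= 0
   A comparison against the signed maximum is just a sign-bit test
   in the corresponding signed type.  */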
13487 /* If we are comparing an ABS_EXPR with a constant, we can
13488 convert all the cases into explicit comparisons, but they may
13489 well not be faster than doing the ABS and one comparison.
13490 But ABS (X) <= C is a range comparison, which becomes a subtraction
13491 and a comparison, and is probably faster. */
13492 if (code == LE_EXPR
13493 && TREE_CODE (arg1) == INTEGER_CST
13494 && TREE_CODE (arg0) == ABS_EXPR
13495 && ! TREE_SIDE_EFFECTS (arg0)
13496 && (0 != (tem = negate_expr (arg1)))
13497 && TREE_CODE (tem) == INTEGER_CST
13498 && !TREE_OVERFLOW (tem))
13499 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13500 build2 (GE_EXPR, type,
13501 TREE_OPERAND (arg0, 0), tem),
13502 build2 (LE_EXPR, type,
13503 TREE_OPERAND (arg0, 0), arg1));
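/* Illustrative example, assuming a signed int x without side
   effects:
     abs (x) <= 5   folds to   x >= -5 && x <= 5
   i.e. the range comparison described above.  */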
13505 /* Convert ABS_EXPR<x> >= 0 to true. */
13506 strict_overflow_p = false;
13507 if (code == GE_EXPR
13508 && (integer_zerop (arg1)
13509 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13510 && real_zerop (arg1)))
13511 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13513 if (strict_overflow_p)
13514 fold_overflow_warning (("assuming signed overflow does not occur "
13515 "when simplifying comparison of "
13516 "absolute value and zero"),
13517 WARN_STRICT_OVERFLOW_CONDITIONAL);
13518 return omit_one_operand_loc (loc, type,
13519 constant_boolean_node (true, type),
13520 arg0);
13523 /* Convert ABS_EXPR<x> < 0 to false. */
13524 strict_overflow_p = false;
13525 if (code == LT_EXPR
13526 && (integer_zerop (arg1) || real_zerop (arg1))
13527 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13529 if (strict_overflow_p)
13530 fold_overflow_warning (("assuming signed overflow does not occur "
13531 "when simplifying comparison of "
13532 "absolute value and zero"),
13533 WARN_STRICT_OVERFLOW_CONDITIONAL);
13534 return omit_one_operand_loc (loc, type,
13535 constant_boolean_node (false, type),
13536 arg0);
13539 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13540 and similarly for >= into !=. */
13541 if ((code == LT_EXPR || code == GE_EXPR)
13542 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13543 && TREE_CODE (arg1) == LSHIFT_EXPR
13544 && integer_onep (TREE_OPERAND (arg1, 0)))
13545 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13546 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13547 TREE_OPERAND (arg1, 1)),
13548 build_zero_cst (TREE_TYPE (arg0)));
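/* Illustrative example, assuming an unsigned int x and an in-range
   shift count y:
     x < (1U << y)    folds to   (x >> y) == 0
     x >= (1U << y)   folds to   (x >> y) != 0
   x is below the single set bit exactly when shifting it right by
   y leaves nothing.  */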
13550 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13551 otherwise Y might be >= # of bits in X's type and thus e.g.
13552 (unsigned char) (1 << Y) for Y 15 might be 0.
13553 If the cast is widening, then 1 << Y should have unsigned type,
13554 otherwise if Y is number of bits in the signed shift type minus 1,
13555 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13556 31 might be 0xffffffff80000000. */
13557 if ((code == LT_EXPR || code == GE_EXPR)
13558 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13559 && CONVERT_EXPR_P (arg1)
13560 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13561 && (TYPE_PRECISION (TREE_TYPE (arg1))
13562 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13563 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13564 || (TYPE_PRECISION (TREE_TYPE (arg1))
13565 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13566 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13568 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13569 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13570 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13571 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13572 build_zero_cst (TREE_TYPE (arg0)));
13575 return NULL_TREE;
13577 case UNORDERED_EXPR:
13578 case ORDERED_EXPR:
13579 case UNLT_EXPR:
13580 case UNLE_EXPR:
13581 case UNGT_EXPR:
13582 case UNGE_EXPR:
13583 case UNEQ_EXPR:
13584 case LTGT_EXPR:
13585 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13587 t1 = fold_relational_const (code, type, arg0, arg1);
13588 if (t1 != NULL_TREE)
13589 return t1;
13592 /* If the first operand is NaN, the result is constant. */
13593 if (TREE_CODE (arg0) == REAL_CST
13594 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13595 && (code != LTGT_EXPR || ! flag_trapping_math))
13597 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13598 ? integer_zero_node
13599 : integer_one_node;
13600 return omit_one_operand_loc (loc, type, t1, arg1);
13603 /* If the second operand is NaN, the result is constant. */
13604 if (TREE_CODE (arg1) == REAL_CST
13605 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13606 && (code != LTGT_EXPR || ! flag_trapping_math))
13608 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13609 ? integer_zero_node
13610 : integer_one_node;
13611 return omit_one_operand_loc (loc, type, t1, arg0);
13614 /* Simplify unordered comparison of something with itself. */
13615 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13616 && operand_equal_p (arg0, arg1, 0))
13617 return constant_boolean_node (1, type);
13619 if (code == LTGT_EXPR
13620 && !flag_trapping_math
13621 && operand_equal_p (arg0, arg1, 0))
13622 return constant_boolean_node (0, type);
13624 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13626 tree targ0 = strip_float_extensions (arg0);
13627 tree targ1 = strip_float_extensions (arg1);
13628 tree newtype = TREE_TYPE (targ0);
13630 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13631 newtype = TREE_TYPE (targ1);
13633 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13634 return fold_build2_loc (loc, code, type,
13635 fold_convert_loc (loc, newtype, targ0),
13636 fold_convert_loc (loc, newtype, targ1));
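/* Illustrative example, assuming floats f and g:
     (double) f < (double) g   folds to   f < g
   Widening both operands is exact and order-preserving, so the
   comparison can be carried out in the narrower type.  */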
13639 return NULL_TREE;
13641 case COMPOUND_EXPR:
13642 /* When pedantic, a compound expression can be neither an lvalue
13643 nor an integer constant expression. */
13644 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13645 return NULL_TREE;
13646 /* Don't let (0, 0) be a null pointer constant. */
13647 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13648 : fold_convert_loc (loc, type, arg1);
13649 return pedantic_non_lvalue_loc (loc, tem);
13651 case COMPLEX_EXPR:
13652 if ((TREE_CODE (arg0) == REAL_CST
13653 && TREE_CODE (arg1) == REAL_CST)
13654 || (TREE_CODE (arg0) == INTEGER_CST
13655 && TREE_CODE (arg1) == INTEGER_CST))
13656 return build_complex (type, arg0, arg1);
13657 if (TREE_CODE (arg0) == REALPART_EXPR
13658 && TREE_CODE (arg1) == IMAGPART_EXPR
13659 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13660 && operand_equal_p (TREE_OPERAND (arg0, 0),
13661 TREE_OPERAND (arg1, 0), 0))
13662 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13663 TREE_OPERAND (arg1, 0));
13664 return NULL_TREE;
13666 case ASSERT_EXPR:
13667 /* An ASSERT_EXPR should never be passed to fold_binary. */
13668 gcc_unreachable ();
13670 case VEC_PACK_TRUNC_EXPR:
13671 case VEC_PACK_FIX_TRUNC_EXPR:
13673 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13674 tree *elts;
13676 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13677 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13678 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13679 return NULL_TREE;
13681 elts = XALLOCAVEC (tree, nelts);
13682 if (!vec_cst_ctor_to_array (arg0, elts)
13683 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13684 return NULL_TREE;
13686 for (i = 0; i < nelts; i++)
13688 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13689 ? NOP_EXPR : FIX_TRUNC_EXPR,
13690 TREE_TYPE (type), elts[i]);
13691 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13692 return NULL_TREE;
13695 return build_vector (type, elts);
13698 case VEC_WIDEN_MULT_LO_EXPR:
13699 case VEC_WIDEN_MULT_HI_EXPR:
13700 case VEC_WIDEN_MULT_EVEN_EXPR:
13701 case VEC_WIDEN_MULT_ODD_EXPR:
13703 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13704 unsigned int out, ofs, scale;
13705 tree *elts;
13707 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13708 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13709 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13710 return NULL_TREE;
13712 elts = XALLOCAVEC (tree, nelts * 4);
13713 if (!vec_cst_ctor_to_array (arg0, elts)
13714 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13715 return NULL_TREE;
13717 if (code == VEC_WIDEN_MULT_LO_EXPR)
13718 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13719 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13720 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13721 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13722 scale = 1, ofs = 0;
13723 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13724 scale = 1, ofs = 1;
13726 for (out = 0; out < nelts; out++)
13728 unsigned int in1 = (out << scale) + ofs;
13729 unsigned int in2 = in1 + nelts * 2;
13730 tree t1, t2;
13732 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13733 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13735 if (t1 == NULL_TREE || t2 == NULL_TREE)
13736 return NULL_TREE;
13737 elts[out] = const_binop (MULT_EXPR, t1, t2);
13738 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13739 return NULL_TREE;
13742 return build_vector (type, elts);
13745 default:
13746 return NULL_TREE;
13747 } /* switch (code) */
13750 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13751 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13752 of GOTO_EXPR. */
13754 static tree
13755 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13757 switch (TREE_CODE (*tp))
13759 case LABEL_EXPR:
13760 return *tp;
13762 case GOTO_EXPR:
13763 *walk_subtrees = 0;
13765 /* ... fall through ... */
13767 default:
13768 return NULL_TREE;
13772 /* Return whether the sub-tree ST contains a label which is accessible from
13773 outside the sub-tree. */
13775 static bool
13776 contains_label_p (tree st)
13778 return
13779 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13782 /* Fold a ternary expression of code CODE and type TYPE with operands
13783 OP0, OP1, and OP2. Return the folded expression if folding is
13784 successful. Otherwise, return NULL_TREE. */
13786 tree
13787 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13788 tree op0, tree op1, tree op2)
13790 tree tem;
13791 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13792 enum tree_code_class kind = TREE_CODE_CLASS (code);
13794 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13795 && TREE_CODE_LENGTH (code) == 3);
13797 /* If this is a commutative operation, and OP0 is a constant, move it
13798 to OP1 to reduce the number of tests below. */
13799 if (commutative_ternary_tree_code (code)
13800 && tree_swap_operands_p (op0, op1, true))
13801 return fold_build3_loc (loc, code, type, op1, op0, op2);
13803 tem = generic_simplify (loc, code, type, op0, op1, op2);
13804 if (tem)
13805 return tem;
13807 /* Strip any conversions that don't change the mode. This is safe
13808 for every expression, except for a comparison expression because
13809 its signedness is derived from its operands. So, in the latter
13810 case, only strip conversions that don't change the signedness.
13812 Note that this is done as an internal manipulation within the
13813 constant folder, in order to find the simplest representation of
13814 the arguments so that their form can be studied. In any cases,
13815 the appropriate type conversions should be put back in the tree
13816 that will get out of the constant folder. */
13817 if (op0)
13819 arg0 = op0;
13820 STRIP_NOPS (arg0);
13823 if (op1)
13825 arg1 = op1;
13826 STRIP_NOPS (arg1);
13829 if (op2)
13831 arg2 = op2;
13832 STRIP_NOPS (arg2);
13835 switch (code)
13837 case COMPONENT_REF:
13838 if (TREE_CODE (arg0) == CONSTRUCTOR
13839 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13841 unsigned HOST_WIDE_INT idx;
13842 tree field, value;
13843 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13844 if (field == arg1)
13845 return value;
13847 return NULL_TREE;
13849 case COND_EXPR:
13850 case VEC_COND_EXPR:
13851 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13852 so all simple results must be passed through pedantic_non_lvalue. */
13853 if (TREE_CODE (arg0) == INTEGER_CST)
13855 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13856 tem = integer_zerop (arg0) ? op2 : op1;
13857 /* Only optimize constant conditions when the selected branch
13858 has the same type as the COND_EXPR. This avoids optimizing
13859 away "c ? x : throw", where the throw has a void type.
13860 Avoid throwing away the operand that contains a label. */
13861 if ((!TREE_SIDE_EFFECTS (unused_op)
13862 || !contains_label_p (unused_op))
13863 && (! VOID_TYPE_P (TREE_TYPE (tem))
13864 || VOID_TYPE_P (type)))
13865 return pedantic_non_lvalue_loc (loc, tem);
13866 return NULL_TREE;
13868 else if (TREE_CODE (arg0) == VECTOR_CST)
13870 if (integer_all_onesp (arg0))
13871 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13872 if (integer_zerop (arg0))
13873 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
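/* Otherwise try to express the constant condition as a
   permutation; e.g. a V4SI mask of { -1, 0, 0, -1 } selects
   elements { 0, 5, 6, 3 } from the concatenation of arg1 and
   arg2. */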
13875 if ((TREE_CODE (arg1) == VECTOR_CST
13876 || TREE_CODE (arg1) == CONSTRUCTOR)
13877 && (TREE_CODE (arg2) == VECTOR_CST
13878 || TREE_CODE (arg2) == CONSTRUCTOR))
13880 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13881 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13882 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13883 for (i = 0; i < nelts; i++)
13885 tree val = VECTOR_CST_ELT (arg0, i);
13886 if (integer_all_onesp (val))
13887 sel[i] = i;
13888 else if (integer_zerop (val))
13889 sel[i] = nelts + i;
13890 else /* Currently unreachable. */
13891 return NULL_TREE;
13893 tree t = fold_vec_perm (type, arg1, arg2, sel);
13894 if (t != NULL_TREE)
13895 return t;
13899 if (operand_equal_p (arg1, op2, 0))
13900 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13902 /* If we have A op B ? A : C, we may be able to convert this to a
13903 simpler expression, depending on the operation and the values
13904 of B and C. Signed zeros prevent all of these transformations,
13905 for reasons given above each one.
13907 Also try swapping the arguments and inverting the conditional. */
13908 if (COMPARISON_CLASS_P (arg0)
13909 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13910 arg1, TREE_OPERAND (arg0, 1))
13911 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13913 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13914 if (tem)
13915 return tem;
13918 if (COMPARISON_CLASS_P (arg0)
13919 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13920 op2,
13921 TREE_OPERAND (arg0, 1))
13922 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13924 location_t loc0 = expr_location_or (arg0, loc);
13925 tem = fold_invert_truthvalue (loc0, arg0);
13926 if (tem && COMPARISON_CLASS_P (tem))
13928 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13929 if (tem)
13930 return tem;
13934 /* If the second operand is simpler than the third, swap them
13935 since that produces better jump optimization results. */
13936 if (truth_value_p (TREE_CODE (arg0))
13937 && tree_swap_operands_p (op1, op2, false))
13939 location_t loc0 = expr_location_or (arg0, loc);
13940 /* See if this can be inverted. If it can't, possibly because
13941 it was a floating-point inequality comparison, don't do
13942 anything. */
13943 tem = fold_invert_truthvalue (loc0, arg0);
13944 if (tem)
13945 return fold_build3_loc (loc, code, type, tem, op2, op1);
13948 /* Convert A ? 1 : 0 to simply A. */
13949 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13950 : (integer_onep (op1)
13951 && !VECTOR_TYPE_P (type)))
13952 && integer_zerop (op2)
13953 /* If we try to convert OP0 to our type, the
13954 call to fold will try to move the conversion inside
13955 a COND, which will recurse. In that case, the COND_EXPR
13956 is probably the best choice, so leave it alone. */
13957 && type == TREE_TYPE (arg0))
13958 return pedantic_non_lvalue_loc (loc, arg0);
13960 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13961 over COND_EXPR in cases such as floating point comparisons. */
13962 if (integer_zerop (op1)
13963 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13964 : (integer_onep (op2)
13965 && !VECTOR_TYPE_P (type)))
13966 && truth_value_p (TREE_CODE (arg0)))
13967 return pedantic_non_lvalue_loc (loc,
13968 fold_convert_loc (loc, type,
13969 invert_truthvalue_loc (loc,
13970 arg0)));
13972 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
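/* E.g. for a 32-bit signed A this rewrites
   A < 0 ? 0x80000000 : 0 into A & 0x80000000, with conversions
   applied as needed. */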
13973 if (TREE_CODE (arg0) == LT_EXPR
13974 && integer_zerop (TREE_OPERAND (arg0, 1))
13975 && integer_zerop (op2)
13976 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13978 /* sign_bit_p looks through both zero and sign extensions,
13979 but for this optimization only sign extensions are
13980 usable. */
13981 tree tem2 = TREE_OPERAND (arg0, 0);
13982 while (tem != tem2)
13984 if (TREE_CODE (tem2) != NOP_EXPR
13985 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13987 tem = NULL_TREE;
13988 break;
13990 tem2 = TREE_OPERAND (tem2, 0);
13992 /* sign_bit_p only checks ARG1 bits within A's precision.
13993 If <sign bit of A> has wider type than A, bits outside
13994 of A's precision in <sign bit of A> need to be checked.
13995 If they are all 0, this optimization must be done in A's
13996 unsigned type; if they are all 1, in A's signed type;
13997 otherwise it can't be done. */
13998 if (tem
13999 && TYPE_PRECISION (TREE_TYPE (tem))
14000 < TYPE_PRECISION (TREE_TYPE (arg1))
14001 && TYPE_PRECISION (TREE_TYPE (tem))
14002 < TYPE_PRECISION (type))
14004 int inner_width, outer_width;
14005 tree tem_type;
14007 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14008 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14009 if (outer_width > TYPE_PRECISION (type))
14010 outer_width = TYPE_PRECISION (type);
14012 wide_int mask = wi::shifted_mask
14013 (inner_width, outer_width - inner_width, false,
14014 TYPE_PRECISION (TREE_TYPE (arg1)));
14016 wide_int common = mask & arg1;
14017 if (common == mask)
14019 tem_type = signed_type_for (TREE_TYPE (tem));
14020 tem = fold_convert_loc (loc, tem_type, tem);
14022 else if (common == 0)
14024 tem_type = unsigned_type_for (TREE_TYPE (tem));
14025 tem = fold_convert_loc (loc, tem_type, tem);
14027 else
14028 tem = NULL;
14031 if (tem)
14032 return
14033 fold_convert_loc (loc, type,
14034 fold_build2_loc (loc, BIT_AND_EXPR,
14035 TREE_TYPE (tem), tem,
14036 fold_convert_loc (loc,
14037 TREE_TYPE (tem),
14038 arg1)));
14041 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14042 already handled above. */
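/* E.g. (a >> 3) & 1 ? 8 : 0 folds to a & 8, since
   tree_log2 (8) matches the shift count. */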
14043 if (TREE_CODE (arg0) == BIT_AND_EXPR
14044 && integer_onep (TREE_OPERAND (arg0, 1))
14045 && integer_zerop (op2)
14046 && integer_pow2p (arg1))
14048 tree tem = TREE_OPERAND (arg0, 0);
14049 STRIP_NOPS (tem);
14050 if (TREE_CODE (tem) == RSHIFT_EXPR
14051 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14052 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14053 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14054 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14055 TREE_OPERAND (tem, 0), arg1);
14058 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14059 is probably obsolete because the first operand should be a
14060 truth value (that's why we have the two cases above), but let's
14061 leave it in until we can confirm this for all front-ends. */
14062 if (integer_zerop (op2)
14063 && TREE_CODE (arg0) == NE_EXPR
14064 && integer_zerop (TREE_OPERAND (arg0, 1))
14065 && integer_pow2p (arg1)
14066 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14067 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14068 arg1, OEP_ONLY_CONST))
14069 return pedantic_non_lvalue_loc (loc,
14070 fold_convert_loc (loc, type,
14071 TREE_OPERAND (arg0, 0)));
14073 /* Disable the transformations below for vectors, since
14074 fold_binary_op_with_conditional_arg may undo them immediately,
14075 yielding an infinite loop. */
14076 if (code == VEC_COND_EXPR)
14077 return NULL_TREE;
14079 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14080 if (integer_zerop (op2)
14081 && truth_value_p (TREE_CODE (arg0))
14082 && truth_value_p (TREE_CODE (arg1))
14083 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14084 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14085 : TRUTH_ANDIF_EXPR,
14086 type, fold_convert_loc (loc, type, arg0), arg1);
14088 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14089 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14090 && truth_value_p (TREE_CODE (arg0))
14091 && truth_value_p (TREE_CODE (arg1))
14092 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14094 location_t loc0 = expr_location_or (arg0, loc);
14095 /* Only perform transformation if ARG0 is easily inverted. */
14096 tem = fold_invert_truthvalue (loc0, arg0);
14097 if (tem)
14098 return fold_build2_loc (loc, code == VEC_COND_EXPR
14099 ? BIT_IOR_EXPR
14100 : TRUTH_ORIF_EXPR,
14101 type, fold_convert_loc (loc, type, tem),
14102 arg1);
14105 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14106 if (integer_zerop (arg1)
14107 && truth_value_p (TREE_CODE (arg0))
14108 && truth_value_p (TREE_CODE (op2))
14109 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14111 location_t loc0 = expr_location_or (arg0, loc);
14112 /* Only perform transformation if ARG0 is easily inverted. */
14113 tem = fold_invert_truthvalue (loc0, arg0);
14114 if (tem)
14115 return fold_build2_loc (loc, code == VEC_COND_EXPR
14116 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14117 type, fold_convert_loc (loc, type, tem),
14118 op2);
14121 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14122 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14123 && truth_value_p (TREE_CODE (arg0))
14124 && truth_value_p (TREE_CODE (op2))
14125 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14126 return fold_build2_loc (loc, code == VEC_COND_EXPR
14127 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14128 type, fold_convert_loc (loc, type, arg0), op2);
14130 return NULL_TREE;
14132 case CALL_EXPR:
14133 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14134 of fold_ternary on them. */
14135 gcc_unreachable ();
14137 case BIT_FIELD_REF:
14138 if ((TREE_CODE (arg0) == VECTOR_CST
14139 || (TREE_CODE (arg0) == CONSTRUCTOR
14140 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14141 && (type == TREE_TYPE (TREE_TYPE (arg0))
14142 || (TREE_CODE (type) == VECTOR_TYPE
14143 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14145 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14146 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14147 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14148 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14150 if (n != 0
14151 && (idx % width) == 0
14152 && (n % width) == 0
14153 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14155 idx = idx / width;
14156 n = n / width;
14158 if (TREE_CODE (arg0) == VECTOR_CST)
14160 if (n == 1)
14161 return VECTOR_CST_ELT (arg0, idx);
14163 tree *vals = XALLOCAVEC (tree, n);
14164 for (unsigned i = 0; i < n; ++i)
14165 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14166 return build_vector (type, vals);
14169 /* Constructor elements can be subvectors. */
14170 unsigned HOST_WIDE_INT k = 1;
14171 if (CONSTRUCTOR_NELTS (arg0) != 0)
14173 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14174 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14175 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14178 /* We keep an exact subset of the constructor elements. */
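/* E.g. extracting elements 2 and 3 of a V4SI vector built from
   two V2SI constructor elements (k == 2) reduces to the second
   constructor element. */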
14179 if ((idx % k) == 0 && (n % k) == 0)
14181 if (CONSTRUCTOR_NELTS (arg0) == 0)
14182 return build_constructor (type, NULL);
14183 idx /= k;
14184 n /= k;
14185 if (n == 1)
14187 if (idx < CONSTRUCTOR_NELTS (arg0))
14188 return CONSTRUCTOR_ELT (arg0, idx)->value;
14189 return build_zero_cst (type);
14192 vec<constructor_elt, va_gc> *vals;
14193 vec_alloc (vals, n);
14194 for (unsigned i = 0;
14195 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14196 ++i)
14197 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14198 CONSTRUCTOR_ELT
14199 (arg0, idx + i)->value);
14200 return build_constructor (type, vals);
14202 /* The bitfield references a single constructor element. */
14203 else if (idx + n <= (idx / k + 1) * k)
14205 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14206 return build_zero_cst (type);
14207 else if (n == k)
14208 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14209 else
14210 return fold_build3_loc (loc, code, type,
14211 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14212 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14217 /* A bit-field-ref that references the full argument can be stripped. */
14218 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14219 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14220 && integer_zerop (op2))
14221 return fold_convert_loc (loc, type, arg0);
14223 /* On constants we can use native encode/interpret to constant
14224 fold (nearly) all BIT_FIELD_REFs. */
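/* E.g. on a target with 8-bit bytes, a reference to bits 32..63
   of a 64-bit INTEGER_CST is folded by encoding the constant into
   its byte image and re-interpreting bytes 4..7 in the requested
   type. */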
14225 if (CONSTANT_CLASS_P (arg0)
14226 && can_native_interpret_type_p (type)
14227 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14228 /* This limitation should not be necessary; we just need to
14229 round this up to mode size. */
14230 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14231 /* Need bit-shifting of the buffer to relax the following. */
14232 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14234 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14235 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14236 unsigned HOST_WIDE_INT clen;
14237 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14238 /* ??? We cannot tell native_encode_expr to start at
14239 an arbitrary byte, so limit ourselves to a reasonable
14240 amount of work. */
14241 if (clen <= 4096)
14243 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14244 unsigned HOST_WIDE_INT len
14245 = native_encode_expr (arg0, b, clen, false);
14246 if (len > 0 && len * BITS_PER_UNIT >= bitpos + bitsize)
14248 tree v = native_interpret_expr (type,
14249 b + bitpos / BITS_PER_UNIT,
14250 bitsize / BITS_PER_UNIT);
14251 if (v)
14252 return v;
14257 return NULL_TREE;
14259 case FMA_EXPR:
14260 /* For integers we can decompose the FMA if possible. */
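/* E.g. FMA_EXPR <3, 4, 5> folds to 3 * 4 + 5 == 17. */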
14261 if (TREE_CODE (arg0) == INTEGER_CST
14262 && TREE_CODE (arg1) == INTEGER_CST)
14263 return fold_build2_loc (loc, PLUS_EXPR, type,
14264 const_binop (MULT_EXPR, arg0, arg1), arg2);
14265 if (integer_zerop (arg2))
14266 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14268 return fold_fma (loc, type, arg0, arg1, arg2);
14270 case VEC_PERM_EXPR:
14271 if (TREE_CODE (arg2) == VECTOR_CST)
14273 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
14274 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
14275 unsigned char *sel2 = sel + nelts;
14276 bool need_mask_canon = false;
14277 bool need_mask_canon2 = false;
14278 bool all_in_vec0 = true;
14279 bool all_in_vec1 = true;
14280 bool maybe_identity = true;
14281 bool single_arg = (op0 == op1);
14282 bool changed = false;
14284 mask2 = 2 * nelts - 1;
14285 mask = single_arg ? (nelts - 1) : mask2;
14286 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14287 for (i = 0; i < nelts; i++)
14289 tree val = VECTOR_CST_ELT (arg2, i);
14290 if (TREE_CODE (val) != INTEGER_CST)
14291 return NULL_TREE;
14293 /* Make sure that the perm value is in an acceptable
14294 range. */
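/* For V4SI operands, mask2 == 7 allows indices into the
   concatenation of both input vectors, while mask == 3 is used
   when both operands are the same vector. */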
14295 wide_int t = val;
14296 need_mask_canon |= wi::gtu_p (t, mask);
14297 need_mask_canon2 |= wi::gtu_p (t, mask2);
14298 sel[i] = t.to_uhwi () & mask;
14299 sel2[i] = t.to_uhwi () & mask2;
14301 if (sel[i] < nelts)
14302 all_in_vec1 = false;
14303 else
14304 all_in_vec0 = false;
14306 if ((sel[i] & (nelts-1)) != i)
14307 maybe_identity = false;
14310 if (maybe_identity)
14312 if (all_in_vec0)
14313 return op0;
14314 if (all_in_vec1)
14315 return op1;
14318 if (all_in_vec0)
14319 op1 = op0;
14320 else if (all_in_vec1)
14322 op0 = op1;
14323 for (i = 0; i < nelts; i++)
14324 sel[i] -= nelts;
14325 need_mask_canon = true;
14328 if ((TREE_CODE (op0) == VECTOR_CST
14329 || TREE_CODE (op0) == CONSTRUCTOR)
14330 && (TREE_CODE (op1) == VECTOR_CST
14331 || TREE_CODE (op1) == CONSTRUCTOR))
14333 tree t = fold_vec_perm (type, op0, op1, sel);
14334 if (t != NULL_TREE)
14335 return t;
14338 if (op0 == op1 && !single_arg)
14339 changed = true;
14341 /* Some targets are deficient and fail to expand a single
14342 argument permutation while still allowing an equivalent
14343 2-argument version. */
14344 if (need_mask_canon && arg2 == op2
14345 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
14346 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
14348 need_mask_canon = need_mask_canon2;
14349 sel = sel2;
14352 if (need_mask_canon && arg2 == op2)
14354 tree *tsel = XALLOCAVEC (tree, nelts);
14355 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14356 for (i = 0; i < nelts; i++)
14357 tsel[i] = build_int_cst (eltype, sel[i]);
14358 op2 = build_vector (TREE_TYPE (arg2), tsel);
14359 changed = true;
14362 if (changed)
14363 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14365 return NULL_TREE;
14367 default:
14368 return NULL_TREE;
14369 } /* switch (code) */
14372 /* Perform constant folding and related simplification of EXPR.
14373 The related simplifications include x*1 => x, x*0 => 0, etc.,
14374 and application of the associative law.
14375 NOP_EXPR conversions may be removed freely (as long as we
14376 are careful not to change the type of the overall expression).
14377 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14378 but we can constant-fold them if they have constant operands. */
14380 #ifdef ENABLE_FOLD_CHECKING
14381 # define fold(x) fold_1 (x)
14382 static tree fold_1 (tree);
14383 static
14384 #endif
14385 tree
14386 fold (tree expr)
14388 const tree t = expr;
14389 enum tree_code code = TREE_CODE (t);
14390 enum tree_code_class kind = TREE_CODE_CLASS (code);
14391 tree tem;
14392 location_t loc = EXPR_LOCATION (expr);
14394 /* Return right away if a constant. */
14395 if (kind == tcc_constant)
14396 return t;
14398 /* CALL_EXPR-like objects with variable numbers of operands are
14399 treated specially. */
14400 if (kind == tcc_vl_exp)
14402 if (code == CALL_EXPR)
14404 tem = fold_call_expr (loc, expr, false);
14405 return tem ? tem : expr;
14407 return expr;
14410 if (IS_EXPR_CODE_CLASS (kind))
14412 tree type = TREE_TYPE (t);
14413 tree op0, op1, op2;
14415 switch (TREE_CODE_LENGTH (code))
14417 case 1:
14418 op0 = TREE_OPERAND (t, 0);
14419 tem = fold_unary_loc (loc, code, type, op0);
14420 return tem ? tem : expr;
14421 case 2:
14422 op0 = TREE_OPERAND (t, 0);
14423 op1 = TREE_OPERAND (t, 1);
14424 tem = fold_binary_loc (loc, code, type, op0, op1);
14425 return tem ? tem : expr;
14426 case 3:
14427 op0 = TREE_OPERAND (t, 0);
14428 op1 = TREE_OPERAND (t, 1);
14429 op2 = TREE_OPERAND (t, 2);
14430 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14431 return tem ? tem : expr;
14432 default:
14433 break;
14437 switch (code)
14439 case ARRAY_REF:
14441 tree op0 = TREE_OPERAND (t, 0);
14442 tree op1 = TREE_OPERAND (t, 1);
14444 if (TREE_CODE (op1) == INTEGER_CST
14445 && TREE_CODE (op0) == CONSTRUCTOR
14446 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14448 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14449 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14450 unsigned HOST_WIDE_INT begin = 0;
14452 /* Find a matching index by means of a binary search. */
14453 while (begin != end)
14455 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14456 tree index = (*elts)[middle].index;
14458 if (TREE_CODE (index) == INTEGER_CST
14459 && tree_int_cst_lt (index, op1))
14460 begin = middle + 1;
14461 else if (TREE_CODE (index) == INTEGER_CST
14462 && tree_int_cst_lt (op1, index))
14463 end = middle;
14464 else if (TREE_CODE (index) == RANGE_EXPR
14465 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14466 begin = middle + 1;
14467 else if (TREE_CODE (index) == RANGE_EXPR
14468 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14469 end = middle;
14470 else
14471 return (*elts)[middle].value;
14475 return t;
14478 /* Return a VECTOR_CST if possible. */
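/* E.g. a constructor { 1, 2 } of a V4SI type becomes the vector
   constant { 1, 2, 0, 0 }; constructor elements that are
   themselves VECTOR_CSTs are spliced in element by element. */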
14479 case CONSTRUCTOR:
14481 tree type = TREE_TYPE (t);
14482 if (TREE_CODE (type) != VECTOR_TYPE)
14483 return t;
14485 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14486 unsigned HOST_WIDE_INT idx, pos = 0;
14487 tree value;
14489 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14491 if (!CONSTANT_CLASS_P (value))
14492 return t;
14493 if (TREE_CODE (value) == VECTOR_CST)
14495 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14496 vec[pos++] = VECTOR_CST_ELT (value, i);
14498 else
14499 vec[pos++] = value;
14501 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14502 vec[pos] = build_zero_cst (TREE_TYPE (type));
14504 return build_vector (type, vec);
14507 case CONST_DECL:
14508 return fold (DECL_INITIAL (t));
14510 default:
14511 return t;
14512 } /* switch (code) */
14515 #ifdef ENABLE_FOLD_CHECKING
14516 #undef fold
14518 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14519 hash_table<pointer_hash<const tree_node> > *);
14520 static void fold_check_failed (const_tree, const_tree);
14521 void print_fold_checksum (const_tree);
14523 /* When --enable-checking=fold, compute a digest of expr before
14524 and after the actual fold call to verify that fold did not
14525 accidentally change the original expr. */
14527 tree
14528 fold (tree expr)
14530 tree ret;
14531 struct md5_ctx ctx;
14532 unsigned char checksum_before[16], checksum_after[16];
14533 hash_table<pointer_hash<const tree_node> > ht (32);
14535 md5_init_ctx (&ctx);
14536 fold_checksum_tree (expr, &ctx, &ht);
14537 md5_finish_ctx (&ctx, checksum_before);
14538 ht.empty ();
14540 ret = fold_1 (expr);
14542 md5_init_ctx (&ctx);
14543 fold_checksum_tree (expr, &ctx, &ht);
14544 md5_finish_ctx (&ctx, checksum_after);
14546 if (memcmp (checksum_before, checksum_after, 16))
14547 fold_check_failed (expr, ret);
14549 return ret;
14552 void
14553 print_fold_checksum (const_tree expr)
14555 struct md5_ctx ctx;
14556 unsigned char checksum[16], cnt;
14557 hash_table<pointer_hash<const tree_node> > ht (32);
14559 md5_init_ctx (&ctx);
14560 fold_checksum_tree (expr, &ctx, &ht);
14561 md5_finish_ctx (&ctx, checksum);
14562 for (cnt = 0; cnt < 16; ++cnt)
14563 fprintf (stderr, "%02x", checksum[cnt]);
14564 putc ('\n', stderr);
14567 static void
14568 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14570 internal_error ("fold check: original tree changed by fold");
14573 static void
14574 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14575 hash_table<pointer_hash <const tree_node> > *ht)
14577 const tree_node **slot;
14578 enum tree_code code;
14579 union tree_node buf;
14580 int i, len;
14582 recursive_label:
14583 if (expr == NULL)
14584 return;
14585 slot = ht->find_slot (expr, INSERT);
14586 if (*slot != NULL)
14587 return;
14588 *slot = expr;
14589 code = TREE_CODE (expr);
14590 if (TREE_CODE_CLASS (code) == tcc_declaration
14591 && DECL_ASSEMBLER_NAME_SET_P (expr))
14593 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14594 memcpy ((char *) &buf, expr, tree_size (expr));
14595 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14596 expr = (tree) &buf;
14598 else if (TREE_CODE_CLASS (code) == tcc_type
14599 && (TYPE_POINTER_TO (expr)
14600 || TYPE_REFERENCE_TO (expr)
14601 || TYPE_CACHED_VALUES_P (expr)
14602 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14603 || TYPE_NEXT_VARIANT (expr)))
14605 /* Allow these fields to be modified. */
14606 tree tmp;
14607 memcpy ((char *) &buf, expr, tree_size (expr));
14608 expr = tmp = (tree) &buf;
14609 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14610 TYPE_POINTER_TO (tmp) = NULL;
14611 TYPE_REFERENCE_TO (tmp) = NULL;
14612 TYPE_NEXT_VARIANT (tmp) = NULL;
14613 if (TYPE_CACHED_VALUES_P (tmp))
14615 TYPE_CACHED_VALUES_P (tmp) = 0;
14616 TYPE_CACHED_VALUES (tmp) = NULL;
14619 md5_process_bytes (expr, tree_size (expr), ctx);
14620 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14621 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14622 if (TREE_CODE_CLASS (code) != tcc_type
14623 && TREE_CODE_CLASS (code) != tcc_declaration
14624 && code != TREE_LIST
14625 && code != SSA_NAME
14626 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14627 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14628 switch (TREE_CODE_CLASS (code))
14630 case tcc_constant:
14631 switch (code)
14633 case STRING_CST:
14634 md5_process_bytes (TREE_STRING_POINTER (expr),
14635 TREE_STRING_LENGTH (expr), ctx);
14636 break;
14637 case COMPLEX_CST:
14638 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14639 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14640 break;
14641 case VECTOR_CST:
14642 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14643 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14644 break;
14645 default:
14646 break;
14648 break;
14649 case tcc_exceptional:
14650 switch (code)
14652 case TREE_LIST:
14653 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14654 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14655 expr = TREE_CHAIN (expr);
14656 goto recursive_label;
14657 break;
14658 case TREE_VEC:
14659 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14660 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14661 break;
14662 default:
14663 break;
14665 break;
14666 case tcc_expression:
14667 case tcc_reference:
14668 case tcc_comparison:
14669 case tcc_unary:
14670 case tcc_binary:
14671 case tcc_statement:
14672 case tcc_vl_exp:
14673 len = TREE_OPERAND_LENGTH (expr);
14674 for (i = 0; i < len; ++i)
14675 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14676 break;
14677 case tcc_declaration:
14678 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14679 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14680 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14682 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14683 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14684 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14685 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14686 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14689 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14691 if (TREE_CODE (expr) == FUNCTION_DECL)
14693 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14694 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14696 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14698 break;
14699 case tcc_type:
14700 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14701 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14702 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14703 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14704 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14705 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14706 if (INTEGRAL_TYPE_P (expr)
14707 || SCALAR_FLOAT_TYPE_P (expr))
14709 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14710 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14712 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14713 if (TREE_CODE (expr) == RECORD_TYPE
14714 || TREE_CODE (expr) == UNION_TYPE
14715 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14716 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14717 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14718 break;
14719 default:
14720 break;
14724 /* Helper function for outputting the checksum of a tree T. When
14725 debugging with gdb, you can "define mynext" to be "next" followed
14726 by "call debug_fold_checksum (op0)", then just trace down till the
14727 outputs differ. */
14729 DEBUG_FUNCTION void
14730 debug_fold_checksum (const_tree t)
14732 int i;
14733 unsigned char checksum[16];
14734 struct md5_ctx ctx;
14735 hash_table<pointer_hash<const tree_node> > ht (32);
14737 md5_init_ctx (&ctx);
14738 fold_checksum_tree (t, &ctx, &ht);
14739 md5_finish_ctx (&ctx, checksum);
14740 ht.empty ();
14742 for (i = 0; i < 16; i++)
14743 fprintf (stderr, "%d ", checksum[i]);
14745 fprintf (stderr, "\n");
14748 #endif
14750 /* Fold a unary tree expression with code CODE of type TYPE with an
14751 operand OP0. LOC is the location of the resulting expression.
14752 Return a folded expression if successful. Otherwise, return a tree
14753 expression with code CODE of type TYPE with an operand OP0. */
14755 tree
14756 fold_build1_stat_loc (location_t loc,
14757 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14759 tree tem;
14760 #ifdef ENABLE_FOLD_CHECKING
14761 unsigned char checksum_before[16], checksum_after[16];
14762 struct md5_ctx ctx;
14763 hash_table<pointer_hash<const tree_node> > ht (32);
14765 md5_init_ctx (&ctx);
14766 fold_checksum_tree (op0, &ctx, &ht);
14767 md5_finish_ctx (&ctx, checksum_before);
14768 ht.empty ();
14769 #endif
14771 tem = fold_unary_loc (loc, code, type, op0);
14772 if (!tem)
14773 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14775 #ifdef ENABLE_FOLD_CHECKING
14776 md5_init_ctx (&ctx);
14777 fold_checksum_tree (op0, &ctx, &ht);
14778 md5_finish_ctx (&ctx, checksum_after);
14780 if (memcmp (checksum_before, checksum_after, 16))
14781 fold_check_failed (op0, tem);
14782 #endif
14783 return tem;
14786 /* Fold a binary tree expression with code CODE of type TYPE with
14787 operands OP0 and OP1. LOC is the location of the resulting
14788 expression. Return a folded expression if successful. Otherwise,
14789 return a tree expression with code CODE of type TYPE with operands
14790 OP0 and OP1. */
14792 tree
14793 fold_build2_stat_loc (location_t loc,
14794 enum tree_code code, tree type, tree op0, tree op1
14795 MEM_STAT_DECL)
14797 tree tem;
14798 #ifdef ENABLE_FOLD_CHECKING
14799 unsigned char checksum_before_op0[16],
14800 checksum_before_op1[16],
14801 checksum_after_op0[16],
14802 checksum_after_op1[16];
14803 struct md5_ctx ctx;
14804 hash_table<pointer_hash<const tree_node> > ht (32);
14806 md5_init_ctx (&ctx);
14807 fold_checksum_tree (op0, &ctx, &ht);
14808 md5_finish_ctx (&ctx, checksum_before_op0);
14809 ht.empty ();
14811 md5_init_ctx (&ctx);
14812 fold_checksum_tree (op1, &ctx, &ht);
14813 md5_finish_ctx (&ctx, checksum_before_op1);
14814 ht.empty ();
14815 #endif
14817 tem = fold_binary_loc (loc, code, type, op0, op1);
14818 if (!tem)
14819 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14821 #ifdef ENABLE_FOLD_CHECKING
14822 md5_init_ctx (&ctx);
14823 fold_checksum_tree (op0, &ctx, &ht);
14824 md5_finish_ctx (&ctx, checksum_after_op0);
14825 ht.empty ();
14827 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14828 fold_check_failed (op0, tem);
14830 md5_init_ctx (&ctx);
14831 fold_checksum_tree (op1, &ctx, &ht);
14832 md5_finish_ctx (&ctx, checksum_after_op1);
14834 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14835 fold_check_failed (op1, tem);
14836 #endif
14837 return tem;
14840 /* Fold a ternary tree expression with code CODE of type TYPE with
14841 operands OP0, OP1, and OP2. Return a folded expression if
14842 successful. Otherwise, return a tree expression with code CODE of
14843 type TYPE with operands OP0, OP1, and OP2. */
14845 tree
14846 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14847 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14849 tree tem;
14850 #ifdef ENABLE_FOLD_CHECKING
14851 unsigned char checksum_before_op0[16],
14852 checksum_before_op1[16],
14853 checksum_before_op2[16],
14854 checksum_after_op0[16],
14855 checksum_after_op1[16],
14856 checksum_after_op2[16];
14857 struct md5_ctx ctx;
14858 hash_table<pointer_hash<const tree_node> > ht (32);
14860 md5_init_ctx (&ctx);
14861 fold_checksum_tree (op0, &ctx, &ht);
14862 md5_finish_ctx (&ctx, checksum_before_op0);
14863 ht.empty ();
14865 md5_init_ctx (&ctx);
14866 fold_checksum_tree (op1, &ctx, &ht);
14867 md5_finish_ctx (&ctx, checksum_before_op1);
14868 ht.empty ();
14870 md5_init_ctx (&ctx);
14871 fold_checksum_tree (op2, &ctx, &ht);
14872 md5_finish_ctx (&ctx, checksum_before_op2);
14873 ht.empty ();
14874 #endif
14876 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14877 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14878 if (!tem)
14879 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14881 #ifdef ENABLE_FOLD_CHECKING
14882 md5_init_ctx (&ctx);
14883 fold_checksum_tree (op0, &ctx, &ht);
14884 md5_finish_ctx (&ctx, checksum_after_op0);
14885 ht.empty ();
14887 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14888 fold_check_failed (op0, tem);
14890 md5_init_ctx (&ctx);
14891 fold_checksum_tree (op1, &ctx, &ht);
14892 md5_finish_ctx (&ctx, checksum_after_op1);
14893 ht.empty ();
14895 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14896 fold_check_failed (op1, tem);
14898 md5_init_ctx (&ctx);
14899 fold_checksum_tree (op2, &ctx, &ht);
14900 md5_finish_ctx (&ctx, checksum_after_op2);
14902 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14903 fold_check_failed (op2, tem);
14904 #endif
14905 return tem;
14908 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14909 arguments in ARGARRAY, and a null static chain.
14910 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14911 of type TYPE from the given operands as constructed by build_call_array. */
14913 tree
14914 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14915 int nargs, tree *argarray)
14917 tree tem;
14918 #ifdef ENABLE_FOLD_CHECKING
14919 unsigned char checksum_before_fn[16],
14920 checksum_before_arglist[16],
14921 checksum_after_fn[16],
14922 checksum_after_arglist[16];
14923 struct md5_ctx ctx;
14924 hash_table<pointer_hash<const tree_node> > ht (32);
14925 int i;
14927 md5_init_ctx (&ctx);
14928 fold_checksum_tree (fn, &ctx, &ht);
14929 md5_finish_ctx (&ctx, checksum_before_fn);
14930 ht.empty ();
14932 md5_init_ctx (&ctx);
14933 for (i = 0; i < nargs; i++)
14934 fold_checksum_tree (argarray[i], &ctx, &ht);
14935 md5_finish_ctx (&ctx, checksum_before_arglist);
14936 ht.empty ();
14937 #endif
14939 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14941 #ifdef ENABLE_FOLD_CHECKING
14942 md5_init_ctx (&ctx);
14943 fold_checksum_tree (fn, &ctx, &ht);
14944 md5_finish_ctx (&ctx, checksum_after_fn);
14945 ht.empty ();
14947 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14948 fold_check_failed (fn, tem);
14950 md5_init_ctx (&ctx);
14951 for (i = 0; i < nargs; i++)
14952 fold_checksum_tree (argarray[i], &ctx, &ht);
14953 md5_finish_ctx (&ctx, checksum_after_arglist);
14955 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14956 fold_check_failed (NULL_TREE, tem);
14957 #endif
14958 return tem;
14961 /* Perform constant folding and related simplification of initializer
14962 expressions. These functions behave identically to "fold_buildN" but
14963 ignore potential run-time traps and exceptions that fold must preserve. */
14965 #define START_FOLD_INIT \
14966 int saved_signaling_nans = flag_signaling_nans;\
14967 int saved_trapping_math = flag_trapping_math;\
14968 int saved_rounding_math = flag_rounding_math;\
14969 int saved_trapv = flag_trapv;\
14970 int saved_folding_initializer = folding_initializer;\
14971 flag_signaling_nans = 0;\
14972 flag_trapping_math = 0;\
14973 flag_rounding_math = 0;\
14974 flag_trapv = 0;\
14975 folding_initializer = 1;
14977 #define END_FOLD_INIT \
14978 flag_signaling_nans = saved_signaling_nans;\
14979 flag_trapping_math = saved_trapping_math;\
14980 flag_rounding_math = saved_rounding_math;\
14981 flag_trapv = saved_trapv;\
14982 folding_initializer = saved_folding_initializer;
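/* START_FOLD_INIT and END_FOLD_INIT must be used as a matched
   pair within a single scope: the former declares the saved_*
   locals from which the latter restores the flags. */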
14984 tree
14985 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14986 tree type, tree op)
14988 tree result;
14989 START_FOLD_INIT;
14991 result = fold_build1_loc (loc, code, type, op);
14993 END_FOLD_INIT;
14994 return result;
14997 tree
14998 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14999 tree type, tree op0, tree op1)
15001 tree result;
15002 START_FOLD_INIT;
15004 result = fold_build2_loc (loc, code, type, op0, op1);
15006 END_FOLD_INIT;
15007 return result;
15010 tree
15011 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15012 int nargs, tree *argarray)
15014 tree result;
15015 START_FOLD_INIT;
15017 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15019 END_FOLD_INIT;
15020 return result;
15023 #undef START_FOLD_INIT
15024 #undef END_FOLD_INIT
15026 /* Determine if first argument is a multiple of second argument. Return 0 if
15027 it is not, or if we cannot easily determine it to be.
15029 An example of the sort of thing we care about (at this point; this routine
15030 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15031 fold cases do now) is discovering that
15033 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15035 is a multiple of
15037 SAVE_EXPR (J * 8)
15039 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15041 This code also handles discovering that
15043 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15045 is a multiple of 8 so we don't have to worry about dealing with a
15046 possible remainder.
15048 Note that we *look* inside a SAVE_EXPR only to determine how it was
15049 calculated; it is not safe for fold to do much of anything else with the
15050 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15051 at run time. For example, the latter example above *cannot* be implemented
15052 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15053 evaluation time of the original SAVE_EXPR is not necessarily the same at
15054 the time the new expression is evaluated. The only optimization of this
15055 sort that would be valid is changing
15057 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15059 divided by 8 to
15061 SAVE_EXPR (I) * SAVE_EXPR (J)
15063 (where the same SAVE_EXPR (J) is used in the original and the
15064 transformed version). */
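/* For instance, multiple_of_p returns 1 for TOP = X * 4,
   BOTTOM = 2, because one factor of the product is itself a
   multiple of 2, but 0 for TOP = X + 2, BOTTOM = 4, since only
   one addend is known to be a multiple of 4. */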
15066 int
15067 multiple_of_p (tree type, const_tree top, const_tree bottom)
15069 if (operand_equal_p (top, bottom, 0))
15070 return 1;
15072 if (TREE_CODE (type) != INTEGER_TYPE)
15073 return 0;
15075 switch (TREE_CODE (top))
15077 case BIT_AND_EXPR:
15078 /* Bitwise and provides a power of two multiple. If the mask is
15079 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15080 if (!integer_pow2p (bottom))
15081 return 0;
15082 /* FALLTHRU */
15084 case MULT_EXPR:
15085 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15086 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15088 case PLUS_EXPR:
15089 case MINUS_EXPR:
15090 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15091 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15093 case LSHIFT_EXPR:
15094 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15096 tree op1, t1;
15098 op1 = TREE_OPERAND (top, 1);
15099 /* const_binop may not detect overflow correctly,
15100 so check for it explicitly here. */
15101 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15102 && 0 != (t1 = fold_convert (type,
15103 const_binop (LSHIFT_EXPR,
15104 size_one_node,
15105 op1)))
15106 && !TREE_OVERFLOW (t1))
15107 return multiple_of_p (type, t1, bottom);
15109 return 0;
15111 case NOP_EXPR:
15112 /* Can't handle conversions from non-integral or wider integral type. */
15113 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15114 || (TYPE_PRECISION (type)
15115 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15116 return 0;
15118 /* ... fall through ... */
15120 case SAVE_EXPR:
15121 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15123 case COND_EXPR:
15124 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15125 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15127 case INTEGER_CST:
15128 if (TREE_CODE (bottom) != INTEGER_CST
15129 || integer_zerop (bottom)
15130 || (TYPE_UNSIGNED (type)
15131 && (tree_int_cst_sgn (top) < 0
15132 || tree_int_cst_sgn (bottom) < 0)))
15133 return 0;
15134 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15135 SIGNED);
15137 default:
15138 return 0;
15142 /* Return true if CODE or TYPE is known to be non-negative. */
15144 static bool
15145 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15147 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15148 && truth_value_p (code))
15149 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15150 have a signed:1 type (where the values are -1 and 0). */
15151 return true;
15152 return false;
15155 /* Return true if (CODE OP0) is known to be non-negative. If the return
15156 value is based on the assumption that signed overflow is undefined,
15157 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15158 *STRICT_OVERFLOW_P. */
15160 bool
15161 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15162 bool *strict_overflow_p)
15164 if (TYPE_UNSIGNED (type))
15165 return true;
15167 switch (code)
15169 case ABS_EXPR:
15170 /* We can't return 1 if flag_wrapv is set because
15171 ABS_EXPR<INT_MIN> = INT_MIN. */
15172 if (!INTEGRAL_TYPE_P (type))
15173 return true;
15174 if (TYPE_OVERFLOW_UNDEFINED (type))
15176 *strict_overflow_p = true;
15177 return true;
15179 break;
15181 case NON_LVALUE_EXPR:
15182 case FLOAT_EXPR:
15183 case FIX_TRUNC_EXPR:
15184 return tree_expr_nonnegative_warnv_p (op0,
15185 strict_overflow_p);
15187 case NOP_EXPR:
15189 tree inner_type = TREE_TYPE (op0);
15190 tree outer_type = type;
15192 if (TREE_CODE (outer_type) == REAL_TYPE)
15194 if (TREE_CODE (inner_type) == REAL_TYPE)
15195 return tree_expr_nonnegative_warnv_p (op0,
15196 strict_overflow_p);
15197 if (INTEGRAL_TYPE_P (inner_type))
15199 if (TYPE_UNSIGNED (inner_type))
15200 return true;
15201 return tree_expr_nonnegative_warnv_p (op0,
15202 strict_overflow_p);
15205 else if (INTEGRAL_TYPE_P (outer_type))
15207 if (TREE_CODE (inner_type) == REAL_TYPE)
15208 return tree_expr_nonnegative_warnv_p (op0,
15209 strict_overflow_p);
15210 if (INTEGRAL_TYPE_P (inner_type))
15211 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15212 && TYPE_UNSIGNED (inner_type);
15215 break;
15217 default:
15218 return tree_simple_nonnegative_warnv_p (code, type);
15221 /* We don't know sign of `t', so be conservative and return false. */
15222 return false;
15225 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15226 value is based on the assumption that signed overflow is undefined,
15227 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15228 *STRICT_OVERFLOW_P. */
15230 bool
15231 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15232 tree op1, bool *strict_overflow_p)
15234 if (TYPE_UNSIGNED (type))
15235 return true;
15237 switch (code)
15239 case POINTER_PLUS_EXPR:
15240 case PLUS_EXPR:
15241 if (FLOAT_TYPE_P (type))
15242 return (tree_expr_nonnegative_warnv_p (op0,
15243 strict_overflow_p)
15244 && tree_expr_nonnegative_warnv_p (op1,
15245 strict_overflow_p));
15247 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15248 both unsigned and at least 2 bits shorter than the result. */
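/* E.g. (int) (unsigned char) a + (int) (unsigned char) b is at
   most 255 + 255 == 510, which fits in MAX (8, 8) + 1 == 9 bits,
   so in a 32-bit result the sum cannot be negative. */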
15249 if (TREE_CODE (type) == INTEGER_TYPE
15250 && TREE_CODE (op0) == NOP_EXPR
15251 && TREE_CODE (op1) == NOP_EXPR)
15253 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15254 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15255 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15256 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15258 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15259 TYPE_PRECISION (inner2)) + 1;
15260 return prec < TYPE_PRECISION (type);
15263 break;
15265 case MULT_EXPR:
15266 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15268 /* x * x is always non-negative for floating-point x, or for
15269 integers when signed overflow is undefined. */
15270 if (operand_equal_p (op0, op1, 0)
15271 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15272 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15274 if (TYPE_OVERFLOW_UNDEFINED (type))
15275 *strict_overflow_p = true;
15276 return true;
15280 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15281 both unsigned and their combined precision is less than that of the result. */
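/* E.g. the product of two zero-extended 16-bit values needs at
   most 32 bits, so in a 64-bit result type it cannot be
   negative. */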
15282 if (TREE_CODE (type) == INTEGER_TYPE
15283 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15284 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15286 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15287 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15288 : TREE_TYPE (op0);
15289 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15290 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15291 : TREE_TYPE (op1);
15293 bool unsigned0 = TYPE_UNSIGNED (inner0);
15294 bool unsigned1 = TYPE_UNSIGNED (inner1);
15296 if (TREE_CODE (op0) == INTEGER_CST)
15297 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15299 if (TREE_CODE (op1) == INTEGER_CST)
15300 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15302 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15303 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15305 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15306 ? tree_int_cst_min_precision (op0, UNSIGNED)
15307 : TYPE_PRECISION (inner0);
15309 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15310 ? tree_int_cst_min_precision (op1, UNSIGNED)
15311 : TYPE_PRECISION (inner1);
15313 return precision0 + precision1 < TYPE_PRECISION (type);
15316 return false;
15318 case BIT_AND_EXPR:
15319 case MAX_EXPR:
15320 return (tree_expr_nonnegative_warnv_p (op0,
15321 strict_overflow_p)
15322 || tree_expr_nonnegative_warnv_p (op1,
15323 strict_overflow_p));
15325 case BIT_IOR_EXPR:
15326 case BIT_XOR_EXPR:
15327 case MIN_EXPR:
15328 case RDIV_EXPR:
15329 case TRUNC_DIV_EXPR:
15330 case CEIL_DIV_EXPR:
15331 case FLOOR_DIV_EXPR:
15332 case ROUND_DIV_EXPR:
15333 return (tree_expr_nonnegative_warnv_p (op0,
15334 strict_overflow_p)
15335 && tree_expr_nonnegative_warnv_p (op1,
15336 strict_overflow_p));
15338 case TRUNC_MOD_EXPR:
15339 case CEIL_MOD_EXPR:
15340 case FLOOR_MOD_EXPR:
15341 case ROUND_MOD_EXPR:
15342 return tree_expr_nonnegative_warnv_p (op0,
15343 strict_overflow_p);
15344 default:
15345 return tree_simple_nonnegative_warnv_p (code, type);
15348 /* We don't know sign of `t', so be conservative and return false. */
15349 return false;
15352 /* Return true if T is known to be non-negative. If the return
15353 value is based on the assumption that signed overflow is undefined,
15354 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15355 *STRICT_OVERFLOW_P. */
15357 bool
15358 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15360 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15361 return true;
15363 switch (TREE_CODE (t))
15365 case INTEGER_CST:
15366 return tree_int_cst_sgn (t) >= 0;
15368 case REAL_CST:
15369 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15371 case FIXED_CST:
15372 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15374 case COND_EXPR:
15375 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15376 strict_overflow_p)
15377 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15378 strict_overflow_p));
15379 default:
15380 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15381 TREE_TYPE (t));
15383 /* We don't know sign of `t', so be conservative and return false. */
15384 return false;
15387 /* Return true if T is known to be non-negative. If the return
15388 value is based on the assumption that signed overflow is undefined,
15389 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15390 *STRICT_OVERFLOW_P. */
15392 bool
15393 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15394 tree arg0, tree arg1, bool *strict_overflow_p)
15396 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15397 switch (DECL_FUNCTION_CODE (fndecl))
15399 CASE_FLT_FN (BUILT_IN_ACOS):
15400 CASE_FLT_FN (BUILT_IN_ACOSH):
15401 CASE_FLT_FN (BUILT_IN_CABS):
15402 CASE_FLT_FN (BUILT_IN_COSH):
15403 CASE_FLT_FN (BUILT_IN_ERFC):
15404 CASE_FLT_FN (BUILT_IN_EXP):
15405 CASE_FLT_FN (BUILT_IN_EXP10):
15406 CASE_FLT_FN (BUILT_IN_EXP2):
15407 CASE_FLT_FN (BUILT_IN_FABS):
15408 CASE_FLT_FN (BUILT_IN_FDIM):
15409 CASE_FLT_FN (BUILT_IN_HYPOT):
15410 CASE_FLT_FN (BUILT_IN_POW10):
15411 CASE_INT_FN (BUILT_IN_FFS):
15412 CASE_INT_FN (BUILT_IN_PARITY):
15413 CASE_INT_FN (BUILT_IN_POPCOUNT):
15414 CASE_INT_FN (BUILT_IN_CLZ):
15415 CASE_INT_FN (BUILT_IN_CLRSB):
15416 case BUILT_IN_BSWAP32:
15417 case BUILT_IN_BSWAP64:
15418 /* Always true. */
15419 return true;
15421 CASE_FLT_FN (BUILT_IN_SQRT):
15422 /* sqrt(-0.0) is -0.0. */
15423 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15424 return true;
15425 return tree_expr_nonnegative_warnv_p (arg0,
15426 strict_overflow_p);
15428 CASE_FLT_FN (BUILT_IN_ASINH):
15429 CASE_FLT_FN (BUILT_IN_ATAN):
15430 CASE_FLT_FN (BUILT_IN_ATANH):
15431 CASE_FLT_FN (BUILT_IN_CBRT):
15432 CASE_FLT_FN (BUILT_IN_CEIL):
15433 CASE_FLT_FN (BUILT_IN_ERF):
15434 CASE_FLT_FN (BUILT_IN_EXPM1):
15435 CASE_FLT_FN (BUILT_IN_FLOOR):
15436 CASE_FLT_FN (BUILT_IN_FMOD):
15437 CASE_FLT_FN (BUILT_IN_FREXP):
15438 CASE_FLT_FN (BUILT_IN_ICEIL):
15439 CASE_FLT_FN (BUILT_IN_IFLOOR):
15440 CASE_FLT_FN (BUILT_IN_IRINT):
15441 CASE_FLT_FN (BUILT_IN_IROUND):
15442 CASE_FLT_FN (BUILT_IN_LCEIL):
15443 CASE_FLT_FN (BUILT_IN_LDEXP):
15444 CASE_FLT_FN (BUILT_IN_LFLOOR):
15445 CASE_FLT_FN (BUILT_IN_LLCEIL):
15446 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15447 CASE_FLT_FN (BUILT_IN_LLRINT):
15448 CASE_FLT_FN (BUILT_IN_LLROUND):
15449 CASE_FLT_FN (BUILT_IN_LRINT):
15450 CASE_FLT_FN (BUILT_IN_LROUND):
15451 CASE_FLT_FN (BUILT_IN_MODF):
15452 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15453 CASE_FLT_FN (BUILT_IN_RINT):
15454 CASE_FLT_FN (BUILT_IN_ROUND):
15455 CASE_FLT_FN (BUILT_IN_SCALB):
15456 CASE_FLT_FN (BUILT_IN_SCALBLN):
15457 CASE_FLT_FN (BUILT_IN_SCALBN):
15458 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15459 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15460 CASE_FLT_FN (BUILT_IN_SINH):
15461 CASE_FLT_FN (BUILT_IN_TANH):
15462 CASE_FLT_FN (BUILT_IN_TRUNC):
15463 /* True if the 1st argument is nonnegative. */
15464 return tree_expr_nonnegative_warnv_p (arg0,
15465 strict_overflow_p);
15467 CASE_FLT_FN (BUILT_IN_FMAX):
15468 /* True if the 1st OR 2nd arguments are nonnegative. */
15469 return (tree_expr_nonnegative_warnv_p (arg0,
15470 strict_overflow_p)
15471 || (tree_expr_nonnegative_warnv_p (arg1,
15472 strict_overflow_p)));
15474 CASE_FLT_FN (BUILT_IN_FMIN):
15475 /* True if the 1st AND 2nd arguments are nonnegative. */
15476 return (tree_expr_nonnegative_warnv_p (arg0,
15477 strict_overflow_p)
15478 && (tree_expr_nonnegative_warnv_p (arg1,
15479 strict_overflow_p)));
15481 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15482 /* True if the 2nd argument is nonnegative. */
15483 return tree_expr_nonnegative_warnv_p (arg1,
15484 strict_overflow_p);
15486 CASE_FLT_FN (BUILT_IN_POWI):
15487 /* True if the 1st argument is nonnegative or the second
15488 argument is an even integer. */
15489 if (TREE_CODE (arg1) == INTEGER_CST
15490 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15491 return true;
15492 return tree_expr_nonnegative_warnv_p (arg0,
15493 strict_overflow_p);
15495 CASE_FLT_FN (BUILT_IN_POW):
15496 /* True if the 1st argument is nonnegative or the second
15497 argument is an even integer valued real. */
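/* E.g. pow (x, 2.0) is non-negative for any x, whereas
   pow (x, 2.5) is only known to be non-negative when x is. */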
15498 if (TREE_CODE (arg1) == REAL_CST)
15500 REAL_VALUE_TYPE c;
15501 HOST_WIDE_INT n;
15503 c = TREE_REAL_CST (arg1);
15504 n = real_to_integer (&c);
15505 if ((n & 1) == 0)
15507 REAL_VALUE_TYPE cint;
15508 real_from_integer (&cint, VOIDmode, n, SIGNED);
15509 if (real_identical (&c, &cint))
15510 return true;
15513 return tree_expr_nonnegative_warnv_p (arg0,
15514 strict_overflow_p);
15516 default:
15517 break;
15519 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15520 type);
15523 /* Return true if T is known to be non-negative. If the return
15524 value is based on the assumption that signed overflow is undefined,
15525 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15526 *STRICT_OVERFLOW_P. */
15528 static bool
15529 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15531 enum tree_code code = TREE_CODE (t);
15532 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15533 return true;
15535 switch (code)
15537 case TARGET_EXPR:
15539 tree temp = TARGET_EXPR_SLOT (t);
15540 t = TARGET_EXPR_INITIAL (t);
15542 /* If the initializer is non-void, then it's a normal expression
15543 that will be assigned to the slot. */
15544 if (!VOID_TYPE_P (t))
15545 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15547 /* Otherwise, the initializer sets the slot in some way. One common
15548 way is an assignment statement at the end of the initializer. */
15549 while (1)
15551 if (TREE_CODE (t) == BIND_EXPR)
15552 t = expr_last (BIND_EXPR_BODY (t));
15553 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15554 || TREE_CODE (t) == TRY_CATCH_EXPR)
15555 t = expr_last (TREE_OPERAND (t, 0));
15556 else if (TREE_CODE (t) == STATEMENT_LIST)
15557 t = expr_last (t);
15558 else
15559 break;
15561 if (TREE_CODE (t) == MODIFY_EXPR
15562 && TREE_OPERAND (t, 0) == temp)
15563 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15564 strict_overflow_p);
15566 return false;
15569 case CALL_EXPR:
15571 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15572 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15574 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15575 get_callee_fndecl (t),
15576 arg0,
15577 arg1,
15578 strict_overflow_p);
15580 case COMPOUND_EXPR:
15581 case MODIFY_EXPR:
15582 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15583 strict_overflow_p);
15584 case BIND_EXPR:
15585 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15586 strict_overflow_p);
15587 case SAVE_EXPR:
15588 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15589 strict_overflow_p);
15591 default:
15592 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15593 TREE_TYPE (t));
15596 /* We don't know sign of `t', so be conservative and return false. */
15597 return false;
15600 /* Return true if T is known to be non-negative. If the return
15601 value is based on the assumption that signed overflow is undefined,
15602 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15603 *STRICT_OVERFLOW_P. */
15605 bool
15606 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15608 enum tree_code code;
15609 if (t == error_mark_node)
15610 return false;
15612 code = TREE_CODE (t);
15613 switch (TREE_CODE_CLASS (code))
15615 case tcc_binary:
15616 case tcc_comparison:
15617 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15618 TREE_TYPE (t),
15619 TREE_OPERAND (t, 0),
15620 TREE_OPERAND (t, 1),
15621 strict_overflow_p);
15623 case tcc_unary:
15624 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15625 TREE_TYPE (t),
15626 TREE_OPERAND (t, 0),
15627 strict_overflow_p);
15629 case tcc_constant:
15630 case tcc_declaration:
15631 case tcc_reference:
15632 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15634 default:
15635 break;
15638 switch (code)
15640 case TRUTH_AND_EXPR:
15641 case TRUTH_OR_EXPR:
15642 case TRUTH_XOR_EXPR:
15643 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15644 TREE_TYPE (t),
15645 TREE_OPERAND (t, 0),
15646 TREE_OPERAND (t, 1),
15647 strict_overflow_p);
15648 case TRUTH_NOT_EXPR:
15649 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15650 TREE_TYPE (t),
15651 TREE_OPERAND (t, 0),
15652 strict_overflow_p);
15654 case COND_EXPR:
15655 case CONSTRUCTOR:
15656 case OBJ_TYPE_REF:
15657 case ASSERT_EXPR:
15658 case ADDR_EXPR:
15659 case WITH_SIZE_EXPR:
15660 case SSA_NAME:
15661 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15663 default:
15664 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15668 /* Return true if `t' is known to be non-negative. Handle warnings
15669 about undefined signed overflow. */
15671 bool
15672 tree_expr_nonnegative_p (tree t)
15674 bool ret, strict_overflow_p;
15676 strict_overflow_p = false;
15677 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15678 if (strict_overflow_p)
15679 fold_overflow_warning (("assuming signed overflow does not occur when "
15680 "determining that expression is always "
15681 "non-negative"),
15682 WARN_STRICT_OVERFLOW_MISC);
15683 return ret;
15687 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15688 For floating point we further ensure that T is not denormal.
15689 Similar logic is present in nonzero_address in rtlanal.h.
15691 If the return value is based on the assumption that signed overflow
15692 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15693 change *STRICT_OVERFLOW_P. */
15695 bool
15696 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15697 bool *strict_overflow_p)
15699 switch (code)
15701 case ABS_EXPR:
15702 return tree_expr_nonzero_warnv_p (op0,
15703 strict_overflow_p);
15705 case NOP_EXPR:
15707 tree inner_type = TREE_TYPE (op0);
15708 tree outer_type = type;
15710 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15711 && tree_expr_nonzero_warnv_p (op0,
15712 strict_overflow_p));
15714 break;
15716 case NON_LVALUE_EXPR:
15717 return tree_expr_nonzero_warnv_p (op0,
15718 strict_overflow_p);
15720 default:
15721 break;
15724 return false;
15727 /* Return true when (CODE OP0 OP1) is known to be nonzero.  For
15728 floating point we further ensure that the value is not denormal.
15729 Similar logic is present in nonzero_address in rtlanal.c.
15731 If the return value is based on the assumption that signed overflow
15732 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15733 change *STRICT_OVERFLOW_P. */
15735 bool
15736 tree_binary_nonzero_warnv_p (enum tree_code code,
15737 tree type,
15738 tree op0,
15739 tree op1, bool *strict_overflow_p)
15741 bool sub_strict_overflow_p;
15742 switch (code)
15744 case POINTER_PLUS_EXPR:
15745 case PLUS_EXPR:
15746 if (TYPE_OVERFLOW_UNDEFINED (type))
15748 /* In the presence of negative values it is hard
15749 to say anything definite.  */
15750 sub_strict_overflow_p = false;
15751 if (!tree_expr_nonnegative_warnv_p (op0,
15752 &sub_strict_overflow_p)
15753 || !tree_expr_nonnegative_warnv_p (op1,
15754 &sub_strict_overflow_p))
15755 return false;
15756 /* One of the operands must be positive and the other non-negative.  */
15757 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15758 overflows, on a twos-complement machine the sum of two
15759 nonnegative numbers can never be zero. */
15760 return (tree_expr_nonzero_warnv_p (op0,
15761 strict_overflow_p)
15762 || tree_expr_nonzero_warnv_p (op1,
15763 strict_overflow_p));
15765 break;
15767 case MULT_EXPR:
15768 if (TYPE_OVERFLOW_UNDEFINED (type))
15770 if (tree_expr_nonzero_warnv_p (op0,
15771 strict_overflow_p)
15772 && tree_expr_nonzero_warnv_p (op1,
15773 strict_overflow_p))
15775 *strict_overflow_p = true;
15776 return true;
15779 break;
15781 case MIN_EXPR:
15782 sub_strict_overflow_p = false;
15783 if (tree_expr_nonzero_warnv_p (op0,
15784 &sub_strict_overflow_p)
15785 && tree_expr_nonzero_warnv_p (op1,
15786 &sub_strict_overflow_p))
15788 if (sub_strict_overflow_p)
15789 *strict_overflow_p = true;
15791 break;
15793 case MAX_EXPR:
15794 sub_strict_overflow_p = false;
15795 if (tree_expr_nonzero_warnv_p (op0,
15796 &sub_strict_overflow_p))
15798 if (sub_strict_overflow_p)
15799 *strict_overflow_p = true;
15801 /* If both operands are nonzero, then MAX must be too.  */
15802 if (tree_expr_nonzero_warnv_p (op1,
15803 strict_overflow_p))
15804 return true;
15806 /* MAX where operand 0 is positive is positive. */
15807 return tree_expr_nonnegative_warnv_p (op0,
15808 strict_overflow_p);
15810 /* MAX where operand 1 is positive is positive. */
15811 else if (tree_expr_nonzero_warnv_p (op1,
15812 &sub_strict_overflow_p)
15813 && tree_expr_nonnegative_warnv_p (op1,
15814 &sub_strict_overflow_p))
15816 if (sub_strict_overflow_p)
15817 *strict_overflow_p = true;
15818 return true;
15820 break;
15822 case BIT_IOR_EXPR:
15823 return (tree_expr_nonzero_warnv_p (op1,
15824 strict_overflow_p)
15825 || tree_expr_nonzero_warnv_p (op0,
15826 strict_overflow_p));
15828 default:
15829 break;
15832 return false;
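/* Sketch (editorial addition, not in the original source): for a
   signed PLUS_EXPR with undefined overflow, the case above proves the
   sum nonzero only when both operands are non-negative and at least
   one is nonzero.  With a hypothetical tree N known non-negative:

     bool sub = false;
     tree one = build_int_cst (integer_type_node, 1);
     => tree_binary_nonzero_warnv_p (PLUS_EXPR, integer_type_node,
                                     n, one, &sub) returns true.  */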
15835 /* Return true when T is known to be nonzero.  For floating point we
15836 further ensure that T is not denormal.  Similar logic is present
15837 in nonzero_address in rtlanal.c.
15839 If the return value is based on the assumption that signed overflow
15840 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15841 change *STRICT_OVERFLOW_P. */
15843 bool
15844 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15846 bool sub_strict_overflow_p;
15847 switch (TREE_CODE (t))
15849 case INTEGER_CST:
15850 return !integer_zerop (t);
15852 case ADDR_EXPR:
15854 tree base = TREE_OPERAND (t, 0);
15856 if (!DECL_P (base))
15857 base = get_base_address (base);
15859 if (!base)
15860 return false;
15862 /* For objects in the symbol table, check whether we know they are
15863 non-zero.  Don't do anything for variables and functions before the
15864 symtab is built; it is quite possible that they will be declared weak later.  */
15865 if (DECL_P (base) && decl_in_symtab_p (base))
15867 struct symtab_node *symbol;
15869 symbol = symtab_node::get_create (base);
15870 if (symbol)
15871 return symbol->nonzero_address ();
15872 else
15873 return false;
15876 /* Function local objects are never NULL. */
15877 if (DECL_P (base)
15878 && (DECL_CONTEXT (base)
15879 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15880 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15881 return true;
15883 /* Constants are never weak. */
15884 if (CONSTANT_CLASS_P (base))
15885 return true;
15887 return false;
15890 case COND_EXPR:
15891 sub_strict_overflow_p = false;
15892 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15893 &sub_strict_overflow_p)
15894 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15895 &sub_strict_overflow_p))
15897 if (sub_strict_overflow_p)
15898 *strict_overflow_p = true;
15899 return true;
15901 break;
15903 default:
15904 break;
15906 return false;
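/* Example (editor's sketch, not from the original source): the
   ADDR_EXPR case lets the folder prove that the address of a
   function-local auto variable compares unequal to NULL.  With DECL a
   hypothetical auto VAR_DECL of the current function:

     bool sub = false;
     tree addr = build_fold_addr_expr (decl);
     => tree_single_nonzero_warnv_p (addr, &sub) returns true via
        the "function local objects are never NULL" check.  */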
15909 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15910 attempt to fold the expression to a constant without modifying TYPE,
15911 OP0 or OP1.
15913 If the expression could be simplified to a constant, then return
15914 the constant. If the expression would not be simplified to a
15915 constant, then return NULL_TREE. */
15917 tree
15918 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15920 tree tem = fold_binary (code, type, op0, op1);
15921 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
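/* Illustrative usage (an editor's sketch, not part of the original
   source):

     tree a = build_int_cst (sizetype, 6);
     tree b = build_int_cst (sizetype, 7);
     tree t = fold_binary_to_constant (PLUS_EXPR, sizetype, a, b);
     => t is the INTEGER_CST 13; it would have been NULL_TREE had the
        operands not folded to a constant.  */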
15924 /* Given the components of a unary expression CODE, TYPE and OP0,
15925 attempt to fold the expression to a constant without modifying
15926 TYPE or OP0.
15928 If the expression could be simplified to a constant, then return
15929 the constant. If the expression would not be simplified to a
15930 constant, then return NULL_TREE. */
15932 tree
15933 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15935 tree tem = fold_unary (code, type, op0);
15936 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
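/* Likewise for the unary variant (editorial sketch):

     tree five = build_int_cst (integer_type_node, 5);
     tree t = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
                                      five);
     => t is the INTEGER_CST -5.  */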
15939 /* If EXP represents referencing an element in a constant string
15940 (either via pointer arithmetic or array indexing), return the
15941 tree representing the value accessed, otherwise return NULL. */
15943 tree
15944 fold_read_from_constant_string (tree exp)
15946 if ((TREE_CODE (exp) == INDIRECT_REF
15947 || TREE_CODE (exp) == ARRAY_REF)
15948 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15950 tree exp1 = TREE_OPERAND (exp, 0);
15951 tree index;
15952 tree string;
15953 location_t loc = EXPR_LOCATION (exp);
15955 if (TREE_CODE (exp) == INDIRECT_REF)
15956 string = string_constant (exp1, &index);
15957 else
15959 tree low_bound = array_ref_low_bound (exp);
15960 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15962 /* Optimize the special case of a zero lower bound.
15964 We convert the low_bound to sizetype to avoid some problems
15965 with constant folding.  (E.g. suppose the lower bound is 1,
15966 and its mode is QI.  Without the conversion, (ARRAY
15967 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15968 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
15969 if (! integer_zerop (low_bound))
15970 index = size_diffop_loc (loc, index,
15971 fold_convert_loc (loc, sizetype, low_bound));
15973 string = exp1;
15976 if (string
15977 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15978 && TREE_CODE (string) == STRING_CST
15979 && TREE_CODE (index) == INTEGER_CST
15980 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15981 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15982 == MODE_INT)
15983 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15984 return build_int_cst_type (TREE_TYPE (exp),
15985 (TREE_STRING_POINTER (string)
15986 [TREE_INT_CST_LOW (index)]));
15988 return NULL;
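/* Example (editorial sketch, not in the original source): for the C
   expression "hello"[1] the front end builds an ARRAY_REF into a
   STRING_CST, and the checks above fold the access to a character
   constant:

     => fold_read_from_constant_string returns the INTEGER_CST 101
        ('e') with the type of the original reference.  */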
15991 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15992 an integer constant, real, or fixed-point constant.
15994 TYPE is the type of the result. */
15996 static tree
15997 fold_negate_const (tree arg0, tree type)
15999 tree t = NULL_TREE;
16001 switch (TREE_CODE (arg0))
16003 case INTEGER_CST:
16005 bool overflow;
16006 wide_int val = wi::neg (arg0, &overflow);
16007 t = force_fit_type (type, val, 1,
16008 (overflow | TREE_OVERFLOW (arg0))
16009 && !TYPE_UNSIGNED (type));
16010 break;
16013 case REAL_CST:
16014 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16015 break;
16017 case FIXED_CST:
16019 FIXED_VALUE_TYPE f;
16020 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16021 &(TREE_FIXED_CST (arg0)), NULL,
16022 TYPE_SATURATING (type));
16023 t = build_fixed (type, f);
16024 /* Propagate overflow flags. */
16025 if (overflow_p | TREE_OVERFLOW (arg0))
16026 TREE_OVERFLOW (t) = 1;
16027 break;
16030 default:
16031 gcc_unreachable ();
16034 return t;
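/* Worked example (editor's sketch, not from the original source):
   wi::neg reports overflow, so negating the minimum of a signed type
   produces a constant with TREE_OVERFLOW set:

     tree m = TYPE_MIN_VALUE (integer_type_node);
     tree t = fold_negate_const (m, integer_type_node);
     => the value wraps back to INT_MIN and TREE_OVERFLOW (t) is 1.  */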
16037 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16038 an integer constant or real constant.
16040 TYPE is the type of the result. */
16042 tree
16043 fold_abs_const (tree arg0, tree type)
16045 tree t = NULL_TREE;
16047 switch (TREE_CODE (arg0))
16049 case INTEGER_CST:
16051 /* If the value is unsigned or non-negative, then the absolute value
16052 is the same as the ordinary value. */
16053 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
16054 t = arg0;
16056 /* If the value is negative, then the absolute value is
16057 its negation. */
16058 else
16060 bool overflow;
16061 wide_int val = wi::neg (arg0, &overflow);
16062 t = force_fit_type (type, val, -1,
16063 overflow | TREE_OVERFLOW (arg0));
16066 break;
16068 case REAL_CST:
16069 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16070 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16071 else
16072 t = arg0;
16073 break;
16075 default:
16076 gcc_unreachable ();
16079 return t;
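/* Sketch (editorial, not part of the original source): the
   INTEGER_CST case returns the operand itself when it is already
   non-negative and its negation otherwise:

     tree m3 = build_int_cst (integer_type_node, -3);
     => fold_abs_const (m3, integer_type_node) is the INTEGER_CST 3.  */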
16082 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16083 constant. TYPE is the type of the result. */
16085 static tree
16086 fold_not_const (const_tree arg0, tree type)
16088 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16090 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
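/* Sketch (editorial, not in the original source): wi::bit_not flips
   every bit of the constant, so for example

     => fold_not_const (integer_zero_node, integer_type_node)
        yields the all-ones INTEGER_CST, i.e. -1.  */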
16093 /* Given CODE, a relational operator, the target type TYPE, and two
16094 constant operands OP0 and OP1, return the result of the
16095 relational operation. If the result is not a compile time
16096 constant, then return NULL_TREE. */
16098 static tree
16099 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16101 int result, invert;
16103 /* From here on, the only cases we handle are when the result is
16104 known to be a constant. */
16106 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16108 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16109 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16111 /* Handle the cases where either operand is a NaN. */
16112 if (real_isnan (c0) || real_isnan (c1))
16114 switch (code)
16116 case EQ_EXPR:
16117 case ORDERED_EXPR:
16118 result = 0;
16119 break;
16121 case NE_EXPR:
16122 case UNORDERED_EXPR:
16123 case UNLT_EXPR:
16124 case UNLE_EXPR:
16125 case UNGT_EXPR:
16126 case UNGE_EXPR:
16127 case UNEQ_EXPR:
16128 result = 1;
16129 break;
16131 case LT_EXPR:
16132 case LE_EXPR:
16133 case GT_EXPR:
16134 case GE_EXPR:
16135 case LTGT_EXPR:
16136 if (flag_trapping_math)
16137 return NULL_TREE;
16138 result = 0;
16139 break;
16141 default:
16142 gcc_unreachable ();
16145 return constant_boolean_node (result, type);
16148 return constant_boolean_node (real_compare (code, c0, c1), type);
16151 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16153 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16154 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16155 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16158 /* Handle equality/inequality of complex constants. */
16159 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16161 tree rcond = fold_relational_const (code, type,
16162 TREE_REALPART (op0),
16163 TREE_REALPART (op1));
16164 tree icond = fold_relational_const (code, type,
16165 TREE_IMAGPART (op0),
16166 TREE_IMAGPART (op1));
16167 if (code == EQ_EXPR)
16168 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16169 else if (code == NE_EXPR)
16170 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16171 else
16172 return NULL_TREE;
16175 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16177 unsigned count = VECTOR_CST_NELTS (op0);
16178 tree *elts = XALLOCAVEC (tree, count);
16179 gcc_assert (VECTOR_CST_NELTS (op1) == count
16180 && TYPE_VECTOR_SUBPARTS (type) == count);
16182 for (unsigned i = 0; i < count; i++)
16184 tree elem_type = TREE_TYPE (type);
16185 tree elem0 = VECTOR_CST_ELT (op0, i);
16186 tree elem1 = VECTOR_CST_ELT (op1, i);
16188 tree tem = fold_relational_const (code, elem_type,
16189 elem0, elem1);
16191 if (tem == NULL_TREE)
16192 return NULL_TREE;
16194 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16197 return build_vector (type, elts);
16200 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16202 To compute GT, swap the arguments and do LT.
16203 To compute GE, do LT and invert the result.
16204 To compute LE, swap the arguments, do LT and invert the result.
16205 To compute NE, do EQ and invert the result.
16207 Therefore, the code below must handle only EQ and LT. */
16209 if (code == LE_EXPR || code == GT_EXPR)
16211 tree tem = op0;
16212 op0 = op1;
16213 op1 = tem;
16214 code = swap_tree_comparison (code);
16217 /* Note that it is safe to invert for real values here because we
16218 have already handled the one case in which it matters.  */
16220 invert = 0;
16221 if (code == NE_EXPR || code == GE_EXPR)
16223 invert = 1;
16224 code = invert_tree_comparison (code, false);
16227 /* Compute a result for LT or EQ if args permit;
16228 otherwise return NULL_TREE.  */
16229 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16231 if (code == EQ_EXPR)
16232 result = tree_int_cst_equal (op0, op1);
16233 else
16234 result = tree_int_cst_lt (op0, op1);
16236 else
16237 return NULL_TREE;
16239 if (invert)
16240 result ^= 1;
16241 return constant_boolean_node (result, type);
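/* Illustrative sketch (editorial addition): by the canonicalization
   above, GE is computed as LT with the result inverted.  For
   hypothetical operands:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     => fold_relational_const (GE_EXPR, boolean_type_node, two, three)
        computes 2 < 3, inverts it, and returns boolean_false_node.  */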
16244 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16245 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16246 itself. */
16248 tree
16249 fold_build_cleanup_point_expr (tree type, tree expr)
16251 /* If the expression does not have side effects then we don't have to wrap
16252 it with a cleanup point expression. */
16253 if (!TREE_SIDE_EFFECTS (expr))
16254 return expr;
16256 /* If the expression is a return, check whether the expression inside
16257 the return, or the right-hand side of the modify expression inside the
16258 return, has side effects.  If either of them has none, we don't need
16259 to wrap the expression in a cleanup point expression.  Note we don't
16260 check the left-hand side of the modify because it should always be a return decl.  */
16261 if (TREE_CODE (expr) == RETURN_EXPR)
16263 tree op = TREE_OPERAND (expr, 0);
16264 if (!op || !TREE_SIDE_EFFECTS (op))
16265 return expr;
16266 op = TREE_OPERAND (op, 1);
16267 if (!TREE_SIDE_EFFECTS (op))
16268 return expr;
16271 return build1 (CLEANUP_POINT_EXPR, type, expr);
16274 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16275 of an indirection through OP0, or NULL_TREE if no simplification is
16276 possible. */
16278 tree
16279 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16281 tree sub = op0;
16282 tree subtype;
16284 STRIP_NOPS (sub);
16285 subtype = TREE_TYPE (sub);
16286 if (!POINTER_TYPE_P (subtype))
16287 return NULL_TREE;
16289 if (TREE_CODE (sub) == ADDR_EXPR)
16291 tree op = TREE_OPERAND (sub, 0);
16292 tree optype = TREE_TYPE (op);
16293 /* *&CONST_DECL -> the value of the const decl.  */
16294 if (TREE_CODE (op) == CONST_DECL)
16295 return DECL_INITIAL (op);
16296 /* *&p => p; make sure to handle *&"str"[cst] here. */
16297 if (type == optype)
16299 tree fop = fold_read_from_constant_string (op);
16300 if (fop)
16301 return fop;
16302 else
16303 return op;
16305 /* *(foo *)&fooarray => fooarray[0] */
16306 else if (TREE_CODE (optype) == ARRAY_TYPE
16307 && type == TREE_TYPE (optype)
16308 && (!in_gimple_form
16309 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16311 tree type_domain = TYPE_DOMAIN (optype);
16312 tree min_val = size_zero_node;
16313 if (type_domain && TYPE_MIN_VALUE (type_domain))
16314 min_val = TYPE_MIN_VALUE (type_domain);
16315 if (in_gimple_form
16316 && TREE_CODE (min_val) != INTEGER_CST)
16317 return NULL_TREE;
16318 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16319 NULL_TREE, NULL_TREE);
16321 /* *(foo *)&complexfoo => __real__ complexfoo */
16322 else if (TREE_CODE (optype) == COMPLEX_TYPE
16323 && type == TREE_TYPE (optype))
16324 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16325 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16326 else if (TREE_CODE (optype) == VECTOR_TYPE
16327 && type == TREE_TYPE (optype))
16329 tree part_width = TYPE_SIZE (type);
16330 tree index = bitsize_int (0);
16331 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16335 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16336 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16338 tree op00 = TREE_OPERAND (sub, 0);
16339 tree op01 = TREE_OPERAND (sub, 1);
16341 STRIP_NOPS (op00);
16342 if (TREE_CODE (op00) == ADDR_EXPR)
16344 tree op00type;
16345 op00 = TREE_OPERAND (op00, 0);
16346 op00type = TREE_TYPE (op00);
16348 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16349 if (TREE_CODE (op00type) == VECTOR_TYPE
16350 && type == TREE_TYPE (op00type))
16352 HOST_WIDE_INT offset = tree_to_shwi (op01);
16353 tree part_width = TYPE_SIZE (type);
16354 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
16355 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16356 tree index = bitsize_int (indexi);
16358 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16359 return fold_build3_loc (loc,
16360 BIT_FIELD_REF, type, op00,
16361 part_width, index);
16364 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16365 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16366 && type == TREE_TYPE (op00type))
16368 tree size = TYPE_SIZE_UNIT (type);
16369 if (tree_int_cst_equal (size, op01))
16370 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16372 /* ((foo *)&fooarray)[1] => fooarray[1] */
16373 else if (TREE_CODE (op00type) == ARRAY_TYPE
16374 && type == TREE_TYPE (op00type))
16376 tree type_domain = TYPE_DOMAIN (op00type);
16377 tree min_val = size_zero_node;
16378 if (type_domain && TYPE_MIN_VALUE (type_domain))
16379 min_val = TYPE_MIN_VALUE (type_domain);
16380 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16381 TYPE_SIZE_UNIT (type));
16382 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16383 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16384 NULL_TREE, NULL_TREE);
16389 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16390 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16391 && type == TREE_TYPE (TREE_TYPE (subtype))
16392 && (!in_gimple_form
16393 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16395 tree type_domain;
16396 tree min_val = size_zero_node;
16397 sub = build_fold_indirect_ref_loc (loc, sub);
16398 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16399 if (type_domain && TYPE_MIN_VALUE (type_domain))
16400 min_val = TYPE_MIN_VALUE (type_domain);
16401 if (in_gimple_form
16402 && TREE_CODE (min_val) != INTEGER_CST)
16403 return NULL_TREE;
16404 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16405 NULL_TREE);
16408 return NULL_TREE;
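/* Example (editor's sketch, not from the original source): given a
   hypothetical variable ARR of type int[8], the "*(foo *)&fooarray"
   case rewrites the indirection as an array reference:

     tree addr = build_fold_addr_expr (arr);
     tree t = fold_indirect_ref_1 (loc, integer_type_node, addr);
     => t is the ARRAY_REF arr[0].  */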
16411 /* Builds an expression for an indirection through T, simplifying some
16412 cases. */
16414 tree
16415 build_fold_indirect_ref_loc (location_t loc, tree t)
16417 tree type = TREE_TYPE (TREE_TYPE (t));
16418 tree sub = fold_indirect_ref_1 (loc, type, t);
16420 if (sub)
16421 return sub;
16423 return build1_loc (loc, INDIRECT_REF, type, t);
16426 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16428 tree
16429 fold_indirect_ref_loc (location_t loc, tree t)
16431 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16433 if (sub)
16434 return sub;
16435 else
16436 return t;
16439 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16440 whose result is ignored. The type of the returned tree need not be
16441 the same as the original expression. */
16443 tree
16444 fold_ignored_result (tree t)
16446 if (!TREE_SIDE_EFFECTS (t))
16447 return integer_zero_node;
16449 for (;;)
16450 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16452 case tcc_unary:
16453 t = TREE_OPERAND (t, 0);
16454 break;
16456 case tcc_binary:
16457 case tcc_comparison:
16458 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16459 t = TREE_OPERAND (t, 0);
16460 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16461 t = TREE_OPERAND (t, 1);
16462 else
16463 return t;
16464 break;
16466 case tcc_expression:
16467 switch (TREE_CODE (t))
16469 case COMPOUND_EXPR:
16470 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16471 return t;
16472 t = TREE_OPERAND (t, 0);
16473 break;
16475 case COND_EXPR:
16476 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16477 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16478 return t;
16479 t = TREE_OPERAND (t, 0);
16480 break;
16482 default:
16483 return t;
16485 break;
16487 default:
16488 return t;
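/* Sketch (editorial, not in the original file): wrappers whose value
   is irrelevant are peeled off while side effects are kept.  With CALL
   a hypothetical CALL_EXPR whose negated result is ignored:

     tree e = fold_build1 (NEGATE_EXPR, integer_type_node, call);
     => fold_ignored_result (e) takes the tcc_unary case and returns
        CALL, dropping the NEGATE_EXPR.  */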
16492 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16494 tree
16495 round_up_loc (location_t loc, tree value, unsigned int divisor)
16497 tree div = NULL_TREE;
16499 if (divisor == 1)
16500 return value;
16502 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16503 have to do anything.  Only do this when VALUE is not a constant,
16504 because for a constant the check is more expensive than simply
16505 performing the rounding.  */
16506 if (TREE_CODE (value) != INTEGER_CST)
16508 div = build_int_cst (TREE_TYPE (value), divisor);
16510 if (multiple_of_p (TREE_TYPE (value), value, div))
16511 return value;
16514 /* If divisor is a power of two, simplify this to bit manipulation. */
16515 if (divisor == (divisor & -divisor))
16517 if (TREE_CODE (value) == INTEGER_CST)
16519 wide_int val = value;
16520 bool overflow_p;
16522 if ((val & (divisor - 1)) == 0)
16523 return value;
16525 overflow_p = TREE_OVERFLOW (value);
16526 val &= ~(divisor - 1);
16527 val += divisor;
16528 if (val == 0)
16529 overflow_p = true;
16531 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16533 else
16535 tree t;
16537 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16538 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16539 t = build_int_cst (TREE_TYPE (value), -divisor);
16540 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16543 else
16545 if (!div)
16546 div = build_int_cst (TREE_TYPE (value), divisor);
16547 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16548 value = size_binop_loc (loc, MULT_EXPR, value, div);
16551 return value;
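/* Worked example (editorial, not part of the original source): with a
   power-of-two divisor the bit-twiddling path computes
   (VALUE + DIVISOR - 1) & -DIVISOR:

     tree v = build_int_cst (sizetype, 13);
     => round_up_loc (loc, v, 8) folds to 16, since
        (13 + 7) & -8 == 16.  */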
16554 /* Likewise, but round down. */
16556 tree
16557 round_down_loc (location_t loc, tree value, int divisor)
16559 tree div = NULL_TREE;
16561 gcc_assert (divisor > 0);
16562 if (divisor == 1)
16563 return value;
16565 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16566 have to do anything.  Only do this when VALUE is not a constant,
16567 because for a constant the check is more expensive than simply
16568 performing the rounding.  */
16569 if (TREE_CODE (value) != INTEGER_CST)
16571 div = build_int_cst (TREE_TYPE (value), divisor);
16573 if (multiple_of_p (TREE_TYPE (value), value, div))
16574 return value;
16577 /* If divisor is a power of two, simplify this to bit manipulation. */
16578 if (divisor == (divisor & -divisor))
16580 tree t;
16582 t = build_int_cst (TREE_TYPE (value), -divisor);
16583 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16585 else
16587 if (!div)
16588 div = build_int_cst (TREE_TYPE (value), divisor);
16589 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16590 value = size_binop_loc (loc, MULT_EXPR, value, div);
16593 return value;
16596 /* Returns the pointer to the base of the object addressed by EXP and
16597 extracts the information about the offset of the access, storing it
16598 in PBITPOS and POFFSET.  */
16600 static tree
16601 split_address_to_core_and_offset (tree exp,
16602 HOST_WIDE_INT *pbitpos, tree *poffset)
16604 tree core;
16605 machine_mode mode;
16606 int unsignedp, reversep, volatilep;
16607 HOST_WIDE_INT bitsize;
16608 location_t loc = EXPR_LOCATION (exp);
16610 if (TREE_CODE (exp) == ADDR_EXPR)
16612 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16613 poffset, &mode, &unsignedp, &reversep,
16614 &volatilep, false);
16615 core = build_fold_addr_expr_loc (loc, core);
16617 else
16619 core = exp;
16620 *pbitpos = 0;
16621 *poffset = NULL_TREE;
16624 return core;
16627 /* Returns true if addresses of E1 and E2 differ by a constant, false
16628 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16630 bool
16631 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16633 tree core1, core2;
16634 HOST_WIDE_INT bitpos1, bitpos2;
16635 tree toffset1, toffset2, tdiff, type;
16637 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16638 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16640 if (bitpos1 % BITS_PER_UNIT != 0
16641 || bitpos2 % BITS_PER_UNIT != 0
16642 || !operand_equal_p (core1, core2, 0))
16643 return false;
16645 if (toffset1 && toffset2)
16647 type = TREE_TYPE (toffset1);
16648 if (type != TREE_TYPE (toffset2))
16649 toffset2 = fold_convert (type, toffset2);
16651 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16652 if (!cst_and_fits_in_hwi (tdiff))
16653 return false;
16655 *diff = int_cst_value (tdiff);
16657 else if (toffset1 || toffset2)
16659 /* If only one of the offsets is non-constant, the difference cannot
16660 be a constant. */
16661 return false;
16663 else
16664 *diff = 0;
16666 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16667 return true;
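/* Example (editor's sketch, not from the original source): for two
   hypothetical addresses into the same array of 4-byte ints,
   e1 = &a[3] and e2 = &a[1]:

     HOST_WIDE_INT d;
     => ptr_difference_const (e1, e2, &d) returns true and sets
        d to 8, the difference of the byte offsets.  */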
16670 /* Simplify the floating point expression EXP when the sign of the
16671 result is not significant. Return NULL_TREE if no simplification
16672 is possible. */
16674 tree
16675 fold_strip_sign_ops (tree exp)
16677 tree arg0, arg1;
16678 location_t loc = EXPR_LOCATION (exp);
16680 switch (TREE_CODE (exp))
16682 case ABS_EXPR:
16683 case NEGATE_EXPR:
16684 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16685 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16687 case MULT_EXPR:
16688 case RDIV_EXPR:
16689 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16690 return NULL_TREE;
16691 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16692 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16693 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16694 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16695 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16696 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16697 break;
16699 case COMPOUND_EXPR:
16700 arg0 = TREE_OPERAND (exp, 0);
16701 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16702 if (arg1)
16703 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16704 break;
16706 case COND_EXPR:
16707 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16708 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16709 if (arg0 || arg1)
16710 return fold_build3_loc (loc,
16711 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16712 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16713 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16714 break;
16716 case CALL_EXPR:
16718 const enum built_in_function fcode = builtin_mathfn_code (exp);
16719 switch (fcode)
16721 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16722 /* Strip the copysign function call and return the first argument.  */
16723 arg0 = CALL_EXPR_ARG (exp, 0);
16724 arg1 = CALL_EXPR_ARG (exp, 1);
16725 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16727 default:
16728 /* Strip sign ops from the argument of "odd" math functions. */
16729 if (negate_mathfn_p (fcode))
16731 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16732 if (arg0)
16733 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16735 break;
16738 break;
16740 default:
16741 break;
16743 return NULL_TREE;
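/* Sketch (editorial, not in the original source): callers such as the
   cos/fabs builtin folders use this when only the magnitude of the
   result matters, e.g. to rewrite cos (-x) as cos (x).  For a
   hypothetical NEGATE_EXPR argument NEG_X = -X:

     tree stripped = fold_strip_sign_ops (neg_x);
     => stripped is X, and the caller rebuilds the call as cos (x).  */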