[official-gcc.git] / gcc / fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
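/* A hypothetical usage sketch (not part of the original file) of the
   entry points described above; kept under #if 0 so it does not affect
   compilation.  size_int wraps an integer in a sizetype INTEGER_CST and
   size_binop folds arithmetic on such constants immediately.  */
#if 0
tree four = size_int (4);                          /* sizetype constant 4.  */
tree eight = size_int (8);                         /* sizetype constant 8.  */
tree sum = size_binop (PLUS_EXPR, four, eight);    /* INTEGER_CST 12.  */
#endif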
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "predict.h"
62 #include "vec.h"
63 #include "hashtab.h"
64 #include "hash-set.h"
65 #include "machmode.h"
66 #include "hard-reg-set.h"
67 #include "input.h"
68 #include "function.h"
69 #include "basic-block.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "tree-eh.h"
73 #include "gimple-expr.h"
74 #include "is-a.h"
75 #include "gimple.h"
76 #include "gimplify.h"
77 #include "tree-dfa.h"
78 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
79 #include "builtins.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85 #include "optabs.h"
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static tree const_binop (enum tree_code, tree, tree);
119 static enum comparison_code comparison_to_compcode (enum tree_code);
120 static enum tree_code compcode_to_comparison (enum comparison_code);
121 static int operand_equal_for_comparison_p (tree, tree, tree);
122 static int twoval_comparison_p (tree, tree *, tree *, int *);
123 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
124 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
125 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
126 static tree make_bit_field_ref (location_t, tree, tree,
127 HOST_WIDE_INT, HOST_WIDE_INT, int);
128 static tree optimize_bit_field_compare (location_t, enum tree_code,
129 tree, tree, tree);
130 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
131 HOST_WIDE_INT *,
132 machine_mode *, int *, int *,
133 tree *, tree *);
134 static tree sign_bit_p (tree, const_tree);
135 static int simple_operand_p (const_tree);
136 static bool simple_operand_p_2 (tree);
137 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
138 static tree range_predecessor (tree);
139 static tree range_successor (tree);
140 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
142 static tree unextend (tree, int, int, tree);
143 static tree optimize_minmax_comparison (location_t, enum tree_code,
144 tree, tree, tree);
145 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
146 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
147 static tree fold_binary_op_with_conditional_arg (location_t,
148 enum tree_code, tree,
149 tree, tree,
150 tree, tree, int);
151 static tree fold_mathfn_compare (location_t,
152 enum built_in_function, enum tree_code,
153 tree, tree, tree);
154 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
155 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
156 static bool reorder_operands_p (const_tree, const_tree);
157 static tree fold_negate_const (tree, tree);
158 static tree fold_not_const (const_tree, tree);
159 static tree fold_relational_const (enum tree_code, tree, tree, tree);
160 static tree fold_convert_const (enum tree_code, tree, tree);
162 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
163 Otherwise, return LOC. */
165 static location_t
166 expr_location_or (tree t, location_t loc)
168 location_t tloc = EXPR_LOCATION (t);
169 return tloc == UNKNOWN_LOCATION ? loc : tloc;
172 /* Similar to protected_set_expr_location, but never modify x in place,
173 if location can and needs to be set, unshare it. */
175 static inline tree
176 protected_set_expr_location_unshare (tree x, location_t loc)
178 if (CAN_HAVE_LOCATION_P (x)
179 && EXPR_LOCATION (x) != loc
180 && !(TREE_CODE (x) == SAVE_EXPR
181 || TREE_CODE (x) == TARGET_EXPR
182 || TREE_CODE (x) == BIND_EXPR))
184 x = copy_node (x);
185 SET_EXPR_LOCATION (x, loc);
187 return x;
190 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
191 division and returns the quotient. Otherwise returns
192 NULL_TREE. */
194 tree
195 div_if_zero_remainder (const_tree arg1, const_tree arg2)
197 widest_int quo;
199 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
200 SIGNED, &quo))
201 return wide_int_to_tree (TREE_TYPE (arg1), quo);
203 return NULL_TREE;
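/* A hypothetical usage sketch (not part of the original file): with
   INTEGER_CST operands, the helper above either performs the exact
   division or refuses.  Kept under #if 0 so it does not affect
   compilation.  */
#if 0
tree twelve = build_int_cst (integer_type_node, 12);
tree four = build_int_cst (integer_type_node, 4);
tree five = build_int_cst (integer_type_node, 5);
tree q1 = div_if_zero_remainder (twelve, four);  /* INTEGER_CST 3.  */
tree q2 = div_if_zero_remainder (twelve, five);  /* NULL_TREE: remainder 2.  */
#endif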
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
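/* A hypothetical sketch (not part of the original file) of the deferral
   protocol described above: callers bracket folding with defer/undefer,
   and any warning recorded by fold_overflow_warning in between is only
   emitted if the caller says the result was used.  Kept under #if 0.  */
#if 0
fold_defer_overflow_warnings ();
tree res = fold (expr);   /* EXPR is a hypothetical tree; folding may
                             record a -Wstrict-overflow warning.  */
bool used = res != expr;  /* Hypothetical "result was used" test.  */
fold_undefer_overflow_warnings (used, NULL, 0);  /* STMT may be NULL.  */
#endif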
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
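/* A hypothetical sketch (not part of the original file): negating the
   most negative value of a signed type overflows, so only the
   sign-bit-only bit pattern is rejected.  Kept under #if 0.  */
#if 0
tree int_min = TYPE_MIN_VALUE (integer_type_node);
tree minus_one = build_int_cst (integer_type_node, -1);
may_negate_without_overflow_p (int_min);    /* false: -INT_MIN overflows.  */
may_negate_without_overflow_p (minus_one);  /* true: -(-1) == 1 is fine.  */
#endif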
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
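/* A hypothetical sketch (not part of the original file) of the predicate
   above on a PLUS_EXPR: -(a + 5) is cheap to negate as (-5) - a when the
   type's overflow wraps, so negate_expr_p answers true.  Kept under
   #if 0; the variable A is a hypothetical operand.  */
#if 0
tree a = create_tmp_var_raw (unsigned_type_node, "a");
tree five = build_int_cst (unsigned_type_node, 5);
tree sum = build2 (PLUS_EXPR, unsigned_type_node, a, five);
negate_expr_p (sum);  /* true: unsigned arithmetic wraps.  */
#endif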
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
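/* A hypothetical sketch (not part of the original file): unlike
   fold_negate_expr, negate_expr never returns NULL_TREE for a non-null
   argument; when no simplification applies it falls back to wrapping T
   in an explicit NEGATE_EXPR.  Kept under #if 0.  */
#if 0
tree five = build_int_cst (integer_type_node, 5);
negate_expr (five);       /* INTEGER_CST -5, folded directly.  */
negate_expr (NULL_TREE);  /* NULL_TREE, by the documented convention.  */
#endif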
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
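/* A hypothetical sketch (not part of the original file) of the
   decomposition performed above: splitting a hypothetical a + 5 under
   PLUS_EXPR yields literal 5, no constant part, and variable part a.
   Kept under #if 0.  */
#if 0
tree conp, litp, minus_litp;
tree var = split_tree (sum /* hypothetical a + 5 */, PLUS_EXPR,
                       &conp, &litp, &minus_litp, 0);
/* Now litp == 5, conp == NULL, minus_litp == NULL, var == a.  */
#endif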
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
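/* A hypothetical sketch (not part of the original file): int_const_binop
   evaluates an arithmetic tree code on two INTEGER_CSTs at compile time,
   and returns NULL_TREE for codes it cannot handle and for division by
   zero, per the cases above.  Kept under #if 0.  */
#if 0
tree seven = build_int_cst (integer_type_node, 7);
tree three = build_int_cst (integer_type_node, 3);
int_const_binop (TRUNC_DIV_EXPR, seven, three);  /* INTEGER_CST 2.  */
int_const_binop (TRUNC_MOD_EXPR, seven, three);  /* INTEGER_CST 1.  */
int_const_binop (TRUNC_DIV_EXPR, seven, integer_zero_node);  /* NULL_TREE.  */
#endif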
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_RSHIFT_EXPR is endian dependent.
             For reductions, if !BYTES_BIG_ENDIAN then compiler picks first
             vector element, but last element if BYTES_BIG_ENDIAN.  */
          if (BYTES_BIG_ENDIAN)
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and return NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
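/* A hypothetical sketch (not part of the original file): const_binop
   dispatches on the kind of constant and refuses, with NULL_TREE,
   whenever folding would change observable semantics (signaling NaNs,
   trapping division by zero, rounding-mode-dependent results).  Kept
   under #if 0.  */
#if 0
tree half = build_real (double_type_node, dconsthalf);
tree one = build_real (double_type_node, dconst1);
const_binop (PLUS_EXPR, half, one);  /* REAL_CST 1.5, an exact result.  */
#endif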
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
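/* A hypothetical sketch (not part of the original file): size_diffop
   keeps constant differences exact even though sizetype is unsigned,
   by subtracting in whichever order cannot overflow and negating in
   the signed type.  Kept under #if 0.  */
#if 0
tree d = size_diffop_loc (UNKNOWN_LOCATION, size_int (3), size_int (8));
/* d is an ssizetype INTEGER_CST with value -5, not a huge unsigned
   wrap-around value.  */
#endif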
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
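/* A hypothetical sketch (not part of the original file) of the
   saturating semantics documented above, assuming a 32-bit int target:
   (int) 1e10 folds to INT_MAX, (int) -1e10 to INT_MIN, and (int) NaN
   to 0, each with TREE_OVERFLOW set on the result.  Kept under #if 0.  */
#if 0
REAL_VALUE_TYPE big;
real_from_string (&big, "1e10");
tree arg = build_real (double_type_node, big);
fold_convert_const_int_from_real (FIX_TRUNC_EXPR, integer_type_node, arg);
/* INTEGER_CST 2147483647 (INT_MAX) with TREE_OVERFLOW == 1.  */
#endif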
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2078 case VOID_TYPE:
2079 tem = fold_ignored_result (arg);
2080 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2082 default:
2083 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2084 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2085 gcc_unreachable ();
2087 fold_convert_exit:
2088 protected_set_expr_location_unshare (tem, loc);
2089 return tem;
2092 /* Return false if expr can be assumed not to be an lvalue, true
2093 otherwise. */
2095 static bool
2096 maybe_lvalue_p (const_tree x)
2098 /* We only need to wrap lvalue tree codes. */
2099 switch (TREE_CODE (x))
2101 case VAR_DECL:
2102 case PARM_DECL:
2103 case RESULT_DECL:
2104 case LABEL_DECL:
2105 case FUNCTION_DECL:
2106 case SSA_NAME:
2108 case COMPONENT_REF:
2109 case MEM_REF:
2110 case INDIRECT_REF:
2111 case ARRAY_REF:
2112 case ARRAY_RANGE_REF:
2113 case BIT_FIELD_REF:
2114 case OBJ_TYPE_REF:
2116 case REALPART_EXPR:
2117 case IMAGPART_EXPR:
2118 case PREINCREMENT_EXPR:
2119 case PREDECREMENT_EXPR:
2120 case SAVE_EXPR:
2121 case TRY_CATCH_EXPR:
2122 case WITH_CLEANUP_EXPR:
2123 case COMPOUND_EXPR:
2124 case MODIFY_EXPR:
2125 case TARGET_EXPR:
2126 case COND_EXPR:
2127 case BIND_EXPR:
2128 break;
2130 default:
2131 /* Assume the worst for front-end tree codes. */
2132 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2133 break;
2134 return false;
2137 return true;
2140 /* Return an expr equal to X but certainly not valid as an lvalue. */
2142 tree
2143 non_lvalue_loc (location_t loc, tree x)
2145 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2146 us. */
2147 if (in_gimple_form)
2148 return x;
2150 if (! maybe_lvalue_p (x))
2151 return x;
2152 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2155 /* When pedantic, return an expr equal to X but certainly not valid as a
2156 pedantic lvalue. Otherwise, return X. */
2158 static tree
2159 pedantic_non_lvalue_loc (location_t loc, tree x)
2161 return protected_set_expr_location_unshare (x, loc);
2164 /* Given a tree comparison code, return the code that is the logical inverse.
2165 It is generally not safe to do this for floating-point comparisons, except
2166 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2167 ERROR_MARK in this case. */
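/* For example, invert_tree_comparison (LT_EXPR, false) is GE_EXPR,
while invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR, because
!(a < b) must also hold when a and b compare unordered. When
flag_trapping_math is set and NaNs are honored, everything except the
four codes named above yields ERROR_MARK. */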
2169 enum tree_code
2170 invert_tree_comparison (enum tree_code code, bool honor_nans)
2172 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2173 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2174 return ERROR_MARK;
2176 switch (code)
2178 case EQ_EXPR:
2179 return NE_EXPR;
2180 case NE_EXPR:
2181 return EQ_EXPR;
2182 case GT_EXPR:
2183 return honor_nans ? UNLE_EXPR : LE_EXPR;
2184 case GE_EXPR:
2185 return honor_nans ? UNLT_EXPR : LT_EXPR;
2186 case LT_EXPR:
2187 return honor_nans ? UNGE_EXPR : GE_EXPR;
2188 case LE_EXPR:
2189 return honor_nans ? UNGT_EXPR : GT_EXPR;
2190 case LTGT_EXPR:
2191 return UNEQ_EXPR;
2192 case UNEQ_EXPR:
2193 return LTGT_EXPR;
2194 case UNGT_EXPR:
2195 return LE_EXPR;
2196 case UNGE_EXPR:
2197 return LT_EXPR;
2198 case UNLT_EXPR:
2199 return GE_EXPR;
2200 case UNLE_EXPR:
2201 return GT_EXPR;
2202 case ORDERED_EXPR:
2203 return UNORDERED_EXPR;
2204 case UNORDERED_EXPR:
2205 return ORDERED_EXPR;
2206 default:
2207 gcc_unreachable ();
2211 /* Similar, but return the comparison that results if the operands are
2212 swapped. This is safe for floating-point. */
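/* For example, swap_tree_comparison (LT_EXPR) is GT_EXPR: a < b and
b > a agree even when a or b is a NaN, which is why no honor_nans
argument is needed here. */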
2214 enum tree_code
2215 swap_tree_comparison (enum tree_code code)
2217 switch (code)
2219 case EQ_EXPR:
2220 case NE_EXPR:
2221 case ORDERED_EXPR:
2222 case UNORDERED_EXPR:
2223 case LTGT_EXPR:
2224 case UNEQ_EXPR:
2225 return code;
2226 case GT_EXPR:
2227 return LT_EXPR;
2228 case GE_EXPR:
2229 return LE_EXPR;
2230 case LT_EXPR:
2231 return GT_EXPR;
2232 case LE_EXPR:
2233 return GE_EXPR;
2234 case UNGT_EXPR:
2235 return UNLT_EXPR;
2236 case UNGE_EXPR:
2237 return UNLE_EXPR;
2238 case UNLT_EXPR:
2239 return UNGT_EXPR;
2240 case UNLE_EXPR:
2241 return UNGE_EXPR;
2242 default:
2243 gcc_unreachable ();
2248 /* Convert a comparison tree code from an enum tree_code representation
2249 into a compcode bit-based encoding. This function is the inverse of
2250 compcode_to_comparison. */
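/* The encoding assigns one bit each to LT (1), EQ (2), GT (4) and
UNORD (8); compound codes are bitwise unions of those, e.g.
COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
COMPCODE_GE == (COMPCODE_GT | COMPCODE_EQ). That is what lets
combine_comparisons below simply AND or OR two encoded values. */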
2252 static enum comparison_code
2253 comparison_to_compcode (enum tree_code code)
2255 switch (code)
2257 case LT_EXPR:
2258 return COMPCODE_LT;
2259 case EQ_EXPR:
2260 return COMPCODE_EQ;
2261 case LE_EXPR:
2262 return COMPCODE_LE;
2263 case GT_EXPR:
2264 return COMPCODE_GT;
2265 case NE_EXPR:
2266 return COMPCODE_NE;
2267 case GE_EXPR:
2268 return COMPCODE_GE;
2269 case ORDERED_EXPR:
2270 return COMPCODE_ORD;
2271 case UNORDERED_EXPR:
2272 return COMPCODE_UNORD;
2273 case UNLT_EXPR:
2274 return COMPCODE_UNLT;
2275 case UNEQ_EXPR:
2276 return COMPCODE_UNEQ;
2277 case UNLE_EXPR:
2278 return COMPCODE_UNLE;
2279 case UNGT_EXPR:
2280 return COMPCODE_UNGT;
2281 case LTGT_EXPR:
2282 return COMPCODE_LTGT;
2283 case UNGE_EXPR:
2284 return COMPCODE_UNGE;
2285 default:
2286 gcc_unreachable ();
2290 /* Convert a compcode bit-based encoding of a comparison operator back
2291 to GCC's enum tree_code representation. This function is the
2292 inverse of comparison_to_compcode. */
2294 static enum tree_code
2295 compcode_to_comparison (enum comparison_code code)
2297 switch (code)
2299 case COMPCODE_LT:
2300 return LT_EXPR;
2301 case COMPCODE_EQ:
2302 return EQ_EXPR;
2303 case COMPCODE_LE:
2304 return LE_EXPR;
2305 case COMPCODE_GT:
2306 return GT_EXPR;
2307 case COMPCODE_NE:
2308 return NE_EXPR;
2309 case COMPCODE_GE:
2310 return GE_EXPR;
2311 case COMPCODE_ORD:
2312 return ORDERED_EXPR;
2313 case COMPCODE_UNORD:
2314 return UNORDERED_EXPR;
2315 case COMPCODE_UNLT:
2316 return UNLT_EXPR;
2317 case COMPCODE_UNEQ:
2318 return UNEQ_EXPR;
2319 case COMPCODE_UNLE:
2320 return UNLE_EXPR;
2321 case COMPCODE_UNGT:
2322 return UNGT_EXPR;
2323 case COMPCODE_LTGT:
2324 return LTGT_EXPR;
2325 case COMPCODE_UNGE:
2326 return UNGE_EXPR;
2327 default:
2328 gcc_unreachable ();
2332 /* Return a tree for the comparison which is the combination of
2333 doing the AND or OR (depending on CODE) of the two operations LCODE
2334 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2335 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2336 if this makes the transformation invalid. */
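/* For example, on integral operands (a < b) || (a == b) ORs
COMPCODE_LT with COMPCODE_EQ giving COMPCODE_LE, so the result folds
to a <= b; (a < b) && (a == b) ANDs them giving COMPCODE_FALSE, so
the result folds to constant false. For floating point the NaN and
trap checks below may force a NULL_TREE result instead. */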
2338 tree
2339 combine_comparisons (location_t loc,
2340 enum tree_code code, enum tree_code lcode,
2341 enum tree_code rcode, tree truth_type,
2342 tree ll_arg, tree lr_arg)
2344 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2345 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2346 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2347 int compcode;
2349 switch (code)
2351 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2352 compcode = lcompcode & rcompcode;
2353 break;
2355 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2356 compcode = lcompcode | rcompcode;
2357 break;
2359 default:
2360 return NULL_TREE;
2363 if (!honor_nans)
2365 /* Eliminate unordered comparisons, as well as LTGT and ORD
2366 which are not used unless the mode has NaNs. */
2367 compcode &= ~COMPCODE_UNORD;
2368 if (compcode == COMPCODE_LTGT)
2369 compcode = COMPCODE_NE;
2370 else if (compcode == COMPCODE_ORD)
2371 compcode = COMPCODE_TRUE;
2373 else if (flag_trapping_math)
2375 /* Check that the original operation and the optimized ones will trap
2376 under the same condition. */
2377 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2378 && (lcompcode != COMPCODE_EQ)
2379 && (lcompcode != COMPCODE_ORD);
2380 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2381 && (rcompcode != COMPCODE_EQ)
2382 && (rcompcode != COMPCODE_ORD);
2383 bool trap = (compcode & COMPCODE_UNORD) == 0
2384 && (compcode != COMPCODE_EQ)
2385 && (compcode != COMPCODE_ORD);
2387 /* In a short-circuited boolean expression the LHS might be
2388 such that the RHS, if evaluated, will never trap. For
2389 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2390 if neither x nor y is NaN. (This is a mixed blessing: for
2391 example, the expression above will never trap, hence
2392 optimizing it to x < y would be invalid). */
2393 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2394 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2395 rtrap = false;
2397 /* If the comparison was short-circuited, and only the RHS
2398 trapped, we may now generate a spurious trap. */
2399 if (rtrap && !ltrap
2400 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2401 return NULL_TREE;
2403 /* If we changed the conditions that cause a trap, we lose. */
2404 if ((ltrap || rtrap) != trap)
2405 return NULL_TREE;
2408 if (compcode == COMPCODE_TRUE)
2409 return constant_boolean_node (true, truth_type);
2410 else if (compcode == COMPCODE_FALSE)
2411 return constant_boolean_node (false, truth_type);
2412 else
2414 enum tree_code tcode;
2416 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2417 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2421 /* Return nonzero if two operands (typically of the same tree node)
2422 are necessarily equal. If either argument has side-effects this
2423 function returns zero. FLAGS modifies behavior as follows:
2425 If OEP_ONLY_CONST is set, only return nonzero for constants.
2426 This function tests whether the operands are indistinguishable;
2427 it does not test whether they are equal using C's == operation.
2428 The distinction is important for IEEE floating point, because
2429 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2430 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2432 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2433 even though it may hold multiple values during a function.
2434 This is because a GCC tree node guarantees that nothing else is
2435 executed between the evaluation of its "operands" (which may often
2436 be evaluated in arbitrary order). Hence if the operands themselves
2437 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2438 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2439 unset means assuming isochronic (or instantaneous) tree equivalence.
2440 Unless comparing arbitrary expression trees, such as from different
2441 statements, this flag can usually be left unset.
2443 If OEP_PURE_SAME is set, then pure functions with identical arguments
2444 are considered the same. It is used when the caller has other ways
2445 to ensure that global memory is unchanged in between. */
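/* For example (illustrative), two structurally identical ARRAY_REF
trees for a[i] compare equal here as long as neither operand has side
effects; with OEP_ONLY_CONST set, only the constant cases below can
return nonzero. */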
2447 int
2448 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2450 /* If either is ERROR_MARK, they aren't equal. */
2451 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2452 || TREE_TYPE (arg0) == error_mark_node
2453 || TREE_TYPE (arg1) == error_mark_node)
2454 return 0;
2456 /* Similarly, if either does not have a type (like a released SSA name),
2457 they aren't equal. */
2458 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2459 return 0;
2461 /* Check equality of integer constants before bailing out due to
2462 precision differences. */
2463 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2464 return tree_int_cst_equal (arg0, arg1);
2466 /* If both types don't have the same signedness, then we can't consider
2467 them equal. We must check this before the STRIP_NOPS calls
2468 because they may change the signedness of the arguments. As pointers
2469 strictly don't have a signedness, require either two pointers or
2470 two non-pointers as well. */
2471 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2472 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2473 return 0;
2475 /* We cannot consider pointers to different address space equal. */
2476 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2477 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2478 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2479 return 0;
2481 /* If both types don't have the same precision, then it is not safe
2482 to strip NOPs. */
2483 if (element_precision (TREE_TYPE (arg0))
2484 != element_precision (TREE_TYPE (arg1)))
2485 return 0;
2487 STRIP_NOPS (arg0);
2488 STRIP_NOPS (arg1);
2490 /* In case both args are comparisons but with different comparison
2491 code, try to swap the comparison operands of one arg to produce
2492 a match and compare that variant. */
2493 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2494 && COMPARISON_CLASS_P (arg0)
2495 && COMPARISON_CLASS_P (arg1))
2497 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2499 if (TREE_CODE (arg0) == swap_code)
2500 return operand_equal_p (TREE_OPERAND (arg0, 0),
2501 TREE_OPERAND (arg1, 1), flags)
2502 && operand_equal_p (TREE_OPERAND (arg0, 1),
2503 TREE_OPERAND (arg1, 0), flags);
2506 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2507 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2508 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2509 return 0;
2511 /* This is needed for conversions and for COMPONENT_REF.
2512 Might as well play it safe and always test this. */
2513 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2514 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2515 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2516 return 0;
2518 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2519 We don't care about side effects in that case because the SAVE_EXPR
2520 takes care of that for us. In all other cases, two expressions are
2521 equal if they have no side effects. If we have two identical
2522 expressions with side effects that should be treated the same due
2523 to the only side effects being identical SAVE_EXPR's, that will
2524 be detected in the recursive calls below.
2525 If we are taking an invariant address of two identical objects
2526 they are necessarily equal as well. */
2527 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2528 && (TREE_CODE (arg0) == SAVE_EXPR
2529 || (flags & OEP_CONSTANT_ADDRESS_OF)
2530 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2531 return 1;
2533 /* Next handle constant cases, those for which we can return 1 even
2534 if ONLY_CONST is set. */
2535 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2536 switch (TREE_CODE (arg0))
2538 case INTEGER_CST:
2539 return tree_int_cst_equal (arg0, arg1);
2541 case FIXED_CST:
2542 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2543 TREE_FIXED_CST (arg1));
2545 case REAL_CST:
2546 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2547 TREE_REAL_CST (arg1)))
2548 return 1;
2551 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2553 /* If we do not distinguish between signed and unsigned zero,
2554 consider them equal. */
2555 if (real_zerop (arg0) && real_zerop (arg1))
2556 return 1;
2558 return 0;
2560 case VECTOR_CST:
2562 unsigned i;
2564 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2565 return 0;
2567 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2569 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2570 VECTOR_CST_ELT (arg1, i), flags))
2571 return 0;
2573 return 1;
2576 case COMPLEX_CST:
2577 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2578 flags)
2579 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2580 flags));
2582 case STRING_CST:
2583 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2584 && ! memcmp (TREE_STRING_POINTER (arg0),
2585 TREE_STRING_POINTER (arg1),
2586 TREE_STRING_LENGTH (arg0)));
2588 case ADDR_EXPR:
2589 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2590 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2591 ? OEP_CONSTANT_ADDRESS_OF : 0);
2592 default:
2593 break;
2596 if (flags & OEP_ONLY_CONST)
2597 return 0;
2599 /* Define macros to test an operand from arg0 and arg1 for equality and a
2600 variant that allows null and views null as being different from any
2601 non-null value. In the latter case, if either is null, they both
2602 must be; otherwise, do the normal comparison. */
2603 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2604 TREE_OPERAND (arg1, N), flags)
2606 #define OP_SAME_WITH_NULL(N) \
2607 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2608 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2610 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2612 case tcc_unary:
2613 /* Two conversions are equal only if signedness and modes match. */
2614 switch (TREE_CODE (arg0))
2616 CASE_CONVERT:
2617 case FIX_TRUNC_EXPR:
2618 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2619 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2620 return 0;
2621 break;
2622 default:
2623 break;
2626 return OP_SAME (0);
2629 case tcc_comparison:
2630 case tcc_binary:
2631 if (OP_SAME (0) && OP_SAME (1))
2632 return 1;
2634 /* For commutative ops, allow the other order. */
2635 return (commutative_tree_code (TREE_CODE (arg0))
2636 && operand_equal_p (TREE_OPERAND (arg0, 0),
2637 TREE_OPERAND (arg1, 1), flags)
2638 && operand_equal_p (TREE_OPERAND (arg0, 1),
2639 TREE_OPERAND (arg1, 0), flags));
2641 case tcc_reference:
2642 /* If either of the pointer (or reference) expressions we are
2643 dereferencing contain a side effect, these cannot be equal,
2644 but their addresses can be. */
2645 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2646 && (TREE_SIDE_EFFECTS (arg0)
2647 || TREE_SIDE_EFFECTS (arg1)))
2648 return 0;
2650 switch (TREE_CODE (arg0))
2652 case INDIRECT_REF:
2653 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2654 return OP_SAME (0);
2656 case REALPART_EXPR:
2657 case IMAGPART_EXPR:
2658 return OP_SAME (0);
2660 case TARGET_MEM_REF:
2661 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2662 /* Require equal extra operands and then fall through to MEM_REF
2663 handling of the two common operands. */
2664 if (!OP_SAME_WITH_NULL (2)
2665 || !OP_SAME_WITH_NULL (3)
2666 || !OP_SAME_WITH_NULL (4))
2667 return 0;
2668 /* Fallthru. */
2669 case MEM_REF:
2670 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2671 /* Require equal access sizes, and similar pointer types.
2672 We can have incomplete types for array references of
2673 variable-sized arrays from the Fortran frontend
2674 though. Also verify the types are compatible. */
2675 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2676 || (TYPE_SIZE (TREE_TYPE (arg0))
2677 && TYPE_SIZE (TREE_TYPE (arg1))
2678 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2679 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2680 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2681 && alias_ptr_types_compatible_p
2682 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2683 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2684 && OP_SAME (0) && OP_SAME (1));
2686 case ARRAY_REF:
2687 case ARRAY_RANGE_REF:
2688 /* Operands 2 and 3 may be null.
2689 Compare the array index by value first if it is constant, as we
2690 may have different types but the same value here. */
2691 if (!OP_SAME (0))
2692 return 0;
2693 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2694 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2695 TREE_OPERAND (arg1, 1))
2696 || OP_SAME (1))
2697 && OP_SAME_WITH_NULL (2)
2698 && OP_SAME_WITH_NULL (3));
2700 case COMPONENT_REF:
2701 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2702 may be NULL when we're called to compare MEM_EXPRs. */
2703 if (!OP_SAME_WITH_NULL (0)
2704 || !OP_SAME (1))
2705 return 0;
2706 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2707 return OP_SAME_WITH_NULL (2);
2709 case BIT_FIELD_REF:
2710 if (!OP_SAME (0))
2711 return 0;
2712 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2713 return OP_SAME (1) && OP_SAME (2);
2715 default:
2716 return 0;
2719 case tcc_expression:
2720 switch (TREE_CODE (arg0))
2722 case ADDR_EXPR:
2723 case TRUTH_NOT_EXPR:
2724 return OP_SAME (0);
2726 case TRUTH_ANDIF_EXPR:
2727 case TRUTH_ORIF_EXPR:
2728 return OP_SAME (0) && OP_SAME (1);
2730 case FMA_EXPR:
2731 case WIDEN_MULT_PLUS_EXPR:
2732 case WIDEN_MULT_MINUS_EXPR:
2733 if (!OP_SAME (2))
2734 return 0;
2735 /* The multiplication operands are commutative. */
2736 /* FALLTHRU */
2738 case TRUTH_AND_EXPR:
2739 case TRUTH_OR_EXPR:
2740 case TRUTH_XOR_EXPR:
2741 if (OP_SAME (0) && OP_SAME (1))
2742 return 1;
2744 /* Otherwise take into account this is a commutative operation. */
2745 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2746 TREE_OPERAND (arg1, 1), flags)
2747 && operand_equal_p (TREE_OPERAND (arg0, 1),
2748 TREE_OPERAND (arg1, 0), flags));
2750 case COND_EXPR:
2751 case VEC_COND_EXPR:
2752 case DOT_PROD_EXPR:
2753 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2755 default:
2756 return 0;
2759 case tcc_vl_exp:
2760 switch (TREE_CODE (arg0))
2762 case CALL_EXPR:
2763 /* If the CALL_EXPRs call different functions, then they
2764 clearly cannot be equal. */
2765 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2766 flags))
2767 return 0;
2770 unsigned int cef = call_expr_flags (arg0);
2771 if (flags & OEP_PURE_SAME)
2772 cef &= ECF_CONST | ECF_PURE;
2773 else
2774 cef &= ECF_CONST;
2775 if (!cef)
2776 return 0;
2779 /* Now see if all the arguments are the same. */
2781 const_call_expr_arg_iterator iter0, iter1;
2782 const_tree a0, a1;
2783 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2784 a1 = first_const_call_expr_arg (arg1, &iter1);
2785 a0 && a1;
2786 a0 = next_const_call_expr_arg (&iter0),
2787 a1 = next_const_call_expr_arg (&iter1))
2788 if (! operand_equal_p (a0, a1, flags))
2789 return 0;
2791 /* If we get here and both argument lists are exhausted
2792 then the CALL_EXPRs are equal. */
2793 return ! (a0 || a1);
2795 default:
2796 return 0;
2799 case tcc_declaration:
2800 /* Consider __builtin_sqrt equal to sqrt. */
2801 return (TREE_CODE (arg0) == FUNCTION_DECL
2802 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2803 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2804 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2806 default:
2807 return 0;
2810 #undef OP_SAME
2811 #undef OP_SAME_WITH_NULL
2814 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2815 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2817 When in doubt, return 0. */
2819 static int
2820 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2822 int unsignedp1, unsignedpo;
2823 tree primarg0, primarg1, primother;
2824 unsigned int correct_width;
2826 if (operand_equal_p (arg0, arg1, 0))
2827 return 1;
2829 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2830 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2831 return 0;
2833 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2834 and see if the inner values are the same. This removes any
2835 signedness comparison, which doesn't matter here. */
2836 primarg0 = arg0, primarg1 = arg1;
2837 STRIP_NOPS (primarg0);
2838 STRIP_NOPS (primarg1);
2839 if (operand_equal_p (primarg0, primarg1, 0))
2840 return 1;
2842 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2843 actual comparison operand, ARG0.
2845 First throw away any conversions to wider types
2846 already present in the operands. */
2848 primarg1 = get_narrower (arg1, &unsignedp1);
2849 primother = get_narrower (other, &unsignedpo);
2851 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2852 if (unsignedp1 == unsignedpo
2853 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2854 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2856 tree type = TREE_TYPE (arg0);
2858 /* Make sure shorter operand is extended the right way
2859 to match the longer operand. */
2860 primarg1 = fold_convert (signed_or_unsigned_type_for
2861 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2863 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2864 return 1;
2867 return 0;
2870 /* See if ARG is an expression that is either a comparison or is performing
2871 arithmetic on comparisons. The comparisons must only be comparing
2872 two different values, which will be stored in *CVAL1 and *CVAL2; if
2873 they are nonzero it means that some operands have already been found.
2874 No variables may be used anywhere else in the expression except in the
2875 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2876 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2878 If this is true, return 1. Otherwise, return zero. */
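/* For example, for ARG of the form (x < y) || (x == y) this returns 1
with *CVAL1 == x and *CVAL2 == y, whereas (x < y) && (y < z) fails
because three distinct values take part in the comparisons. */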
2880 static int
2881 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2883 enum tree_code code = TREE_CODE (arg);
2884 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2886 /* We can handle some of the tcc_expression cases here. */
2887 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2888 tclass = tcc_unary;
2889 else if (tclass == tcc_expression
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2891 || code == COMPOUND_EXPR))
2892 tclass = tcc_binary;
2894 else if (tclass == tcc_expression && code == SAVE_EXPR
2895 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2897 /* If we've already found a CVAL1 or CVAL2, this expression is
2898 too complex to handle. */
2899 if (*cval1 || *cval2)
2900 return 0;
2902 tclass = tcc_unary;
2903 *save_p = 1;
2906 switch (tclass)
2908 case tcc_unary:
2909 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2911 case tcc_binary:
2912 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2913 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2914 cval1, cval2, save_p));
2916 case tcc_constant:
2917 return 1;
2919 case tcc_expression:
2920 if (code == COND_EXPR)
2921 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2922 cval1, cval2, save_p)
2923 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2924 cval1, cval2, save_p)
2925 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2926 cval1, cval2, save_p));
2927 return 0;
2929 case tcc_comparison:
2930 /* First see if we can handle the first operand, then the second. For
2931 the second operand, we know *CVAL1 can't be zero. It must be that
2932 one side of the comparison is each of the values; test for the
2933 case where this isn't true by failing if the two operands
2934 are the same. */
2936 if (operand_equal_p (TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (arg, 1), 0))
2938 return 0;
2940 if (*cval1 == 0)
2941 *cval1 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2943 ;
2944 else if (*cval2 == 0)
2945 *cval2 = TREE_OPERAND (arg, 0);
2946 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2947 ;
2948 else
2949 return 0;
2951 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2952 ;
2953 else if (*cval2 == 0)
2954 *cval2 = TREE_OPERAND (arg, 1);
2955 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2956 ;
2957 else
2958 return 0;
2960 return 1;
2962 default:
2963 return 0;
2967 /* ARG is a tree that is known to contain just arithmetic operations and
2968 comparisons. Evaluate the operations in the tree substituting NEW0 for
2969 any occurrence of OLD0 as an operand of a comparison and likewise for
2970 NEW1 and OLD1. */
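/* For example, with OLD0 = x, NEW0 = a, OLD1 = y, NEW1 = b, the tree
(x < y) && (x == y) is rebuilt as (a < b) && (a == b); the
substitution only applies to operands of the comparisons
themselves. */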
2972 static tree
2973 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2974 tree old1, tree new1)
2976 tree type = TREE_TYPE (arg);
2977 enum tree_code code = TREE_CODE (arg);
2978 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2980 /* We can handle some of the tcc_expression cases here. */
2981 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2982 tclass = tcc_unary;
2983 else if (tclass == tcc_expression
2984 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2985 tclass = tcc_binary;
2987 switch (tclass)
2989 case tcc_unary:
2990 return fold_build1_loc (loc, code, type,
2991 eval_subst (loc, TREE_OPERAND (arg, 0),
2992 old0, new0, old1, new1));
2994 case tcc_binary:
2995 return fold_build2_loc (loc, code, type,
2996 eval_subst (loc, TREE_OPERAND (arg, 0),
2997 old0, new0, old1, new1),
2998 eval_subst (loc, TREE_OPERAND (arg, 1),
2999 old0, new0, old1, new1));
3001 case tcc_expression:
3002 switch (code)
3004 case SAVE_EXPR:
3005 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3006 old1, new1);
3008 case COMPOUND_EXPR:
3009 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3010 old1, new1);
3012 case COND_EXPR:
3013 return fold_build3_loc (loc, code, type,
3014 eval_subst (loc, TREE_OPERAND (arg, 0),
3015 old0, new0, old1, new1),
3016 eval_subst (loc, TREE_OPERAND (arg, 1),
3017 old0, new0, old1, new1),
3018 eval_subst (loc, TREE_OPERAND (arg, 2),
3019 old0, new0, old1, new1));
3020 default:
3021 break;
3023 /* Fall through - ??? */
3025 case tcc_comparison:
3027 tree arg0 = TREE_OPERAND (arg, 0);
3028 tree arg1 = TREE_OPERAND (arg, 1);
3030 /* We need to check both for exact equality and tree equality. The
3031 former will be true if the operand has a side-effect. In that
3032 case, we know the operand occurred exactly once. */
3034 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3035 arg0 = new0;
3036 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3037 arg0 = new1;
3039 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3040 arg1 = new0;
3041 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3042 arg1 = new1;
3044 return fold_build2_loc (loc, code, type, arg0, arg1);
3047 default:
3048 return arg;
3052 /* Return a tree for the case when the result of an expression is RESULT
3053 converted to TYPE and OMITTED was previously an operand of the expression
3054 but is now not needed (e.g., we folded OMITTED * 0).
3056 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3057 the conversion of RESULT to TYPE. */
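/* For example (illustrative), when fold reduces f () * 0 to 0 the
call f () cannot simply be dropped; passing the call as OMITTED here
produces COMPOUND_EXPR <f (), 0> so its side effects are kept. */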
3059 tree
3060 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3062 tree t = fold_convert_loc (loc, type, result);
3064 /* If the resulting operand is an empty statement, just return the omitted
3065 statement cast to void. */
3066 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3067 return build1_loc (loc, NOP_EXPR, void_type_node,
3068 fold_ignored_result (omitted));
3070 if (TREE_SIDE_EFFECTS (omitted))
3071 return build2_loc (loc, COMPOUND_EXPR, type,
3072 fold_ignored_result (omitted), t);
3074 return non_lvalue_loc (loc, t);
3077 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3079 static tree
3080 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3081 tree omitted)
3083 tree t = fold_convert_loc (loc, type, result);
3085 /* If the resulting operand is an empty statement, just return the omitted
3086 statement cast to void. */
3087 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3088 return build1_loc (loc, NOP_EXPR, void_type_node,
3089 fold_ignored_result (omitted));
3091 if (TREE_SIDE_EFFECTS (omitted))
3092 return build2_loc (loc, COMPOUND_EXPR, type,
3093 fold_ignored_result (omitted), t);
3095 return pedantic_non_lvalue_loc (loc, t);
3098 /* Return a tree for the case when the result of an expression is RESULT
3099 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3100 of the expression but are now not needed.
3102 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3103 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3104 evaluated before OMITTED2. Otherwise, if neither has side effects,
3105 just do the conversion of RESULT to TYPE. */
3107 tree
3108 omit_two_operands_loc (location_t loc, tree type, tree result,
3109 tree omitted1, tree omitted2)
3111 tree t = fold_convert_loc (loc, type, result);
3113 if (TREE_SIDE_EFFECTS (omitted2))
3114 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3115 if (TREE_SIDE_EFFECTS (omitted1))
3116 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3118 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3122 /* Return a simplified tree node for the truth-negation of ARG. This
3123 never alters ARG itself. We assume that ARG is an operation that
3124 returns a truth value (0 or 1).
3126 FIXME: one would think we would fold the result, but it causes
3127 problems with the dominator optimizer. */
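/* For example, negating TRUTH_AND_EXPR applies De Morgan's law:
!(a && b) becomes !a || !b, while a comparison such as a < b is
inverted directly to a >= b (or an unordered variant) via
invert_tree_comparison above. */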
3129 static tree
3130 fold_truth_not_expr (location_t loc, tree arg)
3132 tree type = TREE_TYPE (arg);
3133 enum tree_code code = TREE_CODE (arg);
3134 location_t loc1, loc2;
3136 /* If this is a comparison, we can simply invert it, except for
3137 floating-point non-equality comparisons, in which case we just
3138 enclose a TRUTH_NOT_EXPR around what we have. */
3140 if (TREE_CODE_CLASS (code) == tcc_comparison)
3142 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3143 if (FLOAT_TYPE_P (op_type)
3144 && flag_trapping_math
3145 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3146 && code != NE_EXPR && code != EQ_EXPR)
3147 return NULL_TREE;
3149 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3150 if (code == ERROR_MARK)
3151 return NULL_TREE;
3153 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3154 TREE_OPERAND (arg, 1));
3157 switch (code)
3159 case INTEGER_CST:
3160 return constant_boolean_node (integer_zerop (arg), type);
3162 case TRUTH_AND_EXPR:
3163 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3164 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3165 return build2_loc (loc, TRUTH_OR_EXPR, type,
3166 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3167 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3169 case TRUTH_OR_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3172 return build2_loc (loc, TRUTH_AND_EXPR, type,
3173 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3174 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3176 case TRUTH_XOR_EXPR:
3177 /* Here we can invert either operand. We invert the first operand
3178 unless the second operand is a TRUTH_NOT_EXPR in which case our
3179 result is the XOR of the first operand with the inside of the
3180 negation of the second operand. */
3182 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3183 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3184 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3185 else
3186 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3187 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3188 TREE_OPERAND (arg, 1));
3190 case TRUTH_ANDIF_EXPR:
3191 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3192 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3193 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3194 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3195 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3197 case TRUTH_ORIF_EXPR:
3198 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3199 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3200 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3201 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3202 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3204 case TRUTH_NOT_EXPR:
3205 return TREE_OPERAND (arg, 0);
3207 case COND_EXPR:
3209 tree arg1 = TREE_OPERAND (arg, 1);
3210 tree arg2 = TREE_OPERAND (arg, 2);
3212 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3213 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3215 /* A COND_EXPR may have a throw as one operand, which
3216 then has void type. Just leave void operands
3217 as they are. */
3218 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3219 VOID_TYPE_P (TREE_TYPE (arg1))
3220 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3221 VOID_TYPE_P (TREE_TYPE (arg2))
3222 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3225 case COMPOUND_EXPR:
3226 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3227 return build2_loc (loc, COMPOUND_EXPR, type,
3228 TREE_OPERAND (arg, 0),
3229 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3231 case NON_LVALUE_EXPR:
3232 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3233 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3235 CASE_CONVERT:
3236 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3237 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3239 /* ... fall through ... */
3241 case FLOAT_EXPR:
3242 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3243 return build1_loc (loc, TREE_CODE (arg), type,
3244 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3246 case BIT_AND_EXPR:
3247 if (!integer_onep (TREE_OPERAND (arg, 1)))
3248 return NULL_TREE;
3249 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3251 case SAVE_EXPR:
3252 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3254 case CLEANUP_POINT_EXPR:
3255 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3256 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3257 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3259 default:
3260 return NULL_TREE;
3264 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3265 assume that ARG is an operation that returns a truth value (0 or 1
3266 for scalars, 0 or -1 for vectors). Return the folded expression if
3267 folding is successful. Otherwise, return NULL_TREE. */
3269 static tree
3270 fold_invert_truthvalue (location_t loc, tree arg)
3272 tree type = TREE_TYPE (arg);
3273 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3274 ? BIT_NOT_EXPR
3275 : TRUTH_NOT_EXPR,
3276 type, arg);
3279 /* Return a simplified tree node for the truth-negation of ARG. This
3280 never alters ARG itself. We assume that ARG is an operation that
3281 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3283 tree
3284 invert_truthvalue_loc (location_t loc, tree arg)
3286 if (TREE_CODE (arg) == ERROR_MARK)
3287 return arg;
3289 tree type = TREE_TYPE (arg);
3290 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3291 ? BIT_NOT_EXPR
3292 : TRUTH_NOT_EXPR,
3293 type, arg);
3296 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3297 operands are another bit-wise operation with a common input. If so,
3298 distribute the bit operations to save an operation and possibly two if
3299 constants are involved. For example, convert
3300 (A | B) & (A | C) into A | (B & C)
3301 Further simplification will occur if B and C are constants.
3303 If this optimization cannot be done, 0 will be returned. */
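/* For example, (x | 3) & (x | 5) is rewritten as x | (3 & 5), which
then folds further to x | 1, replacing two IORs and an AND with a
single IOR. */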
3305 static tree
3306 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3307 tree arg0, tree arg1)
3309 tree common;
3310 tree left, right;
3312 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3313 || TREE_CODE (arg0) == code
3314 || (TREE_CODE (arg0) != BIT_AND_EXPR
3315 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3316 return 0;
3318 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3320 common = TREE_OPERAND (arg0, 0);
3321 left = TREE_OPERAND (arg0, 1);
3322 right = TREE_OPERAND (arg1, 1);
3324 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3326 common = TREE_OPERAND (arg0, 0);
3327 left = TREE_OPERAND (arg0, 1);
3328 right = TREE_OPERAND (arg1, 0);
3330 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3332 common = TREE_OPERAND (arg0, 1);
3333 left = TREE_OPERAND (arg0, 0);
3334 right = TREE_OPERAND (arg1, 1);
3336 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3338 common = TREE_OPERAND (arg0, 1);
3339 left = TREE_OPERAND (arg0, 0);
3340 right = TREE_OPERAND (arg1, 0);
3342 else
3343 return 0;
3345 common = fold_convert_loc (loc, type, common);
3346 left = fold_convert_loc (loc, type, left);
3347 right = fold_convert_loc (loc, type, right);
3348 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3349 fold_build2_loc (loc, code, type, left, right));
3352 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3353 with code CODE. This optimization is unsafe. */
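/* For example, (a / c) + (b / c) becomes (a + b) / c, and
(a / 2.0) + (a / 4.0) becomes a * 0.75. Both transformations can
change rounding, and the first can change overflow behavior, hence
the "unsafe" caveat above. */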
3354 static tree
3355 distribute_real_division (location_t loc, enum tree_code code, tree type,
3356 tree arg0, tree arg1)
3358 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3359 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3361 /* (A / C) +- (B / C) -> (A +- B) / C. */
3362 if (mul0 == mul1
3363 && operand_equal_p (TREE_OPERAND (arg0, 1),
3364 TREE_OPERAND (arg1, 1), 0))
3365 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3366 fold_build2_loc (loc, code, type,
3367 TREE_OPERAND (arg0, 0),
3368 TREE_OPERAND (arg1, 0)),
3369 TREE_OPERAND (arg0, 1));
3371 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3372 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3373 TREE_OPERAND (arg1, 0), 0)
3374 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3375 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3377 REAL_VALUE_TYPE r0, r1;
3378 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3379 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3380 if (!mul0)
3381 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3382 if (!mul1)
3383 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3384 real_arithmetic (&r0, code, &r0, &r1);
3385 return fold_build2_loc (loc, MULT_EXPR, type,
3386 TREE_OPERAND (arg0, 0),
3387 build_real (type, r0));
3390 return NULL_TREE;
3393 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3394 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3396 static tree
3397 make_bit_field_ref (location_t loc, tree inner, tree type,
3398 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3400 tree result, bftype;
3402 if (bitpos == 0)
3404 tree size = TYPE_SIZE (TREE_TYPE (inner));
3405 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3406 || POINTER_TYPE_P (TREE_TYPE (inner)))
3407 && tree_fits_shwi_p (size)
3408 && tree_to_shwi (size) == bitsize)
3409 return fold_convert_loc (loc, type, inner);
3412 bftype = type;
3413 if (TYPE_PRECISION (bftype) != bitsize
3414 || TYPE_UNSIGNED (bftype) == !unsignedp)
3415 bftype = build_nonstandard_integer_type (bitsize, 0);
3417 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3418 size_int (bitsize), bitsize_int (bitpos));
3420 if (bftype != type)
3421 result = fold_convert_loc (loc, type, result);
3423 return result;
3426 /* Optimize a bit-field compare.
3428 There are two cases: First is a compare against a constant and the
3429 second is a comparison of two items where the fields are at the same
3430 bit position relative to the start of a chunk (byte, halfword, word)
3431 large enough to contain it. In these cases we can avoid the shift
3432 implicit in bitfield extractions.
3434 For constants, we emit a compare of the shifted constant with the
3435 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3436 compared. For two fields at the same position, we do the ANDs with the
3437 similar mask and compare the result of the ANDs.
3439 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3440 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3441 are the left and right operands of the comparison, respectively.
3443 If the optimization described above can be done, we return the resulting
3444 tree. Otherwise we return zero. */
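/* For example (illustrative), given struct { unsigned a : 3; } s, the
test s.a == 5 can load the chunk containing the field, AND it with a
mask selecting a's bits, and compare against 5 shifted into a's
position, avoiding the shift a plain bit-field extraction would
need. */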
3446 static tree
3447 optimize_bit_field_compare (location_t loc, enum tree_code code,
3448 tree compare_type, tree lhs, tree rhs)
3450 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3451 tree type = TREE_TYPE (lhs);
3452 tree unsigned_type;
3453 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3454 machine_mode lmode, rmode, nmode;
3455 int lunsignedp, runsignedp;
3456 int lvolatilep = 0, rvolatilep = 0;
3457 tree linner, rinner = NULL_TREE;
3458 tree mask;
3459 tree offset;
3461 /* Get all the information about the extractions being done. If the bit size
3462 is the same as the size of the underlying object, we aren't doing an
3463 extraction at all and so can do nothing. We also don't want to
3464 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3465 then will no longer be able to replace it. */
3466 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3467 &lunsignedp, &lvolatilep, false);
3468 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3469 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3470 return 0;
3472 if (!const_p)
3474 /* If this is not a constant, we can only do something if bit positions,
3475 sizes, and signedness are the same. */
3476 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3477 &runsignedp, &rvolatilep, false);
3479 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3480 || lunsignedp != runsignedp || offset != 0
3481 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3482 return 0;
3485 /* See if we can find a mode to refer to this field. We should be able to,
3486 but fail if we can't. */
3487 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3488 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3489 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3490 TYPE_ALIGN (TREE_TYPE (rinner))),
3491 word_mode, false);
3492 if (nmode == VOIDmode)
3493 return 0;
3495 /* Set signed and unsigned types of the precision of this mode for the
3496 shifts below. */
3497 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3499 /* Compute the bit position and size for the new reference and our offset
3500 within it. If the new reference is the same size as the original, we
3501 won't optimize anything, so return zero. */
3502 nbitsize = GET_MODE_BITSIZE (nmode);
3503 nbitpos = lbitpos & ~ (nbitsize - 1);
3504 lbitpos -= nbitpos;
3505 if (nbitsize == lbitsize)
3506 return 0;
3508 if (BYTES_BIG_ENDIAN)
3509 lbitpos = nbitsize - lbitsize - lbitpos;
3511 /* Make the mask to be used against the extracted field. */
3512 mask = build_int_cst_type (unsigned_type, -1);
3513 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3514 mask = const_binop (RSHIFT_EXPR, mask,
3515 size_int (nbitsize - lbitsize - lbitpos));
3517 if (! const_p)
3518 /* If not comparing with constant, just rework the comparison
3519 and return. */
3520 return fold_build2_loc (loc, code, compare_type,
3521 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3522 make_bit_field_ref (loc, linner,
3523 unsigned_type,
3524 nbitsize, nbitpos,
3525 1),
3526 mask),
3527 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3528 make_bit_field_ref (loc, rinner,
3529 unsigned_type,
3530 nbitsize, nbitpos,
3531 1),
3532 mask));
3534 /* Otherwise, we are handling the constant case. See if the constant is too
3535 big for the field. Warn and return a tree for 0 (false) if so. We do
3536 this not only for its own sake, but to avoid having to test for this
3537 error case below. If we didn't, we might generate wrong code.
3539 For unsigned fields, the constant shifted right by the field length should
3540 be all zero. For signed fields, the high-order bits should agree with
3541 the sign bit. */
3543 if (lunsignedp)
3545 if (wi::lrshift (rhs, lbitsize) != 0)
3547 warning (0, "comparison is always %d due to width of bit-field",
3548 code == NE_EXPR);
3549 return constant_boolean_node (code == NE_EXPR, compare_type);
3552 else
3554 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3555 if (tem != 0 && tem != -1)
3557 warning (0, "comparison is always %d due to width of bit-field",
3558 code == NE_EXPR);
3559 return constant_boolean_node (code == NE_EXPR, compare_type);
3563 /* Single-bit compares should always be against zero. */
3564 if (lbitsize == 1 && ! integer_zerop (rhs))
3566 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3567 rhs = build_int_cst (type, 0);
3570 /* Make a new bitfield reference, shift the constant over the
3571 appropriate number of bits and mask it with the computed mask
3572 (in case this was a signed field). If we changed it, make a new one. */
3573 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3575 rhs = const_binop (BIT_AND_EXPR,
3576 const_binop (LSHIFT_EXPR,
3577 fold_convert_loc (loc, unsigned_type, rhs),
3578 size_int (lbitpos)),
3579 mask);
3581 lhs = build2_loc (loc, code, compare_type,
3582 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3583 return lhs;
3586 /* Subroutine for fold_truth_andor_1: decode a field reference.
3588 If EXP is a comparison reference, we return the innermost reference.
3590 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3591 set to the starting bit number.
3593 If the innermost field can be completely contained in a mode-sized
3594 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3596 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3597 otherwise it is not changed.
3599 *PUNSIGNEDP is set to the signedness of the field.
3601 *PMASK is set to the mask used. This is either contained in a
3602 BIT_AND_EXPR or derived from the width of the field.
3604 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3606 Return 0 if this is not a component reference or is one that we can't
3607 do anything with. */
3609 static tree
3610 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3611 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3612 int *punsignedp, int *pvolatilep,
3613 tree *pmask, tree *pand_mask)
3615 tree outer_type = 0;
3616 tree and_mask = 0;
3617 tree mask, inner, offset;
3618 tree unsigned_type;
3619 unsigned int precision;
3621 /* All the optimizations using this function assume integer fields.
3622 There are problems with FP fields since the type_for_size call
3623 below can fail for, e.g., XFmode. */
3624 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3625 return 0;
3627 /* We are interested in the bare arrangement of bits, so strip everything
3628 that doesn't affect the machine mode. However, record the type of the
3629 outermost expression if it may matter below. */
3630 if (CONVERT_EXPR_P (exp)
3631 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3632 outer_type = TREE_TYPE (exp);
3633 STRIP_NOPS (exp);
3635 if (TREE_CODE (exp) == BIT_AND_EXPR)
3637 and_mask = TREE_OPERAND (exp, 1);
3638 exp = TREE_OPERAND (exp, 0);
3639 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3640 if (TREE_CODE (and_mask) != INTEGER_CST)
3641 return 0;
3644 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3645 punsignedp, pvolatilep, false);
3646 if ((inner == exp && and_mask == 0)
3647 || *pbitsize < 0 || offset != 0
3648 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3649 return 0;
3651 /* If the number of bits in the reference is the same as the bitsize of
3652 the outer type, then the outer type gives the signedness. Otherwise
3653 (in case of a small bitfield) the signedness is unchanged. */
3654 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3655 *punsignedp = TYPE_UNSIGNED (outer_type);
3657 /* Compute the mask to access the bitfield. */
3658 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3659 precision = TYPE_PRECISION (unsigned_type);
3661 mask = build_int_cst_type (unsigned_type, -1);
3663 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3664 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3666 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3667 if (and_mask != 0)
3668 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3669 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3671 *pmask = mask;
3672 *pand_mask = and_mask;
3673 return inner;
3676 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3677 bit positions and MASK is SIGNED. */
3679 static int
3680 all_ones_mask_p (const_tree mask, unsigned int size)
3682 tree type = TREE_TYPE (mask);
3683 unsigned int precision = TYPE_PRECISION (type);
3685 /* If this function returns true when the type of the mask is
3686 UNSIGNED, then there will be errors. In particular see
3687 gcc.c-torture/execute/990326-1.c. There does not appear to be
3688 any documentation paper trail as to why this is so. But the
3689 pre-wide-int code worked with that restriction and it has been preserved
3690 here. */
3691 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3692 return false;
3694 return wi::mask (size, false, precision) == mask;
3697 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3698 represents the sign bit of EXP's type. If EXP represents a sign
3699 or zero extension, also test VAL against the unextended type.
3700 The return value is the (sub)expression whose sign bit is VAL,
3701 or NULL_TREE otherwise. */
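/* For example, for a 32-bit int EXP with no narrowing conversion
inside, this returns EXP only when VAL is the INTEGER_CST with just
bit 31 set (i.e. INT_MIN); any other value yields NULL_TREE. */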
3703 static tree
3704 sign_bit_p (tree exp, const_tree val)
3706 int width;
3707 tree t;
3709 /* Tree EXP must have an integral type. */
3710 t = TREE_TYPE (exp);
3711 if (! INTEGRAL_TYPE_P (t))
3712 return NULL_TREE;
3714 /* Tree VAL must be an integer constant. */
3715 if (TREE_CODE (val) != INTEGER_CST
3716 || TREE_OVERFLOW (val))
3717 return NULL_TREE;
3719 width = TYPE_PRECISION (t);
3720 if (wi::only_sign_bit_p (val, width))
3721 return exp;
3723 /* Handle extension from a narrower type. */
3724 if (TREE_CODE (exp) == NOP_EXPR
3725 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3726 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3728 return NULL_TREE;
3731 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3732 to be evaluated unconditionally. */
3734 static int
3735 simple_operand_p (const_tree exp)
3737 /* Strip any conversions that don't change the machine mode. */
3738 STRIP_NOPS (exp);
3740 return (CONSTANT_CLASS_P (exp)
3741 || TREE_CODE (exp) == SSA_NAME
3742 || (DECL_P (exp)
3743 && ! TREE_ADDRESSABLE (exp)
3744 && ! TREE_THIS_VOLATILE (exp)
3745 && ! DECL_NONLOCAL (exp)
3746 /* Don't regard global variables as simple. They may be
3747 allocated in ways unknown to the compiler (shared memory,
3748 #pragma weak, etc). */
3749 && ! TREE_PUBLIC (exp)
3750 && ! DECL_EXTERNAL (exp)
3751 /* Weakrefs are not safe to read, since they can be NULL.
3752 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3753 have the DECL_WEAK flag set. */
3754 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3755 /* Loading a static variable is unduly expensive, but global
3756 registers aren't expensive. */
3757 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3760 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3761 to be evaluated unconditionally.
3762 In addition to simple_operand_p, we assume that comparisons, conversions,
3763 and logic-not operations are simple if their operands are simple, too. */
3765 static bool
3766 simple_operand_p_2 (tree exp)
3768 enum tree_code code;
3770 if (TREE_SIDE_EFFECTS (exp)
3771 || tree_could_trap_p (exp))
3772 return false;
3774 while (CONVERT_EXPR_P (exp))
3775 exp = TREE_OPERAND (exp, 0);
3777 code = TREE_CODE (exp);
3779 if (TREE_CODE_CLASS (code) == tcc_comparison)
3780 return (simple_operand_p (TREE_OPERAND (exp, 0))
3781 && simple_operand_p (TREE_OPERAND (exp, 1)));
3783 if (code == TRUTH_NOT_EXPR)
3784 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3786 return simple_operand_p (exp);
3790 /* The following functions are subroutines to fold_range_test and allow it to
3791 try to change a logical combination of comparisons into a range test.
3793 For example, both
3794 X == 2 || X == 3 || X == 4 || X == 5
3795 and
3796 X >= 2 && X <= 5
3797 are converted to
3798 (unsigned) (X - 2) <= 3
3800 We describe each set of comparisons as being either inside or outside
3801 a range, using a variable named like IN_P, and then describe the
3802 range with a lower and upper bound. If one of the bounds is omitted,
3803 it represents either the highest or lowest value of the type.
3805 In the comments below, we represent a range by two numbers in brackets
3806 preceded by a "+" to designate being inside that range, or a "-" to
3807 designate being outside that range, so the condition can be inverted by
3808 flipping the prefix. An omitted bound is represented by a "-". For
3809 example, "- [-, 10]" means being outside the range starting at the lowest
3810 possible value and ending at 10, in other words, being greater than 10.
3811 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3812 always false.
3814 We set up things so that the missing bounds are handled in a consistent
3815 manner so neither a missing bound nor "true" and "false" need to be
3816 handled using a special case. */
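/* As a concrete (purely illustrative) rendering of the example above,
   all three of these plain-C predicates are equivalent:

     int f1 (int x) { return x == 2 || x == 3 || x == 4 || x == 5; }
     int f2 (int x) { return x >= 2 && x <= 5; }
     int f3 (int x) { return (unsigned) x - 2u <= 3u; }

   Subtracting the low bound shifts the range to start at zero, and a
   single unsigned comparison then tests both bounds at once.  */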
3818 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3819 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3820 and UPPER1_P are nonzero if the respective argument is an upper bound
3821 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3822 must be specified for a comparison. ARG1 will be converted to ARG0's
3823 type if both are specified. */
3825 static tree
3826 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3827 tree arg1, int upper1_p)
3829 tree tem;
3830 int result;
3831 int sgn0, sgn1;
3833 /* If neither arg represents infinity, do the normal operation.
3834 Else, if not a comparison, return infinity. Else handle the special
3835 comparison rules. Note that most of the cases below won't occur, but
3836 are handled for consistency. */
3838 if (arg0 != 0 && arg1 != 0)
3840 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3841 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3842 STRIP_NOPS (tem);
3843 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3846 if (TREE_CODE_CLASS (code) != tcc_comparison)
3847 return 0;
3849 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
3850 omitted upper bound, and 0 if the bound is present. In real maths, we
3851 cannot assume open-ended ranges are the same. But this is computer
3852 arithmetic, where numbers are finite. We can therefore substitute for
3853 any unbounded bound a value Z greater than any representable number.
3854 This permits us to treat unbounded ranges as equal. */
3855 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3856 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3857 switch (code)
3859 case EQ_EXPR:
3860 result = sgn0 == sgn1;
3861 break;
3862 case NE_EXPR:
3863 result = sgn0 != sgn1;
3864 break;
3865 case LT_EXPR:
3866 result = sgn0 < sgn1;
3867 break;
3868 case LE_EXPR:
3869 result = sgn0 <= sgn1;
3870 break;
3871 case GT_EXPR:
3872 result = sgn0 > sgn1;
3873 break;
3874 case GE_EXPR:
3875 result = sgn0 >= sgn1;
3876 break;
3877 default:
3878 gcc_unreachable ();
3881 return constant_boolean_node (result, type);
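/* E.g. two omitted upper bounds compare EQ_EXPR-equal (both get
   SGN == 1, "plus infinity"), while LT_EXPR of an omitted upper bound
   against an omitted lower bound is false, since 1 < -1 fails.  */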
3884 /* Helper routine for make_range. Perform one step for it, return
3885 new expression if the loop should continue or NULL_TREE if it should
3886 stop. */
3888 tree
3889 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3890 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3891 bool *strict_overflow_p)
3893 tree arg0_type = TREE_TYPE (arg0);
3894 tree n_low, n_high, low = *p_low, high = *p_high;
3895 int in_p = *p_in_p, n_in_p;
3897 switch (code)
3899 case TRUTH_NOT_EXPR:
3900 /* We can only do something if the range is testing for zero. */
3901 if (low == NULL_TREE || high == NULL_TREE
3902 || ! integer_zerop (low) || ! integer_zerop (high))
3903 return NULL_TREE;
3904 *p_in_p = ! in_p;
3905 return arg0;
3907 case EQ_EXPR: case NE_EXPR:
3908 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3909 /* We can only do something if the range is testing for zero
3910 and if the second operand is an integer constant. Note that
3911 saying something is "in" the range we make is done by
3912 complementing IN_P, since IN_P is set for the initial case of
3913 being not equal to zero; "out" means leaving it alone. */
3914 if (low == NULL_TREE || high == NULL_TREE
3915 || ! integer_zerop (low) || ! integer_zerop (high)
3916 || TREE_CODE (arg1) != INTEGER_CST)
3917 return NULL_TREE;
3919 switch (code)
3921 case NE_EXPR: /* - [c, c] */
3922 low = high = arg1;
3923 break;
3924 case EQ_EXPR: /* + [c, c] */
3925 in_p = ! in_p, low = high = arg1;
3926 break;
3927 case GT_EXPR: /* - [-, c] */
3928 low = 0, high = arg1;
3929 break;
3930 case GE_EXPR: /* + [c, -] */
3931 in_p = ! in_p, low = arg1, high = 0;
3932 break;
3933 case LT_EXPR: /* - [c, -] */
3934 low = arg1, high = 0;
3935 break;
3936 case LE_EXPR: /* + [-, c] */
3937 in_p = ! in_p, low = 0, high = arg1;
3938 break;
3939 default:
3940 gcc_unreachable ();
3943 /* If this is an unsigned comparison, we also know that EXP is
3944 greater than or equal to zero. We base the range tests we make
3945 on that fact, so we record it here so we can parse existing
3946 range tests. We test arg0_type since often the return type
3947 of, e.g. EQ_EXPR, is boolean. */
3948 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3950 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3951 in_p, low, high, 1,
3952 build_int_cst (arg0_type, 0),
3953 NULL_TREE))
3954 return NULL_TREE;
3956 in_p = n_in_p, low = n_low, high = n_high;
3958 /* If the high bound is missing, but we have a nonzero low
3959 bound, reverse the range so it goes from zero to the low bound
3960 minus 1. */
3961 if (high == 0 && low && ! integer_zerop (low))
3963 in_p = ! in_p;
3964 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3965 build_int_cst (TREE_TYPE (low), 1), 0);
3966 low = build_int_cst (arg0_type, 0);
3970 *p_low = low;
3971 *p_high = high;
3972 *p_in_p = in_p;
3973 return arg0;
3975 case NEGATE_EXPR:
3976 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3977 low and high are non-NULL, then normalize will do the right thing. */
3978 if (!TYPE_UNSIGNED (arg0_type)
3979 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3981 if (low == NULL_TREE)
3982 low = TYPE_MIN_VALUE (arg0_type);
3983 if (high == NULL_TREE)
3984 high = TYPE_MAX_VALUE (arg0_type);
3987 /* (-x) IN [a,b] -> x in [-b, -a] */
3988 n_low = range_binop (MINUS_EXPR, exp_type,
3989 build_int_cst (exp_type, 0),
3990 0, high, 1);
3991 n_high = range_binop (MINUS_EXPR, exp_type,
3992 build_int_cst (exp_type, 0),
3993 0, low, 0);
3994 if (n_high != 0 && TREE_OVERFLOW (n_high))
3995 return NULL_TREE;
3996 goto normalize;
3998 case BIT_NOT_EXPR:
3999 /* ~ X -> -X - 1 */
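/* (In two's complement ~x == -x - 1 for every x, so the range test on
   ~ X can be restated as one on -X - 1 and normalized like MINUS.)  */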
4000 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4001 build_int_cst (exp_type, 1));
4003 case PLUS_EXPR:
4004 case MINUS_EXPR:
4005 if (TREE_CODE (arg1) != INTEGER_CST)
4006 return NULL_TREE;
4008 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4009 move a constant to the other side. */
4010 if (!TYPE_UNSIGNED (arg0_type)
4011 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4012 return NULL_TREE;
4014 /* If EXP is signed, any overflow in the computation is undefined,
4015 so we don't worry about it so long as our computations on
4016 the bounds don't overflow. For unsigned, overflow is defined
4017 and this is exactly the right thing. */
4018 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4019 arg0_type, low, 0, arg1, 0);
4020 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4021 arg0_type, high, 1, arg1, 0);
4022 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4023 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4024 return NULL_TREE;
4026 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4027 *strict_overflow_p = true;
4029 normalize:
4030 /* Check for an unsigned range which has wrapped around the maximum
4031 value thus making n_high < n_low, and normalize it. */
4032 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4034 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4035 build_int_cst (TREE_TYPE (n_high), 1), 0);
4036 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4037 build_int_cst (TREE_TYPE (n_low), 1), 0);
4039 /* If the range is of the form +/- [ x+1, x ], we won't
4040 be able to normalize it. But then, it represents the
4041 whole range or the empty set, so make it
4042 +/- [ -, - ]. */
4043 if (tree_int_cst_equal (n_low, low)
4044 && tree_int_cst_equal (n_high, high))
4045 low = high = 0;
4046 else
4047 in_p = ! in_p;
4049 else
4050 low = n_low, high = n_high;
4052 *p_low = low;
4053 *p_high = high;
4054 *p_in_p = in_p;
4055 return arg0;
4057 CASE_CONVERT:
4058 case NON_LVALUE_EXPR:
4059 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4060 return NULL_TREE;
4062 if (! INTEGRAL_TYPE_P (arg0_type)
4063 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4064 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4065 return NULL_TREE;
4067 n_low = low, n_high = high;
4069 if (n_low != 0)
4070 n_low = fold_convert_loc (loc, arg0_type, n_low);
4072 if (n_high != 0)
4073 n_high = fold_convert_loc (loc, arg0_type, n_high);
4075 /* If we're converting arg0 from an unsigned type to exp's
4076 signed type, we will be doing the comparison as unsigned.
4077 The tests above have already verified that LOW and HIGH
4078 are both positive.
4080 So we have to ensure that we will handle large unsigned
4081 values the same way that the current signed bounds treat
4082 negative values. */
4084 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4086 tree high_positive;
4087 tree equiv_type;
4088 /* For fixed-point modes, we need to pass the saturating flag
4089 as the 2nd parameter. */
4090 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4091 equiv_type
4092 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4093 TYPE_SATURATING (arg0_type));
4094 else
4095 equiv_type
4096 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4098 /* A range without an upper bound is, naturally, unbounded.
4099 Since convert would have cropped a very large value, use
4100 the max value for the destination type. */
4101 high_positive
4102 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4103 : TYPE_MAX_VALUE (arg0_type);
4105 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4106 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4107 fold_convert_loc (loc, arg0_type,
4108 high_positive),
4109 build_int_cst (arg0_type, 1));
4111 /* If the low bound is specified, "and" the range with the
4112 range for which the original unsigned value will be
4113 positive. */
4114 if (low != 0)
4116 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4117 1, fold_convert_loc (loc, arg0_type,
4118 integer_zero_node),
4119 high_positive))
4120 return NULL_TREE;
4122 in_p = (n_in_p == in_p);
4124 else
4126 /* Otherwise, "or" the range with the range of the input
4127 that will be interpreted as negative. */
4128 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4129 1, fold_convert_loc (loc, arg0_type,
4130 integer_zero_node),
4131 high_positive))
4132 return NULL_TREE;
4134 in_p = (in_p != n_in_p);
4138 *p_low = n_low;
4139 *p_high = n_high;
4140 *p_in_p = in_p;
4141 return arg0;
4143 default:
4144 return NULL_TREE;
4148 /* Given EXP, a logical expression, set the range it is testing into
4149 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4150 actually being tested. *PLOW and *PHIGH will be made of the same
4151 type as the returned expression. If EXP is not a comparison, we
4152 will most likely not be returning a useful value and range. Set
4153 *STRICT_OVERFLOW_P to true if the return value is only valid
4154 because signed overflow is undefined; otherwise, do not change
4155 *STRICT_OVERFLOW_P. */
4157 tree
4158 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4159 bool *strict_overflow_p)
4161 enum tree_code code;
4162 tree arg0, arg1 = NULL_TREE;
4163 tree exp_type, nexp;
4164 int in_p;
4165 tree low, high;
4166 location_t loc = EXPR_LOCATION (exp);
4168 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4169 and see if we can refine the range. Some of the cases below may not
4170 happen, but it doesn't seem worth worrying about this. We "continue"
4171 the outer loop when we've changed something; otherwise we "break"
4172 the switch, which will "break" the while. */
4174 in_p = 0;
4175 low = high = build_int_cst (TREE_TYPE (exp), 0);
4177 while (1)
4179 code = TREE_CODE (exp);
4180 exp_type = TREE_TYPE (exp);
4181 arg0 = NULL_TREE;
4183 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4185 if (TREE_OPERAND_LENGTH (exp) > 0)
4186 arg0 = TREE_OPERAND (exp, 0);
4187 if (TREE_CODE_CLASS (code) == tcc_binary
4188 || TREE_CODE_CLASS (code) == tcc_comparison
4189 || (TREE_CODE_CLASS (code) == tcc_expression
4190 && TREE_OPERAND_LENGTH (exp) > 1))
4191 arg1 = TREE_OPERAND (exp, 1);
4193 if (arg0 == NULL_TREE)
4194 break;
4196 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4197 &high, &in_p, strict_overflow_p);
4198 if (nexp == NULL_TREE)
4199 break;
4200 exp = nexp;
4203 /* If EXP is a constant, we can evaluate whether this is true or false. */
4204 if (TREE_CODE (exp) == INTEGER_CST)
4206 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4207 exp, 0, low, 0))
4208 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4209 exp, 1, high, 1)));
4210 low = high = 0;
4211 exp = 0;
4214 *pin_p = in_p, *plow = low, *phigh = high;
4215 return exp;
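/* For example (illustrative): for EXP = x > 10 with int X, this returns
   X with *PIN_P = 0, *PLOW = NULL_TREE (minus infinity) and *PHIGH = 10,
   i.e. "X is not in [-, 10]".  */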
4218 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4219 type, TYPE, return an expression to test if EXP is in (or out of, depending
4220 on IN_P) the range. Return 0 if the test couldn't be created. */
4222 tree
4223 build_range_check (location_t loc, tree type, tree exp, int in_p,
4224 tree low, tree high)
4226 tree etype = TREE_TYPE (exp), value;
4228 #ifdef HAVE_canonicalize_funcptr_for_compare
4229 /* Disable this optimization for function pointer expressions
4230 on targets that require function pointer canonicalization. */
4231 if (HAVE_canonicalize_funcptr_for_compare
4232 && TREE_CODE (etype) == POINTER_TYPE
4233 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4234 return NULL_TREE;
4235 #endif
4237 if (! in_p)
4239 value = build_range_check (loc, type, exp, 1, low, high);
4240 if (value != 0)
4241 return invert_truthvalue_loc (loc, value);
4243 return 0;
4246 if (low == 0 && high == 0)
4247 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4249 if (low == 0)
4250 return fold_build2_loc (loc, LE_EXPR, type, exp,
4251 fold_convert_loc (loc, etype, high));
4253 if (high == 0)
4254 return fold_build2_loc (loc, GE_EXPR, type, exp,
4255 fold_convert_loc (loc, etype, low));
4257 if (operand_equal_p (low, high, 0))
4258 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4259 fold_convert_loc (loc, etype, low));
4261 if (integer_zerop (low))
4263 if (! TYPE_UNSIGNED (etype))
4265 etype = unsigned_type_for (etype);
4266 high = fold_convert_loc (loc, etype, high);
4267 exp = fold_convert_loc (loc, etype, exp);
4269 return build_range_check (loc, type, exp, 1, 0, high);
4272 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4273 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4275 int prec = TYPE_PRECISION (etype);
4277 if (wi::mask (prec - 1, false, prec) == high)
4279 if (TYPE_UNSIGNED (etype))
4281 tree signed_etype = signed_type_for (etype);
4282 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4283 etype
4284 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4285 else
4286 etype = signed_etype;
4287 exp = fold_convert_loc (loc, etype, exp);
4289 return fold_build2_loc (loc, GT_EXPR, type, exp,
4290 build_int_cst (etype, 0));
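/* E.g. for an unsigned char C, c >= 1 && c <= 127 holds exactly when
   (signed char) c > 0, so the two bound checks collapse into a single
   signed comparison against zero.  */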
4294 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295 This requires wrap-around arithmetic for the type of the expression.
4296 First make sure that arithmetic in this type is valid, then make sure
4297 that it wraps around. */
4298 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4299 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4300 TYPE_UNSIGNED (etype));
4302 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4304 tree utype, minv, maxv;
4306 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4307 for the type in question, as we rely on this here. */
4308 utype = unsigned_type_for (etype);
4309 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4310 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4311 build_int_cst (TREE_TYPE (maxv), 1), 1);
4312 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4314 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4315 minv, 1, maxv, 1)))
4316 etype = utype;
4317 else
4318 return 0;
4321 high = fold_convert_loc (loc, etype, high);
4322 low = fold_convert_loc (loc, etype, low);
4323 exp = fold_convert_loc (loc, etype, exp);
4325 value = const_binop (MINUS_EXPR, high, low);
4328 if (POINTER_TYPE_P (etype))
4330 if (value != 0 && !TREE_OVERFLOW (value))
4332 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4333 return build_range_check (loc, type,
4334 fold_build_pointer_plus_loc (loc, exp, low),
4335 1, build_int_cst (etype, 0), value);
4337 return 0;
4340 if (value != 0 && !TREE_OVERFLOW (value))
4341 return build_range_check (loc, type,
4342 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4343 1, build_int_cst (etype, 0), value);
4345 return 0;
4348 /* Return the predecessor of VAL in its type, handling the infinite case. */
4350 static tree
4351 range_predecessor (tree val)
4353 tree type = TREE_TYPE (val);
4355 if (INTEGRAL_TYPE_P (type)
4356 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4357 return 0;
4358 else
4359 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4360 build_int_cst (TREE_TYPE (val), 1), 0);
4363 /* Return the successor of VAL in its type, handling the infinite case. */
4365 static tree
4366 range_successor (tree val)
4368 tree type = TREE_TYPE (val);
4370 if (INTEGRAL_TYPE_P (type)
4371 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4372 return 0;
4373 else
4374 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4375 build_int_cst (TREE_TYPE (val), 1), 0);
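/* E.g. range_successor on 41 returns 42, but on the type's maximum
   value it returns 0, meaning "no successor" (infinity); likewise
   range_predecessor returns 0 on the type's minimum value.  */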
4378 /* Given two ranges, see if we can merge them into one. Return 1 if we
4379 can, 0 if we can't. Set the output range into the specified parameters. */
4381 bool
4382 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4383 tree high0, int in1_p, tree low1, tree high1)
4385 int no_overlap;
4386 int subset;
4387 int temp;
4388 tree tem;
4389 int in_p;
4390 tree low, high;
4391 int lowequal = ((low0 == 0 && low1 == 0)
4392 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4393 low0, 0, low1, 0)));
4394 int highequal = ((high0 == 0 && high1 == 0)
4395 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4396 high0, 1, high1, 1)));
4398 /* Make range 0 be the range that starts first, or ends last if they
4399 start at the same value. Swap the two ranges if it isn't. */
4400 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4401 low0, 0, low1, 0))
4402 || (lowequal
4403 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4404 high1, 1, high0, 1))))
4406 temp = in0_p, in0_p = in1_p, in1_p = temp;
4407 tem = low0, low0 = low1, low1 = tem;
4408 tem = high0, high0 = high1, high1 = tem;
4411 /* Now flag two cases, whether the ranges are disjoint or whether the
4412 second range is totally subsumed in the first. Note that the tests
4413 below are simplified by the ones above. */
4414 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4415 high0, 1, low1, 0));
4416 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4417 high1, 1, high0, 1));
4419 /* We now have four cases, depending on whether we are including or
4420 excluding the two ranges. */
4421 if (in0_p && in1_p)
4423 /* If they don't overlap, the result is false. If the second range
4424 is a subset it is the result. Otherwise, the range is from the start
4425 of the second to the end of the first. */
4426 if (no_overlap)
4427 in_p = 0, low = high = 0;
4428 else if (subset)
4429 in_p = 1, low = low1, high = high1;
4430 else
4431 in_p = 1, low = low1, high = high0;
4434 else if (in0_p && ! in1_p)
4436 /* If they don't overlap, the result is the first range. If they are
4437 equal, the result is false. If the second range is a subset of the
4438 first, and the ranges begin at the same place, we go from just after
4439 the end of the second range to the end of the first. If the second
4440 range is not a subset of the first, or if it is a subset and both
4441 ranges end at the same place, the range starts at the start of the
4442 first range and ends just before the second range.
4443 Otherwise, we can't describe this as a single range. */
4444 if (no_overlap)
4445 in_p = 1, low = low0, high = high0;
4446 else if (lowequal && highequal)
4447 in_p = 0, low = high = 0;
4448 else if (subset && lowequal)
4450 low = range_successor (high1);
4451 high = high0;
4452 in_p = 1;
4453 if (low == 0)
4455 /* We are in the weird situation where high0 > high1 but
4456 high1 has no successor. Punt. */
4457 return 0;
4460 else if (! subset || highequal)
4462 low = low0;
4463 high = range_predecessor (low1);
4464 in_p = 1;
4465 if (high == 0)
4467 /* low0 < low1 but low1 has no predecessor. Punt. */
4468 return 0;
4471 else
4472 return 0;
4475 else if (! in0_p && in1_p)
4477 /* If they don't overlap, the result is the second range. If the second
4478 is a subset of the first, the result is false. Otherwise,
4479 the range starts just after the first range and ends at the
4480 end of the second. */
4481 if (no_overlap)
4482 in_p = 1, low = low1, high = high1;
4483 else if (subset || highequal)
4484 in_p = 0, low = high = 0;
4485 else
4487 low = range_successor (high0);
4488 high = high1;
4489 in_p = 1;
4490 if (low == 0)
4492 /* high1 > high0 but high0 has no successor. Punt. */
4493 return 0;
4498 else
4500 /* The case where we are excluding both ranges. Here the complex case
4501 is if they don't overlap. In that case, the only time we have a
4502 range is if they are adjacent. If the second is a subset of the
4503 first, the result is the first. Otherwise, the range to exclude
4504 starts at the beginning of the first range and ends at the end of the
4505 second. */
4506 if (no_overlap)
4508 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4509 range_successor (high0),
4510 1, low1, 0)))
4511 in_p = 0, low = low0, high = high1;
4512 else
4514 /* Canonicalize - [min, x] into - [-, x]. */
4515 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4516 switch (TREE_CODE (TREE_TYPE (low0)))
4518 case ENUMERAL_TYPE:
4519 if (TYPE_PRECISION (TREE_TYPE (low0))
4520 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4521 break;
4522 /* FALLTHROUGH */
4523 case INTEGER_TYPE:
4524 if (tree_int_cst_equal (low0,
4525 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4526 low0 = 0;
4527 break;
4528 case POINTER_TYPE:
4529 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4530 && integer_zerop (low0))
4531 low0 = 0;
4532 break;
4533 default:
4534 break;
4537 /* Canonicalize - [x, max] into - [x, -]. */
4538 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4539 switch (TREE_CODE (TREE_TYPE (high1)))
4541 case ENUMERAL_TYPE:
4542 if (TYPE_PRECISION (TREE_TYPE (high1))
4543 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4544 break;
4545 /* FALLTHROUGH */
4546 case INTEGER_TYPE:
4547 if (tree_int_cst_equal (high1,
4548 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4549 high1 = 0;
4550 break;
4551 case POINTER_TYPE:
4552 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4553 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4554 high1, 1,
4555 build_int_cst (TREE_TYPE (high1), 1),
4556 1)))
4557 high1 = 0;
4558 break;
4559 default:
4560 break;
4563 /* The ranges might also be adjacent between the maximum and
4564 minimum values of the given type. For
4565 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4566 return + [x + 1, y - 1]. */
4567 if (low0 == 0 && high1 == 0)
4569 low = range_successor (high0);
4570 high = range_predecessor (low1);
4571 if (low == 0 || high == 0)
4572 return 0;
4574 in_p = 1;
4576 else
4577 return 0;
4580 else if (subset)
4581 in_p = 0, low = low0, high = high0;
4582 else
4583 in_p = 0, low = low0, high = high1;
4586 *pin_p = in_p, *plow = low, *phigh = high;
4587 return 1;
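/* A worked example: merging + [2, 5] with + [4, 9] (the "and" of two
   included ranges) finds overlap without subsumption, so the result
   runs from the start of the second range to the end of the first:
   + [4, 5].  */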
4591 /* Subroutine of fold, looking inside expressions of the form
4592 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4593 of the COND_EXPR. This function is also used to optimize
4594 A op B ? C : A, by reversing the comparison first.
4596 Return a folded expression whose code is not a COND_EXPR
4597 anymore, or NULL_TREE if no folding opportunity is found. */
4599 static tree
4600 fold_cond_expr_with_comparison (location_t loc, tree type,
4601 tree arg0, tree arg1, tree arg2)
4603 enum tree_code comp_code = TREE_CODE (arg0);
4604 tree arg00 = TREE_OPERAND (arg0, 0);
4605 tree arg01 = TREE_OPERAND (arg0, 1);
4606 tree arg1_type = TREE_TYPE (arg1);
4607 tree tem;
4609 STRIP_NOPS (arg1);
4610 STRIP_NOPS (arg2);
4612 /* If we have A op 0 ? A : -A, consider applying the following
4613 transformations:
4615 A == 0? A : -A same as -A
4616 A != 0? A : -A same as A
4617 A >= 0? A : -A same as abs (A)
4618 A > 0? A : -A same as abs (A)
4619 A <= 0? A : -A same as -abs (A)
4620 A < 0? A : -A same as -abs (A)
4622 None of these transformations work for modes with signed
4623 zeros. If A is +/-0, the first two transformations will
4624 change the sign of the result (from +0 to -0, or vice
4625 versa). The last four will fix the sign of the result,
4626 even though the original expressions could be positive or
4627 negative, depending on the sign of A.
4629 Note that all these transformations are correct if A is
4630 NaN, since the two alternatives (A and -A) are also NaNs. */
4631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4632 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4633 ? real_zerop (arg01)
4634 : integer_zerop (arg01))
4635 && ((TREE_CODE (arg2) == NEGATE_EXPR
4636 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4637 /* In the case that A is of the form X-Y, '-A' (arg2) may
4638 have already been folded to Y-X, check for that. */
4639 || (TREE_CODE (arg1) == MINUS_EXPR
4640 && TREE_CODE (arg2) == MINUS_EXPR
4641 && operand_equal_p (TREE_OPERAND (arg1, 0),
4642 TREE_OPERAND (arg2, 1), 0)
4643 && operand_equal_p (TREE_OPERAND (arg1, 1),
4644 TREE_OPERAND (arg2, 0), 0))))
4645 switch (comp_code)
4647 case EQ_EXPR:
4648 case UNEQ_EXPR:
4649 tem = fold_convert_loc (loc, arg1_type, arg1);
4650 return pedantic_non_lvalue_loc (loc,
4651 fold_convert_loc (loc, type,
4652 negate_expr (tem)));
4653 case NE_EXPR:
4654 case LTGT_EXPR:
4655 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4656 case UNGE_EXPR:
4657 case UNGT_EXPR:
4658 if (flag_trapping_math)
4659 break;
4660 /* Fall through. */
4661 case GE_EXPR:
4662 case GT_EXPR:
4663 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4664 arg1 = fold_convert_loc (loc, signed_type_for
4665 (TREE_TYPE (arg1)), arg1);
4666 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4667 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4668 case UNLE_EXPR:
4669 case UNLT_EXPR:
4670 if (flag_trapping_math)
4671 break;
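/* Fall through. */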
4672 case LE_EXPR:
4673 case LT_EXPR:
4674 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4675 arg1 = fold_convert_loc (loc, signed_type_for
4676 (TREE_TYPE (arg1)), arg1);
4677 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4678 return negate_expr (fold_convert_loc (loc, type, tem));
4679 default:
4680 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4681 break;
4684 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4685 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4686 both transformations are correct when A is NaN: A != 0
4687 is then true, and A == 0 is false. */
4689 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4690 && integer_zerop (arg01) && integer_zerop (arg2))
4692 if (comp_code == NE_EXPR)
4693 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4694 else if (comp_code == EQ_EXPR)
4695 return build_zero_cst (type);
4698 /* Try some transformations of A op B ? A : B.
4700 A == B? A : B same as B
4701 A != B? A : B same as A
4702 A >= B? A : B same as max (A, B)
4703 A > B? A : B same as max (B, A)
4704 A <= B? A : B same as min (A, B)
4705 A < B? A : B same as min (B, A)
4707 As above, these transformations don't work in the presence
4708 of signed zeros. For example, if A and B are zeros of
4709 opposite sign, the first two transformations will change
4710 the sign of the result. In the last four, the original
4711 expressions give different results for (A=+0, B=-0) and
4712 (A=-0, B=+0), but the transformed expressions do not.
4714 The first two transformations are correct if either A or B
4715 is a NaN. In the first transformation, the condition will
4716 be false, and B will indeed be chosen. In the case of the
4717 second transformation, the condition A != B will be true,
4718 and A will be chosen.
4720 The conversions to max() and min() are not correct if B is
4721 a number and A is not. The conditions in the original
4722 expressions will be false, so all four give B. The min()
4723 and max() versions would give a NaN instead. */
4724 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4725 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4726 /* Avoid these transformations if the COND_EXPR may be used
4727 as an lvalue in the C++ front-end. PR c++/19199. */
4728 && (in_gimple_form
4729 || VECTOR_TYPE_P (type)
4730 || (strcmp (lang_hooks.name, "GNU C++") != 0
4731 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4732 || ! maybe_lvalue_p (arg1)
4733 || ! maybe_lvalue_p (arg2)))
4735 tree comp_op0 = arg00;
4736 tree comp_op1 = arg01;
4737 tree comp_type = TREE_TYPE (comp_op0);
4739 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4740 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4742 comp_type = type;
4743 comp_op0 = arg1;
4744 comp_op1 = arg2;
4747 switch (comp_code)
4749 case EQ_EXPR:
4750 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4751 case NE_EXPR:
4752 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4753 case LE_EXPR:
4754 case LT_EXPR:
4755 case UNLE_EXPR:
4756 case UNLT_EXPR:
4757 /* In C++ a ?: expression can be an lvalue, so put the
4758 operand which will be used if they are equal first
4759 so that we can convert this back to the
4760 corresponding COND_EXPR. */
4761 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4763 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4764 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4765 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4766 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4767 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4768 comp_op1, comp_op0);
4769 return pedantic_non_lvalue_loc (loc,
4770 fold_convert_loc (loc, type, tem));
4772 break;
4773 case GE_EXPR:
4774 case GT_EXPR:
4775 case UNGE_EXPR:
4776 case UNGT_EXPR:
4777 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4779 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4780 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4781 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4782 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4783 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4784 comp_op1, comp_op0);
4785 return pedantic_non_lvalue_loc (loc,
4786 fold_convert_loc (loc, type, tem));
4788 break;
4789 case UNEQ_EXPR:
4790 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4791 return pedantic_non_lvalue_loc (loc,
4792 fold_convert_loc (loc, type, arg2));
4793 break;
4794 case LTGT_EXPR:
4795 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4796 return pedantic_non_lvalue_loc (loc,
4797 fold_convert_loc (loc, type, arg1));
4798 break;
4799 default:
4800 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4801 break;
4805 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4806 we might still be able to simplify this. For example,
4807 if C1 is one less or one more than C2, this might have started
4808 out as a MIN or MAX and been transformed by this function.
4809 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4811 if (INTEGRAL_TYPE_P (type)
4812 && TREE_CODE (arg01) == INTEGER_CST
4813 && TREE_CODE (arg2) == INTEGER_CST)
4814 switch (comp_code)
4816 case EQ_EXPR:
4817 if (TREE_CODE (arg1) == INTEGER_CST)
4818 break;
4819 /* We can replace A with C1 in this case. */
4820 arg1 = fold_convert_loc (loc, type, arg01);
4821 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4823 case LT_EXPR:
4824 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4825 MIN_EXPR, to preserve the signedness of the comparison. */
4826 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4827 OEP_ONLY_CONST)
4828 && operand_equal_p (arg01,
4829 const_binop (PLUS_EXPR, arg2,
4830 build_int_cst (type, 1)),
4831 OEP_ONLY_CONST))
4833 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4834 fold_convert_loc (loc, TREE_TYPE (arg00),
4835 arg2));
4836 return pedantic_non_lvalue_loc (loc,
4837 fold_convert_loc (loc, type, tem));
4839 break;
4841 case LE_EXPR:
4842 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4843 as above. */
4844 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4845 OEP_ONLY_CONST)
4846 && operand_equal_p (arg01,
4847 const_binop (MINUS_EXPR, arg2,
4848 build_int_cst (type, 1)),
4849 OEP_ONLY_CONST))
4851 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4852 fold_convert_loc (loc, TREE_TYPE (arg00),
4853 arg2));
4854 return pedantic_non_lvalue_loc (loc,
4855 fold_convert_loc (loc, type, tem));
4857 break;
4859 case GT_EXPR:
4860 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4861 MAX_EXPR, to preserve the signedness of the comparison. */
4862 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4863 OEP_ONLY_CONST)
4864 && operand_equal_p (arg01,
4865 const_binop (MINUS_EXPR, arg2,
4866 build_int_cst (type, 1)),
4867 OEP_ONLY_CONST))
4869 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4870 fold_convert_loc (loc, TREE_TYPE (arg00),
4871 arg2));
4872 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4874 break;
4876 case GE_EXPR:
4877 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4878 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4879 OEP_ONLY_CONST)
4880 && operand_equal_p (arg01,
4881 const_binop (PLUS_EXPR, arg2,
4882 build_int_cst (type, 1)),
4883 OEP_ONLY_CONST))
4885 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4886 fold_convert_loc (loc, TREE_TYPE (arg00),
4887 arg2));
4888 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4890 break;
4891 case NE_EXPR:
4892 break;
4893 default:
4894 gcc_unreachable ();
4897 return NULL_TREE;
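/* A small plain-C illustration of the A op 0 ? A : -A table above:

     int iabs (int x) { return x >= 0 ? x : -x; }

   folds to ABS_EXPR <x>; the x > 0 form folds the same way, and the
   <= / < forms fold to the negated ABS_EXPR.  */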
4902 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4903 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4904 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4905 false) >= 2)
4906 #endif
4908 /* EXP is some logical combination of boolean tests. See if we can
4909 merge it into some range test. Return the new tree if so. */
4911 static tree
4912 fold_range_test (location_t loc, enum tree_code code, tree type,
4913 tree op0, tree op1)
4915 int or_op = (code == TRUTH_ORIF_EXPR
4916 || code == TRUTH_OR_EXPR);
4917 int in0_p, in1_p, in_p;
4918 tree low0, low1, low, high0, high1, high;
4919 bool strict_overflow_p = false;
4920 tree tem, lhs, rhs;
4921 const char * const warnmsg = G_("assuming signed overflow does not occur "
4922 "when simplifying range test");
4924 if (!INTEGRAL_TYPE_P (type))
4925 return 0;
4927 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4928 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4930 /* If this is an OR operation, invert both sides; we will invert
4931 again at the end. */
4932 if (or_op)
4933 in0_p = ! in0_p, in1_p = ! in1_p;
4935 /* If both expressions are the same, if we can merge the ranges, and we
4936 can build the range test, return it or it inverted. If one of the
4937 ranges is always true or always false, consider it to be the same
4938 expression as the other. */
4939 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4940 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4941 in1_p, low1, high1)
4942 && 0 != (tem = (build_range_check (loc, type,
4943 lhs != 0 ? lhs
4944 : rhs != 0 ? rhs : integer_zero_node,
4945 in_p, low, high))))
4947 if (strict_overflow_p)
4948 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4949 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4952 /* On machines where the branch cost is expensive, if this is a
4953 short-circuited branch and the underlying object on both sides
4954 is the same, make a non-short-circuit operation. */
4955 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4956 && lhs != 0 && rhs != 0
4957 && (code == TRUTH_ANDIF_EXPR
4958 || code == TRUTH_ORIF_EXPR)
4959 && operand_equal_p (lhs, rhs, 0))
4961 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4962 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4963 which cases we can't do this. */
4964 if (simple_operand_p (lhs))
4965 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4966 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4967 type, op0, op1);
4969 else if (!lang_hooks.decls.global_bindings_p ()
4970 && !CONTAINS_PLACEHOLDER_P (lhs))
4972 tree common = save_expr (lhs);
4974 if (0 != (lhs = build_range_check (loc, type, common,
4975 or_op ? ! in0_p : in0_p,
4976 low0, high0))
4977 && (0 != (rhs = build_range_check (loc, type, common,
4978 or_op ? ! in1_p : in1_p,
4979 low1, high1))))
4981 if (strict_overflow_p)
4982 fold_overflow_warning (warnmsg,
4983 WARN_STRICT_OVERFLOW_COMPARISON);
4984 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4985 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4986 type, lhs, rhs);
4991 return 0;
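/* An illustrative "or" case: for x < 2 || x > 5 both sides are
   inverted, merged into the single range + [2, 5], and the result
   re-inverted, giving the equivalent of (unsigned) x - 2u > 3u.  */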
4994 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4995 bit value. Arrange things so the extra bits will be set to zero if and
4996 only if C is sign-extended to its full width. If MASK is nonzero,
4997 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4999 static tree
5000 unextend (tree c, int p, int unsignedp, tree mask)
5002 tree type = TREE_TYPE (c);
5003 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5004 tree temp;
5006 if (p == modesize || unsignedp)
5007 return c;
5009 /* We work by getting just the sign bit into the low-order bit, then
5010 into the high-order bit, then sign-extend. We then XOR that value
5011 with C. */
5012 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5014 /* We must use a signed type in order to get an arithmetic right shift.
5015 However, we must also avoid introducing accidental overflows, so that
5016 a subsequent call to integer_zerop will work. Hence we must
5017 do the type conversion here. At this point, the constant is either
5018 zero or one, and the conversion to a signed type can never overflow.
5019 We could get an overflow if this conversion is done anywhere else. */
5020 if (TYPE_UNSIGNED (type))
5021 temp = fold_convert (signed_type_for (type), temp);
5023 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5024 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5025 if (mask != 0)
5026 temp = const_binop (BIT_AND_EXPR, temp,
5027 fold_convert (TREE_TYPE (c), mask));
5028 /* If necessary, convert the type back to match the type of C. */
5029 if (TYPE_UNSIGNED (type))
5030 temp = fold_convert (type, temp);
5032 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
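/* A worked example with P == 8 in a 32-bit mode and no MASK: for
   C == 0xffffff80 (-128 sign-extended), TEMP becomes 0xffffff00 and
   C ^ TEMP == 0x00000080, so the extra bits are zero.  For the
   unextended C == 0x00000080, the XOR instead yields 0xffffff80,
   setting the extra bits.  */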
5035 /* For an expression that has the form
5036 (A && B) || ~B
5037 or
5038 (A || B) && ~B,
5039 we can drop one of the inner expressions and simplify to
5040 A || ~B
5041 or
5042 A && ~B
5043 LOC is the location of the resulting expression. OP is the inner
5044 logical operation, i.e. the left-hand side in the examples above, while CMPOP
5045 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5046 removing a condition that guards another, as in
5047 (A != NULL && A->...) || A == NULL
5048 which we must not transform. If RHS_ONLY is true, only eliminate the
5049 right-most operand of the inner logical operation. */
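/* For instance (illustrative, plain C): in

     (a > 0 && b > 0) || b <= 0

   the inner b > 0 is the inverse of the guard b <= 0, so the whole
   expression simplifies to a > 0 || b <= 0; checking both values of
   b > 0 in a truth table confirms the equivalence.  */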
5051 static tree
5052 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5053 bool rhs_only)
5055 tree type = TREE_TYPE (cmpop);
5056 enum tree_code code = TREE_CODE (cmpop);
5057 enum tree_code truthop_code = TREE_CODE (op);
5058 tree lhs = TREE_OPERAND (op, 0);
5059 tree rhs = TREE_OPERAND (op, 1);
5060 tree orig_lhs = lhs, orig_rhs = rhs;
5061 enum tree_code rhs_code = TREE_CODE (rhs);
5062 enum tree_code lhs_code = TREE_CODE (lhs);
5063 enum tree_code inv_code;
5065 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5066 return NULL_TREE;
5068 if (TREE_CODE_CLASS (code) != tcc_comparison)
5069 return NULL_TREE;
5071 if (rhs_code == truthop_code)
5073 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5074 if (newrhs != NULL_TREE)
5076 rhs = newrhs;
5077 rhs_code = TREE_CODE (rhs);
5080 if (lhs_code == truthop_code && !rhs_only)
5082 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5083 if (newlhs != NULL_TREE)
5085 lhs = newlhs;
5086 lhs_code = TREE_CODE (lhs);
5090 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5091 if (inv_code == rhs_code
5092 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5093 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5094 return lhs;
5095 if (!rhs_only && inv_code == lhs_code
5096 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5097 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5098 return rhs;
5099 if (rhs != orig_rhs || lhs != orig_lhs)
5100 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5101 lhs, rhs);
5102 return NULL_TREE;
5105 /* Find ways of folding logical expressions of LHS and RHS:
5106 Try to merge two comparisons to the same innermost item.
5107 Look for range tests like "ch >= '0' && ch <= '9'".
5108 Look for combinations of simple terms on machines with expensive branches
5109 and evaluate the RHS unconditionally.
5111 For example, if we have p->a == 2 && p->b == 4 and we can make an
5112 object large enough to span both A and B, we can do this with a comparison
5113 against the object ANDed with a mask.
5115 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5116 operations to do this with one comparison.
5118 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5119 function and the one above.
5121 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5122 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5124 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5125 two operands.
5127 We return the simplified tree or 0 if no optimization is possible. */
5129 static tree
5130 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5131 tree lhs, tree rhs)
5133 /* If this is the "or" of two comparisons, we can do something if
5134 the comparisons are NE_EXPR. If this is the "and", we can do something
5135 if the comparisons are EQ_EXPR. I.e.,
5136 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5138 WANTED_CODE is this operation code. For single bit fields, we can
5139 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5140 comparison for one-bit fields. */
5142 enum tree_code wanted_code;
5143 enum tree_code lcode, rcode;
5144 tree ll_arg, lr_arg, rl_arg, rr_arg;
5145 tree ll_inner, lr_inner, rl_inner, rr_inner;
5146 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5147 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5148 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5149 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5150 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5151 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5152 machine_mode lnmode, rnmode;
5153 tree ll_mask, lr_mask, rl_mask, rr_mask;
5154 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5155 tree l_const, r_const;
5156 tree lntype, rntype, result;
5157 HOST_WIDE_INT first_bit, end_bit;
5158 int volatilep;
5160 /* Start by getting the comparison codes. Fail if anything is volatile.
5161 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5162 it were surrounded with a NE_EXPR. */
5164 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5165 return 0;
5167 lcode = TREE_CODE (lhs);
5168 rcode = TREE_CODE (rhs);
5170 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5172 lhs = build2 (NE_EXPR, truth_type, lhs,
5173 build_int_cst (TREE_TYPE (lhs), 0));
5174 lcode = NE_EXPR;
5177 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5179 rhs = build2 (NE_EXPR, truth_type, rhs,
5180 build_int_cst (TREE_TYPE (rhs), 0));
5181 rcode = NE_EXPR;
5184 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5185 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5186 return 0;
5188 ll_arg = TREE_OPERAND (lhs, 0);
5189 lr_arg = TREE_OPERAND (lhs, 1);
5190 rl_arg = TREE_OPERAND (rhs, 0);
5191 rr_arg = TREE_OPERAND (rhs, 1);
5193 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5194 if (simple_operand_p (ll_arg)
5195 && simple_operand_p (lr_arg))
5197 if (operand_equal_p (ll_arg, rl_arg, 0)
5198 && operand_equal_p (lr_arg, rr_arg, 0))
5200 result = combine_comparisons (loc, code, lcode, rcode,
5201 truth_type, ll_arg, lr_arg);
5202 if (result)
5203 return result;
5205 else if (operand_equal_p (ll_arg, rr_arg, 0)
5206 && operand_equal_p (lr_arg, rl_arg, 0))
5208 result = combine_comparisons (loc, code, lcode,
5209 swap_tree_comparison (rcode),
5210 truth_type, ll_arg, lr_arg);
5211 if (result)
5212 return result;
5216 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5217 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5219 /* If the RHS can be evaluated unconditionally and its operands are
5220 simple, it wins to evaluate the RHS unconditionally on machines
5221 with expensive branches. In this case, this isn't a comparison
5222 that can be merged. */
5224 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5225 false) >= 2
5226 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5227 && simple_operand_p (rl_arg)
5228 && simple_operand_p (rr_arg))
5230 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5231 if (code == TRUTH_OR_EXPR
5232 && lcode == NE_EXPR && integer_zerop (lr_arg)
5233 && rcode == NE_EXPR && integer_zerop (rr_arg)
5234 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5235 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5236 return build2_loc (loc, NE_EXPR, truth_type,
5237 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5238 ll_arg, rl_arg),
5239 build_int_cst (TREE_TYPE (ll_arg), 0));
5241 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5242 if (code == TRUTH_AND_EXPR
5243 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5244 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5245 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5246 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5247 return build2_loc (loc, EQ_EXPR, truth_type,
5248 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5249 ll_arg, rl_arg),
5250 build_int_cst (TREE_TYPE (ll_arg), 0));
5253 /* See if the comparisons can be merged. Then get all the parameters for
5254 each side. */
5256 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5257 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5258 return 0;
5260 volatilep = 0;
5261 ll_inner = decode_field_reference (loc, ll_arg,
5262 &ll_bitsize, &ll_bitpos, &ll_mode,
5263 &ll_unsignedp, &volatilep, &ll_mask,
5264 &ll_and_mask);
5265 lr_inner = decode_field_reference (loc, lr_arg,
5266 &lr_bitsize, &lr_bitpos, &lr_mode,
5267 &lr_unsignedp, &volatilep, &lr_mask,
5268 &lr_and_mask);
5269 rl_inner = decode_field_reference (loc, rl_arg,
5270 &rl_bitsize, &rl_bitpos, &rl_mode,
5271 &rl_unsignedp, &volatilep, &rl_mask,
5272 &rl_and_mask);
5273 rr_inner = decode_field_reference (loc, rr_arg,
5274 &rr_bitsize, &rr_bitpos, &rr_mode,
5275 &rr_unsignedp, &volatilep, &rr_mask,
5276 &rr_and_mask);
5278 /* The inner operation on the lhs of each comparison must be the
5279 same if we are to be able to do anything.
5280 Then see if we have constants. If not, the same must be true for
5281 the rhs's. */
5282 if (volatilep || ll_inner == 0 || rl_inner == 0
5283 || ! operand_equal_p (ll_inner, rl_inner, 0))
5284 return 0;
5286 if (TREE_CODE (lr_arg) == INTEGER_CST
5287 && TREE_CODE (rr_arg) == INTEGER_CST)
5288 l_const = lr_arg, r_const = rr_arg;
5289 else if (lr_inner == 0 || rr_inner == 0
5290 || ! operand_equal_p (lr_inner, rr_inner, 0))
5291 return 0;
5292 else
5293 l_const = r_const = 0;
5295 /* If either comparison code is not correct for our logical operation,
5296 fail. However, we can convert a one-bit comparison against zero into
5297 the opposite comparison against that bit being set in the field. */
5299 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5300 if (lcode != wanted_code)
5302 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5304 /* Make the left operand unsigned, since we are only interested
5305 in the value of one bit. Otherwise we are doing the wrong
5306 thing below. */
5307 ll_unsignedp = 1;
5308 l_const = ll_mask;
5310 else
5311 return 0;
5314 /* This is analogous to the code for l_const above. */
5315 if (rcode != wanted_code)
5317 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5319 rl_unsignedp = 1;
5320 r_const = rl_mask;
5322 else
5323 return 0;
5326 /* See if we can find a mode that contains both fields being compared on
5327 the left. If we can't, fail. Otherwise, update all constants and masks
5328 to be relative to a field of that size. */
5329 first_bit = MIN (ll_bitpos, rl_bitpos);
5330 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5331 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5332 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5333 volatilep);
5334 if (lnmode == VOIDmode)
5335 return 0;
5337 lnbitsize = GET_MODE_BITSIZE (lnmode);
5338 lnbitpos = first_bit & ~ (lnbitsize - 1);
5339 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5340 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5342 if (BYTES_BIG_ENDIAN)
5344 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5345 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5348 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5349 size_int (xll_bitpos));
5350 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5351 size_int (xrl_bitpos));
5353 if (l_const)
5355 l_const = fold_convert_loc (loc, lntype, l_const);
5356 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5357 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5358 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5359 fold_build1_loc (loc, BIT_NOT_EXPR,
5360 lntype, ll_mask))))
5362 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5364 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5367 if (r_const)
5369 r_const = fold_convert_loc (loc, lntype, r_const);
5370 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5371 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5372 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5373 fold_build1_loc (loc, BIT_NOT_EXPR,
5374 lntype, rl_mask))))
5376 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5378 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5382 /* If the right sides are not constant, do the same for them. Also,
5383 disallow this optimization if a size or signedness mismatch occurs
5384 between the left and right sides. */
5385 if (l_const == 0)
5387 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5388 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5389 /* Make sure the two fields on the right
5390 correspond to the left without being swapped. */
5391 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5392 return 0;
5394 first_bit = MIN (lr_bitpos, rr_bitpos);
5395 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5396 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5397 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5398 volatilep);
5399 if (rnmode == VOIDmode)
5400 return 0;
5402 rnbitsize = GET_MODE_BITSIZE (rnmode);
5403 rnbitpos = first_bit & ~ (rnbitsize - 1);
5404 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5405 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5407 if (BYTES_BIG_ENDIAN)
5409 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5410 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5413 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5414 rntype, lr_mask),
5415 size_int (xlr_bitpos));
5416 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5417 rntype, rr_mask),
5418 size_int (xrr_bitpos));
5420 /* Make a mask that corresponds to both fields being compared.
5421 Do this for both items being compared. If the operands are the
5422 same size and the bits being compared are in the same position
5423 then we can do this by masking both and comparing the masked
5424 results. */
5425 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5426 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5427 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5429 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5430 ll_unsignedp || rl_unsignedp);
5431 if (! all_ones_mask_p (ll_mask, lnbitsize))
5432 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5434 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5435 lr_unsignedp || rr_unsignedp);
5436 if (! all_ones_mask_p (lr_mask, rnbitsize))
5437 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5439 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5442 /* There is still another way we can do something: If both pairs of
5443 fields being compared are adjacent, we may be able to make a wider
5444 field containing them both.
5446 Note that we still must mask the lhs/rhs expressions. Furthermore,
5447 the mask must be shifted to account for the shift done by
5448 make_bit_field_ref. */
5449 if ((ll_bitsize + ll_bitpos == rl_bitpos
5450 && lr_bitsize + lr_bitpos == rr_bitpos)
5451 || (ll_bitpos == rl_bitpos + rl_bitsize
5452 && lr_bitpos == rr_bitpos + rr_bitsize))
5454 tree type;
5456 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5457 ll_bitsize + rl_bitsize,
5458 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5459 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5460 lr_bitsize + rr_bitsize,
5461 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5463 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5464 size_int (MIN (xll_bitpos, xrl_bitpos)));
5465 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5466 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5468 /* Convert to the smaller type before masking out unwanted bits. */
5469 type = lntype;
5470 if (lntype != rntype)
5472 if (lnbitsize > rnbitsize)
5474 lhs = fold_convert_loc (loc, rntype, lhs);
5475 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5476 type = rntype;
5478 else if (lnbitsize < rnbitsize)
5480 rhs = fold_convert_loc (loc, lntype, rhs);
5481 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5482 type = lntype;
5486 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5487 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5489 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5490 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5492 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5495 return 0;
5498 /* Handle the case of comparisons with constants. If there is something in
5499 common between the masks, those bits of the constants must be the same.
5500 If not, the condition is always false. Test for this to avoid generating
5501 incorrect code below. */
5502 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5503 if (! integer_zerop (result)
5504 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5505 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5507 if (wanted_code == NE_EXPR)
5509 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5510 return constant_boolean_node (true, truth_type);
5512 else
5514 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5515 return constant_boolean_node (false, truth_type);
5519 /* Construct the expression we will return. First get the component
5520 reference we will make. Unless the mask is all ones the width of
5521 that field, perform the mask operation. Then compare with the
5522 merged constant. */
5523 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5524 ll_unsignedp || rl_unsignedp);
5526 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5527 if (! all_ones_mask_p (ll_mask, lnbitsize))
5528 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5530 return build2_loc (loc, wanted_code, truth_type, result,
5531 const_binop (BIT_IOR_EXPR, l_const, r_const));
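/* Worked example (illustrative; the exact mask and constant depend on
   the target's endianness and bit-field layout): given

     struct S { unsigned a : 8; unsigned b : 8; } s;

   the test `s.a == 1 && s.b == 2' can be merged by the code above into
   one 16-bit load, mask and compare, conceptually

     (*(unsigned short *) &s & 0xffff) == 0x0201

   on a little-endian target, the merged constant being each field's
   constant shifted into position and IOR-ed together.  */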
5534 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5535 constant. */
5537 static tree
5538 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5539 tree op0, tree op1)
5541 tree arg0 = op0;
5542 enum tree_code op_code;
5543 tree comp_const;
5544 tree minmax_const;
5545 int consts_equal, consts_lt;
5546 tree inner;
5548 STRIP_SIGN_NOPS (arg0);
5550 op_code = TREE_CODE (arg0);
5551 minmax_const = TREE_OPERAND (arg0, 1);
5552 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5553 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5554 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5555 inner = TREE_OPERAND (arg0, 0);
5557 /* If something does not permit us to optimize, return NULL_TREE. */
5558 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5559 || TREE_CODE (comp_const) != INTEGER_CST
5560 || TREE_OVERFLOW (comp_const)
5561 || TREE_CODE (minmax_const) != INTEGER_CST
5562 || TREE_OVERFLOW (minmax_const))
5563 return NULL_TREE;
5565 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5566 and GT_EXPR, doing the rest with recursive calls using logical
5567 simplifications. */
5568 switch (code)
5570 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5572 tree tem
5573 = optimize_minmax_comparison (loc,
5574 invert_tree_comparison (code, false),
5575 type, op0, op1);
5576 if (tem)
5577 return invert_truthvalue_loc (loc, tem);
5578 return NULL_TREE;
5581 case GE_EXPR:
5582 return
5583 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5584 optimize_minmax_comparison
5585 (loc, EQ_EXPR, type, arg0, comp_const),
5586 optimize_minmax_comparison
5587 (loc, GT_EXPR, type, arg0, comp_const));
5589 case EQ_EXPR:
5590 if (op_code == MAX_EXPR && consts_equal)
5591 /* MAX (X, 0) == 0 -> X <= 0 */
5592 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5594 else if (op_code == MAX_EXPR && consts_lt)
5595 /* MAX (X, 0) == 5 -> X == 5 */
5596 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5598 else if (op_code == MAX_EXPR)
5599 /* MAX (X, 0) == -1 -> false */
5600 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5602 else if (consts_equal)
5603 /* MIN (X, 0) == 0 -> X >= 0 */
5604 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5606 else if (consts_lt)
5607 /* MIN (X, 0) == 5 -> false */
5608 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5610 else
5611 /* MIN (X, 0) == -1 -> X == -1 */
5612 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5614 case GT_EXPR:
5615 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5616 /* MAX (X, 0) > 0 -> X > 0
5617 MAX (X, 0) > 5 -> X > 5 */
5618 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5620 else if (op_code == MAX_EXPR)
5621 /* MAX (X, 0) > -1 -> true */
5622 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5624 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5625 /* MIN (X, 0) > 0 -> false
5626 MIN (X, 0) > 5 -> false */
5627 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5629 else
5630 /* MIN (X, 0) > -1 -> X > -1 */
5631 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5633 default:
5634 return NULL_TREE;
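/* A minimal sketch of the EQ_EXPR rewrite on host integers (the
   demo_* functions are hypothetical, not part of GCC): both forms
   below compute the same value for every X, since MAX (X, 0) == 0
   holds exactly when X <= 0.  */

static int
demo_minmax_before (int x)
{
  int m = x > 0 ? x : 0;	/* MAX (X, 0) */
  return m == 0;
}

static int
demo_minmax_after (int x)
{
  return x <= 0;		/* the folded form */
}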
5638 /* T is an integer expression that is being multiplied, divided, or taken a
5639 modulus (CODE says which and what kind of divide or modulus) by a
5640 constant C. See if we can eliminate that operation by folding it with
5641 other operations already in T. WIDE_TYPE, if non-null, is a type that
5642 should be used for the computation if wider than our type.
5644 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5645 (X * 2) + (Y * 4). We must, however, be assured that either the original
5646 expression would not overflow or that overflow is undefined for the type
5647 in the language in question.
5649 If we return a non-null expression, it is an equivalent form of the
5650 original computation, but need not be in the original type.
5652 We set *STRICT_OVERFLOW_P to true if the return value depends on
5653 signed overflow being undefined. Otherwise we do not change
5654 *STRICT_OVERFLOW_P. */
5656 static tree
5657 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5658 bool *strict_overflow_p)
5660 /* To avoid exponential search depth, refuse to allow recursion past
5661 three levels. Beyond that (1) it's highly unlikely that we'll find
5662 something interesting and (2) we've probably processed it before
5663 when we built the inner expression. */
5665 static int depth;
5666 tree ret;
5668 if (depth > 3)
5669 return NULL;
5671 depth++;
5672 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5673 depth--;
5675 return ret;
5678 static tree
5679 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5680 bool *strict_overflow_p)
5682 tree type = TREE_TYPE (t);
5683 enum tree_code tcode = TREE_CODE (t);
5684 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5685 > GET_MODE_SIZE (TYPE_MODE (type)))
5686 ? wide_type : type);
5687 tree t1, t2;
5688 int same_p = tcode == code;
5689 tree op0 = NULL_TREE, op1 = NULL_TREE;
5690 bool sub_strict_overflow_p;
5692 /* Don't deal with constants of zero here; they confuse the code below. */
5693 if (integer_zerop (c))
5694 return NULL_TREE;
5696 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5697 op0 = TREE_OPERAND (t, 0);
5699 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5700 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5702 /* Note that we need not handle conditional operations here since fold
5703 already handles those cases. So just do arithmetic here. */
5704 switch (tcode)
5706 case INTEGER_CST:
5707 /* For a constant, we can always simplify if we are a multiply
5708 or (for divide and modulus) if it is a multiple of our constant. */
5709 if (code == MULT_EXPR
5710 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5711 return const_binop (code, fold_convert (ctype, t),
5712 fold_convert (ctype, c));
5713 break;
5715 CASE_CONVERT: case NON_LVALUE_EXPR:
5716 /* If op0 is an expression ... */
5717 if ((COMPARISON_CLASS_P (op0)
5718 || UNARY_CLASS_P (op0)
5719 || BINARY_CLASS_P (op0)
5720 || VL_EXP_CLASS_P (op0)
5721 || EXPRESSION_CLASS_P (op0))
5722 /* ... and has wrapping overflow, and its type is smaller
5723 than ctype, then we cannot pass through as widening. */
5724 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5725 && (TYPE_PRECISION (ctype)
5726 > TYPE_PRECISION (TREE_TYPE (op0))))
5727 /* ... or this is a truncation (t is narrower than op0),
5728 then we cannot pass through this narrowing. */
5729 || (TYPE_PRECISION (type)
5730 < TYPE_PRECISION (TREE_TYPE (op0)))
5731 /* ... or signedness changes for division or modulus,
5732 then we cannot pass through this conversion. */
5733 || (code != MULT_EXPR
5734 && (TYPE_UNSIGNED (ctype)
5735 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5736 /* ... or has undefined overflow while the converted to
5737 type has not, we cannot do the operation in the inner type
5738 as that would introduce undefined overflow. */
5739 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5740 && !TYPE_OVERFLOW_UNDEFINED (type))))
5741 break;
5743 /* Pass the constant down and see if we can make a simplification. If
5744 we can, replace this expression with the inner simplification for
5745 possible later conversion to our or some other type. */
5746 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5747 && TREE_CODE (t2) == INTEGER_CST
5748 && !TREE_OVERFLOW (t2)
5749 && (0 != (t1 = extract_muldiv (op0, t2, code,
5750 code == MULT_EXPR
5751 ? ctype : NULL_TREE,
5752 strict_overflow_p))))
5753 return t1;
5754 break;
5756 case ABS_EXPR:
5757 /* If widening the type changes it from signed to unsigned, then we
5758 must avoid building ABS_EXPR itself as unsigned. */
5759 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5761 tree cstype = (*signed_type_for) (ctype);
5762 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5763 != 0)
5765 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5766 return fold_convert (ctype, t1);
5768 break;
5770 /* If the constant is negative, we cannot simplify this. */
5771 if (tree_int_cst_sgn (c) == -1)
5772 break;
5773 /* FALLTHROUGH */
5774 case NEGATE_EXPR:
5775 /* For division and modulus, type can't be unsigned, as e.g.
5776 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5777 For signed types, even with wrapping overflow, this is fine. */
5778 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5779 break;
5780 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5781 != 0)
5782 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5783 break;
5785 case MIN_EXPR: case MAX_EXPR:
5786 /* If widening the type changes the signedness, then we can't perform
5787 this optimization as that changes the result. */
5788 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5789 break;
5791 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5792 sub_strict_overflow_p = false;
5793 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5794 &sub_strict_overflow_p)) != 0
5795 && (t2 = extract_muldiv (op1, c, code, wide_type,
5796 &sub_strict_overflow_p)) != 0)
5798 if (tree_int_cst_sgn (c) < 0)
5799 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5800 if (sub_strict_overflow_p)
5801 *strict_overflow_p = true;
5802 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5803 fold_convert (ctype, t2));
5805 break;
5807 case LSHIFT_EXPR: case RSHIFT_EXPR:
5808 /* If the second operand is constant, this is a multiplication
5809 or floor division by a power of two, so we can treat it that
5810 way unless the multiplier or divisor overflows. Signed
5811 left-shift overflow is implementation-defined rather than
5812 undefined in C90, so do not convert signed left shift into
5813 multiplication. */
5814 if (TREE_CODE (op1) == INTEGER_CST
5815 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5816 /* const_binop may not detect overflow correctly,
5817 so check for it explicitly here. */
5818 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5819 && 0 != (t1 = fold_convert (ctype,
5820 const_binop (LSHIFT_EXPR,
5821 size_one_node,
5822 op1)))
5823 && !TREE_OVERFLOW (t1))
5824 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5825 ? MULT_EXPR : FLOOR_DIV_EXPR,
5826 ctype,
5827 fold_convert (ctype, op0),
5828 t1),
5829 c, code, wide_type, strict_overflow_p);
5830 break;
5832 case PLUS_EXPR: case MINUS_EXPR:
5833 /* See if we can eliminate the operation on both sides. If we can, we
5834 can return a new PLUS or MINUS. If we can't, the only remaining
5835 cases where we can do anything are if the second operand is a
5836 constant. */
5837 sub_strict_overflow_p = false;
5838 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5839 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5840 if (t1 != 0 && t2 != 0
5841 && (code == MULT_EXPR
5842 /* If not multiplication, we can only do this if both operands
5843 are divisible by c. */
5844 || (multiple_of_p (ctype, op0, c)
5845 && multiple_of_p (ctype, op1, c))))
5847 if (sub_strict_overflow_p)
5848 *strict_overflow_p = true;
5849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5850 fold_convert (ctype, t2));
5853 /* If this was a subtraction, negate OP1 and set it to be an addition.
5854 This simplifies the logic below. */
5855 if (tcode == MINUS_EXPR)
5857 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5858 /* If OP1 was not easily negatable, the constant may be OP0. */
5859 if (TREE_CODE (op0) == INTEGER_CST)
5861 tree tem = op0;
5862 op0 = op1;
5863 op1 = tem;
5864 tem = t1;
5865 t1 = t2;
5866 t2 = tem;
5870 if (TREE_CODE (op1) != INTEGER_CST)
5871 break;
5873 /* If either OP1 or C is negative, this optimization is not safe for
5874 some of the division and remainder types while for others we need
5875 to change the code. */
5876 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5878 if (code == CEIL_DIV_EXPR)
5879 code = FLOOR_DIV_EXPR;
5880 else if (code == FLOOR_DIV_EXPR)
5881 code = CEIL_DIV_EXPR;
5882 else if (code != MULT_EXPR
5883 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5884 break;
5887 /* If it's a multiply or a division/modulus operation of a multiple
5888 of our constant, do the operation and verify it doesn't overflow. */
5889 if (code == MULT_EXPR
5890 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5892 op1 = const_binop (code, fold_convert (ctype, op1),
5893 fold_convert (ctype, c));
5894 /* We allow the constant to overflow with wrapping semantics. */
5895 if (op1 == 0
5896 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5897 break;
5899 else
5900 break;
5902 /* If we have an unsigned type, we cannot widen the operation since it
5903 will change the result if the original computation overflowed. */
5904 if (TYPE_UNSIGNED (ctype) && ctype != type)
5905 break;
5907 /* If we were able to eliminate our operation from the first side,
5908 apply our operation to the second side and reform the PLUS. */
5909 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5910 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5912 /* The last case is if we are a multiply. In that case, we can
5913 apply the distributive law to commute the multiply and addition
5914 if the multiplication of the constants doesn't overflow
5915 and overflow is defined. With undefined overflow
5916 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5917 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5918 return fold_build2 (tcode, ctype,
5919 fold_build2 (code, ctype,
5920 fold_convert (ctype, op0),
5921 fold_convert (ctype, c)),
5922 op1);
5924 break;
5926 case MULT_EXPR:
5927 /* We have a special case here if we are doing something like
5928 (C * 8) % 4 since we know that's zero. */
5929 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5930 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5931 /* If the multiplication can overflow we cannot optimize this. */
5932 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5933 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5934 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5936 *strict_overflow_p = true;
5937 return omit_one_operand (type, integer_zero_node, op0);
5940 /* ... fall through ... */
5942 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5943 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5944 /* If we can extract our operation from the LHS, do so and return a
5945 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5946 do something only if the second operand is a constant. */
5947 if (same_p
5948 && (t1 = extract_muldiv (op0, c, code, wide_type,
5949 strict_overflow_p)) != 0)
5950 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5951 fold_convert (ctype, op1));
5952 else if (tcode == MULT_EXPR && code == MULT_EXPR
5953 && (t1 = extract_muldiv (op1, c, code, wide_type,
5954 strict_overflow_p)) != 0)
5955 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5956 fold_convert (ctype, t1));
5957 else if (TREE_CODE (op1) != INTEGER_CST)
5958 return 0;
5960 /* If these are the same operation types, we can associate them
5961 assuming no overflow. */
5962 if (tcode == code)
5964 bool overflow_p = false;
5965 bool overflow_mul_p;
5966 signop sign = TYPE_SIGN (ctype);
5967 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5968 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5969 if (overflow_mul_p
5970 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5971 overflow_p = true;
5972 if (!overflow_p)
5973 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5974 wide_int_to_tree (ctype, mul));
5977 /* If these operations "cancel" each other, we have the main
5978 optimizations of this pass, which occur when either constant is a
5979 multiple of the other, in which case we replace this with either an
5980 operation of CODE or TCODE.
5982 If we have an unsigned type, we cannot do this since it will change
5983 the result if the original computation overflowed. */
5984 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5985 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5986 || (tcode == MULT_EXPR
5987 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5988 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5989 && code != MULT_EXPR)))
5991 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5993 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5994 *strict_overflow_p = true;
5995 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5996 fold_convert (ctype,
5997 const_binop (TRUNC_DIV_EXPR,
5998 op1, c)));
6000 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6002 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6003 *strict_overflow_p = true;
6004 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6005 fold_convert (ctype,
6006 const_binop (TRUNC_DIV_EXPR,
6007 c, op1)));
6010 break;
6012 default:
6013 break;
6016 return 0;
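/* A minimal sketch of the net effect on host integers (the demo_*
   functions are hypothetical, not part of GCC); the rewrite is valid
   only when the original expression cannot overflow, as required
   above, and here both addends are multiples of the divisor.  */

static long
demo_extract_muldiv_before (long x, long y)
{
  return (x * 8 + y * 16) / 4;	/* assuming no signed overflow */
}

static long
demo_extract_muldiv_after (long x, long y)
{
  return x * 2 + y * 4;		/* the folded form */
}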
6019 /* Return a node which has the indicated constant VALUE (either 0 or
6020 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6021 and is of the indicated TYPE. */
6023 tree
6024 constant_boolean_node (bool value, tree type)
6026 if (type == integer_type_node)
6027 return value ? integer_one_node : integer_zero_node;
6028 else if (type == boolean_type_node)
6029 return value ? boolean_true_node : boolean_false_node;
6030 else if (TREE_CODE (type) == VECTOR_TYPE)
6031 return build_vector_from_val (type,
6032 build_int_cst (TREE_TYPE (type),
6033 value ? -1 : 0));
6034 else
6035 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6039 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6040 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6041 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6042 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6043 COND is the first argument to CODE; otherwise (as in the example
6044 given here), it is the second argument. TYPE is the type of the
6045 original expression. Return NULL_TREE if no simplification is
6046 possible. */
6048 static tree
6049 fold_binary_op_with_conditional_arg (location_t loc,
6050 enum tree_code code,
6051 tree type, tree op0, tree op1,
6052 tree cond, tree arg, int cond_first_p)
6054 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6055 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6056 tree test, true_value, false_value;
6057 tree lhs = NULL_TREE;
6058 tree rhs = NULL_TREE;
6059 enum tree_code cond_code = COND_EXPR;
6061 if (TREE_CODE (cond) == COND_EXPR
6062 || TREE_CODE (cond) == VEC_COND_EXPR)
6064 test = TREE_OPERAND (cond, 0);
6065 true_value = TREE_OPERAND (cond, 1);
6066 false_value = TREE_OPERAND (cond, 2);
6067 /* If this operand throws an exception, then it does not make
6068 sense to try to perform a logical or arithmetic operation
6069 involving it. */
6070 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6071 lhs = true_value;
6072 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6073 rhs = false_value;
6075 else
6077 tree testtype = TREE_TYPE (cond);
6078 test = cond;
6079 true_value = constant_boolean_node (true, testtype);
6080 false_value = constant_boolean_node (false, testtype);
6083 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6084 cond_code = VEC_COND_EXPR;
6086 /* This transformation is only worthwhile if we don't have to wrap ARG
6087 in a SAVE_EXPR and the operation can be simplified without recursing
6088 on at least one of the branches once it is pushed inside the COND_EXPR. */
6089 if (!TREE_CONSTANT (arg)
6090 && (TREE_SIDE_EFFECTS (arg)
6091 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6092 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6093 return NULL_TREE;
6095 arg = fold_convert_loc (loc, arg_type, arg);
6096 if (lhs == 0)
6098 true_value = fold_convert_loc (loc, cond_type, true_value);
6099 if (cond_first_p)
6100 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6101 else
6102 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6104 if (rhs == 0)
6106 false_value = fold_convert_loc (loc, cond_type, false_value);
6107 if (cond_first_p)
6108 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6109 else
6110 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6113 /* Check that we have simplified at least one of the branches. */
6114 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6115 return NULL_TREE;
6117 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
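/* A minimal sketch of the transformation on host integers (the
   demo_* functions are hypothetical, not part of GCC): pushing the
   addition into both arms lets each arm fold further whenever ARG or
   an arm is constant.  */

static int
demo_cond_arg_before (int a, int b, int x, int y)
{
  return a + (b ? x : y);
}

static int
demo_cond_arg_after (int a, int b, int x, int y)
{
  return b ? (a + x) : (a + y);
}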
6121 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6123 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6124 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6125 ADDEND is the same as X.
6127 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6128 and finite. The problematic cases are when X is zero, and its mode
6129 has signed zeros. In the case of rounding towards -infinity,
6130 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6131 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6133 bool
6134 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6136 if (!real_zerop (addend))
6137 return false;
6139 /* Don't allow the fold with -fsignaling-nans. */
6140 if (HONOR_SNANS (TYPE_MODE (type)))
6141 return false;
6143 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6144 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6145 return true;
6147 /* In a vector or complex, we would need to check the sign of all zeros. */
6148 if (TREE_CODE (addend) != REAL_CST)
6149 return false;
6151 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6152 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6153 negate = !negate;
6155 /* The mode has signed zeros, and we have to honor their sign.
6156 In this situation, there is only one case we can return true for.
6157 X - 0 is the same as X unless rounding towards -infinity is
6158 supported. */
6159 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
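/* Worked example: under IEEE round-to-nearest, -0.0 + 0.0 evaluates
   to +0.0, so `x + 0.0' is not an identity when x may be a negative
   zero whose sign matters; and under rounding towards -infinity,
   0.0 - 0.0 evaluates to -0.0, so `x - 0.0' is an identity only when
   sign-dependent rounding need not be honored, which is exactly the
   condition returned above.  */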
6162 /* Subroutine of fold() that checks comparisons of built-in math
6163 functions against real constants.
6165 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6166 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6167 is the type of the result and ARG0 and ARG1 are the operands of the
6168 comparison. ARG1 must be a TREE_REAL_CST.
6170 The function returns the constant folded tree if a simplification
6171 can be made, and NULL_TREE otherwise. */
6173 static tree
6174 fold_mathfn_compare (location_t loc,
6175 enum built_in_function fcode, enum tree_code code,
6176 tree type, tree arg0, tree arg1)
6178 REAL_VALUE_TYPE c;
6180 if (BUILTIN_SQRT_P (fcode))
6182 tree arg = CALL_EXPR_ARG (arg0, 0);
6183 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6185 c = TREE_REAL_CST (arg1);
6186 if (REAL_VALUE_NEGATIVE (c))
6188 /* sqrt(x) < y is always false, if y is negative. */
6189 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6190 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6192 /* sqrt(x) > y is always true, if y is negative and we
6193 don't care about NaNs, i.e. negative values of x. */
6194 if (code == NE_EXPR || !HONOR_NANS (mode))
6195 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6197 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6198 return fold_build2_loc (loc, GE_EXPR, type, arg,
6199 build_real (TREE_TYPE (arg), dconst0));
6201 else if (code == GT_EXPR || code == GE_EXPR)
6203 REAL_VALUE_TYPE c2;
6205 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6206 real_convert (&c2, mode, &c2);
6208 if (REAL_VALUE_ISINF (c2))
6210 /* sqrt(x) > y is x == +Inf, when y is very large. */
6211 if (HONOR_INFINITIES (mode))
6212 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6213 build_real (TREE_TYPE (arg), c2));
6215 /* sqrt(x) > y is always false, when y is very large
6216 and we don't care about infinities. */
6217 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6220 /* sqrt(x) > c is the same as x > c*c. */
6221 return fold_build2_loc (loc, code, type, arg,
6222 build_real (TREE_TYPE (arg), c2));
6224 else if (code == LT_EXPR || code == LE_EXPR)
6226 REAL_VALUE_TYPE c2;
6228 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6229 real_convert (&c2, mode, &c2);
6231 if (REAL_VALUE_ISINF (c2))
6233 /* sqrt(x) < y is always true, when y is a very large
6234 value and we don't care about NaNs or Infinities. */
6235 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6236 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6238 /* sqrt(x) < y is x != +Inf when y is very large and we
6239 don't care about NaNs. */
6240 if (! HONOR_NANS (mode))
6241 return fold_build2_loc (loc, NE_EXPR, type, arg,
6242 build_real (TREE_TYPE (arg), c2));
6244 /* sqrt(x) < y is x >= 0 when y is very large and we
6245 don't care about Infinities. */
6246 if (! HONOR_INFINITIES (mode))
6247 return fold_build2_loc (loc, GE_EXPR, type, arg,
6248 build_real (TREE_TYPE (arg), dconst0));
6250 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6251 arg = save_expr (arg);
6252 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6253 fold_build2_loc (loc, GE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 dconst0)),
6256 fold_build2_loc (loc, NE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg),
6258 c2)));
6261 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6262 if (! HONOR_NANS (mode))
6263 return fold_build2_loc (loc, code, type, arg,
6264 build_real (TREE_TYPE (arg), c2));
6266 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6267 arg = save_expr (arg);
6268 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6269 fold_build2_loc (loc, GE_EXPR, type, arg,
6270 build_real (TREE_TYPE (arg),
6271 dconst0)),
6272 fold_build2_loc (loc, code, type, arg,
6273 build_real (TREE_TYPE (arg),
6274 c2)));
6278 return NULL_TREE;
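/* Worked example: for c = 3.0, `sqrt (x) > 3.0' folds to `x > 9.0'
   (c*c is exactly representable); e.g. x = 10.0 satisfies both forms.
   The NaN and infinity guards above handle the cases where c*c
   overflows to +Inf or where x may be a NaN.  */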
6281 /* Subroutine of fold() that optimizes comparisons against Infinities,
6282 either +Inf or -Inf.
6284 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6285 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6286 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6288 The function returns the constant folded tree if a simplification
6289 can be made, and NULL_TREE otherwise. */
6291 static tree
6292 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6293 tree arg0, tree arg1)
6295 machine_mode mode;
6296 REAL_VALUE_TYPE max;
6297 tree temp;
6298 bool neg;
6300 mode = TYPE_MODE (TREE_TYPE (arg0));
6302 /* For negative infinity swap the sense of the comparison. */
6303 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6304 if (neg)
6305 code = swap_tree_comparison (code);
6307 switch (code)
6309 case GT_EXPR:
6310 /* x > +Inf is always false, if we ignore sNaNs. */
6311 if (HONOR_SNANS (mode))
6312 return NULL_TREE;
6313 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6315 case LE_EXPR:
6316 /* x <= +Inf is always true, if we don't care about NaNs. */
6317 if (! HONOR_NANS (mode))
6318 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6320 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6321 arg0 = save_expr (arg0);
6322 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6324 case EQ_EXPR:
6325 case GE_EXPR:
6326 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6327 real_maxval (&max, neg, mode);
6328 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6329 arg0, build_real (TREE_TYPE (arg0), max));
6331 case LT_EXPR:
6332 /* x < +Inf is always equal to x <= DBL_MAX. */
6333 real_maxval (&max, neg, mode);
6334 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6335 arg0, build_real (TREE_TYPE (arg0), max));
6337 case NE_EXPR:
6338 /* x != +Inf is always equal to !(x > DBL_MAX). */
6339 real_maxval (&max, neg, mode);
6340 if (! HONOR_NANS (mode))
6341 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6344 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6345 arg0, build_real (TREE_TYPE (arg0), max));
6346 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6348 default:
6349 break;
6352 return NULL_TREE;
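/* Worked example: `x < +Inf' holds for every non-NaN double except
   +Inf itself, i.e. exactly when x <= DBL_MAX, which is the rewrite
   the LT_EXPR case performs via real_maxval.  */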
6355 /* Subroutine of fold() that optimizes comparisons of a division by
6356 a nonzero integer constant against an integer constant, i.e.
6357 X/C1 op C2.
6359 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6360 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6361 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6363 The function returns the constant folded tree if a simplification
6364 can be made, and NULL_TREE otherwise. */
6366 static tree
6367 fold_div_compare (location_t loc,
6368 enum tree_code code, tree type, tree arg0, tree arg1)
6370 tree prod, tmp, hi, lo;
6371 tree arg00 = TREE_OPERAND (arg0, 0);
6372 tree arg01 = TREE_OPERAND (arg0, 1);
6373 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6374 bool neg_overflow = false;
6375 bool overflow;
6377 /* We have to do this the hard way to detect unsigned overflow.
6378 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6379 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6380 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6381 neg_overflow = false;
6383 if (sign == UNSIGNED)
6385 tmp = int_const_binop (MINUS_EXPR, arg01,
6386 build_int_cst (TREE_TYPE (arg01), 1));
6387 lo = prod;
6389 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6390 val = wi::add (prod, tmp, sign, &overflow);
6391 hi = force_fit_type (TREE_TYPE (arg00), val,
6392 -1, overflow | TREE_OVERFLOW (prod));
6394 else if (tree_int_cst_sgn (arg01) >= 0)
6396 tmp = int_const_binop (MINUS_EXPR, arg01,
6397 build_int_cst (TREE_TYPE (arg01), 1));
6398 switch (tree_int_cst_sgn (arg1))
6400 case -1:
6401 neg_overflow = true;
6402 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6403 hi = prod;
6404 break;
6406 case 0:
6407 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6408 hi = tmp;
6409 break;
6411 case 1:
6412 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6413 lo = prod;
6414 break;
6416 default:
6417 gcc_unreachable ();
6420 else
6422 /* A negative divisor reverses the relational operators. */
6423 code = swap_tree_comparison (code);
6425 tmp = int_const_binop (PLUS_EXPR, arg01,
6426 build_int_cst (TREE_TYPE (arg01), 1));
6427 switch (tree_int_cst_sgn (arg1))
6429 case -1:
6430 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6431 lo = prod;
6432 break;
6434 case 0:
6435 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6436 lo = tmp;
6437 break;
6439 case 1:
6440 neg_overflow = true;
6441 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6442 hi = prod;
6443 break;
6445 default:
6446 gcc_unreachable ();
6450 switch (code)
6452 case EQ_EXPR:
6453 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6454 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6455 if (TREE_OVERFLOW (hi))
6456 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6457 if (TREE_OVERFLOW (lo))
6458 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6459 return build_range_check (loc, type, arg00, 1, lo, hi);
6461 case NE_EXPR:
6462 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6463 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6464 if (TREE_OVERFLOW (hi))
6465 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6466 if (TREE_OVERFLOW (lo))
6467 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6468 return build_range_check (loc, type, arg00, 0, lo, hi);
6470 case LT_EXPR:
6471 if (TREE_OVERFLOW (lo))
6473 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6474 return omit_one_operand_loc (loc, type, tmp, arg00);
6476 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6478 case LE_EXPR:
6479 if (TREE_OVERFLOW (hi))
6481 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6482 return omit_one_operand_loc (loc, type, tmp, arg00);
6484 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6486 case GT_EXPR:
6487 if (TREE_OVERFLOW (hi))
6489 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6492 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6494 case GE_EXPR:
6495 if (TREE_OVERFLOW (lo))
6497 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6500 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6502 default:
6503 break;
6506 return NULL_TREE;
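/* A minimal sketch on host integers (the demo_* functions are
   hypothetical, not part of GCC): for unsigned X, `X / 4 == 2' holds
   exactly on the range [8, 11], which build_range_check emits as a
   single subtract-and-compare.  */

static int
demo_div_compare_before (unsigned x)
{
  return x / 4 == 2;
}

static int
demo_div_compare_after (unsigned x)
{
  return x - 8 <= 3;		/* range check for 8 <= x <= 11 */
}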
6510 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6511 equality/inequality test, then return a simplified form of the test
6512 using a sign test. Otherwise return NULL. TYPE is the desired
6513 result type. */
6515 static tree
6516 fold_single_bit_test_into_sign_test (location_t loc,
6517 enum tree_code code, tree arg0, tree arg1,
6518 tree result_type)
6520 /* If this is testing a single bit, we can optimize the test. */
6521 if ((code == NE_EXPR || code == EQ_EXPR)
6522 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6523 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6525 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6526 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6527 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6529 if (arg00 != NULL_TREE
6530 /* This is only a win if casting to a signed type is cheap,
6531 i.e. when arg00's type is not a partial mode. */
6532 && TYPE_PRECISION (TREE_TYPE (arg00))
6533 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6535 tree stype = signed_type_for (TREE_TYPE (arg00));
6536 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6537 result_type,
6538 fold_convert_loc (loc, stype, arg00),
6539 build_int_cst (stype, 0));
6543 return NULL_TREE;
6546 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6547 equality/inequality test, then return a simplified form of
6548 the test using shifts and logical operations. Otherwise return
6549 NULL. TYPE is the desired result type. */
6551 tree
6552 fold_single_bit_test (location_t loc, enum tree_code code,
6553 tree arg0, tree arg1, tree result_type)
6555 /* If this is testing a single bit, we can optimize the test. */
6556 if ((code == NE_EXPR || code == EQ_EXPR)
6557 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6558 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6560 tree inner = TREE_OPERAND (arg0, 0);
6561 tree type = TREE_TYPE (arg0);
6562 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6563 machine_mode operand_mode = TYPE_MODE (type);
6564 int ops_unsigned;
6565 tree signed_type, unsigned_type, intermediate_type;
6566 tree tem, one;
6568 /* First, see if we can fold the single bit test into a sign-bit
6569 test. */
6570 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6571 result_type);
6572 if (tem)
6573 return tem;
6575 /* Otherwise we have (A & C) != 0 where C is a single bit,
6576 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6577 Similarly for (A & C) == 0. */
6579 /* If INNER is a right shift of a constant and it plus BITNUM does
6580 not overflow, adjust BITNUM and INNER. */
6581 if (TREE_CODE (inner) == RSHIFT_EXPR
6582 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6583 && bitnum < TYPE_PRECISION (type)
6584 && wi::ltu_p (TREE_OPERAND (inner, 1),
6585 TYPE_PRECISION (type) - bitnum))
6587 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6588 inner = TREE_OPERAND (inner, 0);
6591 /* If we are going to be able to omit the AND below, we must do our
6592 operations as unsigned. If we must use the AND, we have a choice.
6593 Normally unsigned is faster, but for some machines signed is. */
6594 #ifdef LOAD_EXTEND_OP
6595 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6596 && !flag_syntax_only) ? 0 : 1;
6597 #else
6598 ops_unsigned = 1;
6599 #endif
6601 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6602 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6603 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6604 inner = fold_convert_loc (loc, intermediate_type, inner);
6606 if (bitnum != 0)
6607 inner = build2 (RSHIFT_EXPR, intermediate_type,
6608 inner, size_int (bitnum));
6610 one = build_int_cst (intermediate_type, 1);
6612 if (code == EQ_EXPR)
6613 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6615 /* Put the AND last so it can combine with more things. */
6616 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6618 /* Make sure to return the proper type. */
6619 inner = fold_convert_loc (loc, result_type, inner);
6621 return inner;
6623 return NULL_TREE;
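/* A minimal sketch on host integers (the demo_* functions are
   hypothetical, not part of GCC): with C = 8 = 1 << 3, the test
   `(a & 8) != 0' becomes the shift-and-mask form built above.  */

static int
demo_single_bit_before (unsigned a)
{
  return (a & 8) != 0;
}

static int
demo_single_bit_after (unsigned a)
{
  return (a >> 3) & 1;
}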
6626 /* Check whether we are allowed to reorder operands arg0 and arg1,
6627 such that the evaluation of arg1 occurs before arg0. */
6629 static bool
6630 reorder_operands_p (const_tree arg0, const_tree arg1)
6632 if (! flag_evaluation_order)
6633 return true;
6634 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6635 return true;
6636 return ! TREE_SIDE_EFFECTS (arg0)
6637 && ! TREE_SIDE_EFFECTS (arg1);
6640 /* Test whether it is preferable to swap two operands, ARG0 and
6641 ARG1, for example because ARG0 is an integer constant and ARG1
6642 isn't. If REORDER is true, only recommend swapping if we can
6643 evaluate the operands in reverse order. */
6645 bool
6646 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6648 if (CONSTANT_CLASS_P (arg1))
6649 return 0;
6650 if (CONSTANT_CLASS_P (arg0))
6651 return 1;
6653 STRIP_SIGN_NOPS (arg0);
6654 STRIP_SIGN_NOPS (arg1);
6656 if (TREE_CONSTANT (arg1))
6657 return 0;
6658 if (TREE_CONSTANT (arg0))
6659 return 1;
6661 if (reorder && flag_evaluation_order
6662 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6663 return 0;
6665 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6666 for commutative and comparison operators. Ensuring a canonical
6667 form allows the optimizers to find additional redundancies without
6668 having to explicitly check for both orderings. */
6669 if (TREE_CODE (arg0) == SSA_NAME
6670 && TREE_CODE (arg1) == SSA_NAME
6671 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6672 return 1;
6674 /* Put SSA_NAMEs last. */
6675 if (TREE_CODE (arg1) == SSA_NAME)
6676 return 0;
6677 if (TREE_CODE (arg0) == SSA_NAME)
6678 return 1;
6680 /* Put variables last. */
6681 if (DECL_P (arg1))
6682 return 0;
6683 if (DECL_P (arg0))
6684 return 1;
6686 return 0;
6689 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6690 ARG0 is extended to a wider type. */
6692 static tree
6693 fold_widened_comparison (location_t loc, enum tree_code code,
6694 tree type, tree arg0, tree arg1)
6696 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6697 tree arg1_unw;
6698 tree shorter_type, outer_type;
6699 tree min, max;
6700 bool above, below;
6702 if (arg0_unw == arg0)
6703 return NULL_TREE;
6704 shorter_type = TREE_TYPE (arg0_unw);
6706 #ifdef HAVE_canonicalize_funcptr_for_compare
6707 /* Disable this optimization if we're casting a function pointer
6708 type on targets that require function pointer canonicalization. */
6709 if (HAVE_canonicalize_funcptr_for_compare
6710 && TREE_CODE (shorter_type) == POINTER_TYPE
6711 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6712 return NULL_TREE;
6713 #endif
6715 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6716 return NULL_TREE;
6718 arg1_unw = get_unwidened (arg1, NULL_TREE);
6720 /* If possible, express the comparison in the shorter mode. */
6721 if ((code == EQ_EXPR || code == NE_EXPR
6722 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6723 && (TREE_TYPE (arg1_unw) == shorter_type
6724 || ((TYPE_PRECISION (shorter_type)
6725 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6726 && (TYPE_UNSIGNED (shorter_type)
6727 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6728 || (TREE_CODE (arg1_unw) == INTEGER_CST
6729 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6730 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6731 && int_fits_type_p (arg1_unw, shorter_type))))
6732 return fold_build2_loc (loc, code, type, arg0_unw,
6733 fold_convert_loc (loc, shorter_type, arg1_unw));
6735 if (TREE_CODE (arg1_unw) != INTEGER_CST
6736 || TREE_CODE (shorter_type) != INTEGER_TYPE
6737 || !int_fits_type_p (arg1_unw, shorter_type))
6738 return NULL_TREE;
6740 /* If we are comparing with an integer that does not fit into the range
6741 of the shorter type, the result is known. */
6742 outer_type = TREE_TYPE (arg1_unw);
6743 min = lower_bound_in_type (outer_type, shorter_type);
6744 max = upper_bound_in_type (outer_type, shorter_type);
6746 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6747 max, arg1_unw));
6748 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6749 arg1_unw, min));
6751 switch (code)
6753 case EQ_EXPR:
6754 if (above || below)
6755 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6756 break;
6758 case NE_EXPR:
6759 if (above || below)
6760 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6761 break;
6763 case LT_EXPR:
6764 case LE_EXPR:
6765 if (above)
6766 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6767 else if (below)
6768 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6770 case GT_EXPR:
6771 case GE_EXPR:
6772 if (above)
6773 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6774 else if (below)
6775 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6777 default:
6778 break;
6781 return NULL_TREE;
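/* Worked example: with `unsigned char c', the comparison
   `(int) c == 300' tests against a constant outside [0, 255], so the
   EQ_EXPR case above folds it to constant false (keeping any side
   effects of the operand via omit_one_operand_loc).  */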
6784 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6785 ARG0 just the signedness is changed. */
6787 static tree
6788 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6789 tree arg0, tree arg1)
6791 tree arg0_inner;
6792 tree inner_type, outer_type;
6794 if (!CONVERT_EXPR_P (arg0))
6795 return NULL_TREE;
6797 outer_type = TREE_TYPE (arg0);
6798 arg0_inner = TREE_OPERAND (arg0, 0);
6799 inner_type = TREE_TYPE (arg0_inner);
6801 #ifdef HAVE_canonicalize_funcptr_for_compare
6802 /* Disable this optimization if we're casting a function pointer
6803 type on targets that require function pointer canonicalization. */
6804 if (HAVE_canonicalize_funcptr_for_compare
6805 && TREE_CODE (inner_type) == POINTER_TYPE
6806 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6807 return NULL_TREE;
6808 #endif
6810 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6811 return NULL_TREE;
6813 if (TREE_CODE (arg1) != INTEGER_CST
6814 && !(CONVERT_EXPR_P (arg1)
6815 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6816 return NULL_TREE;
6818 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6819 && code != NE_EXPR
6820 && code != EQ_EXPR)
6821 return NULL_TREE;
6823 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6824 return NULL_TREE;
6826 if (TREE_CODE (arg1) == INTEGER_CST)
6827 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6828 TREE_OVERFLOW (arg1));
6829 else
6830 arg1 = fold_convert_loc (loc, inner_type, arg1);
6832 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6836 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6837 means A >= Y && A != MAX, but in this case we know that
6838 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6840 static tree
6841 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6843 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6845 if (TREE_CODE (bound) == LT_EXPR)
6846 a = TREE_OPERAND (bound, 0);
6847 else if (TREE_CODE (bound) == GT_EXPR)
6848 a = TREE_OPERAND (bound, 1);
6849 else
6850 return NULL_TREE;
6852 typea = TREE_TYPE (a);
6853 if (!INTEGRAL_TYPE_P (typea)
6854 && !POINTER_TYPE_P (typea))
6855 return NULL_TREE;
6857 if (TREE_CODE (ineq) == LT_EXPR)
6859 a1 = TREE_OPERAND (ineq, 1);
6860 y = TREE_OPERAND (ineq, 0);
6862 else if (TREE_CODE (ineq) == GT_EXPR)
6864 a1 = TREE_OPERAND (ineq, 0);
6865 y = TREE_OPERAND (ineq, 1);
6867 else
6868 return NULL_TREE;
6870 if (TREE_TYPE (a1) != typea)
6871 return NULL_TREE;
6873 if (POINTER_TYPE_P (typea))
6875 /* Convert the pointer types into integers before taking the difference. */
6876 tree ta = fold_convert_loc (loc, ssizetype, a);
6877 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6878 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6880 else
6881 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6883 if (!diff || !integer_onep (diff))
6884 return NULL_TREE;
6886 return fold_build2_loc (loc, GE_EXPR, type, a, y);
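/* A minimal sketch on host integers (the demo_* functions are
   hypothetical, not part of GCC): for integers, `a + 1 > y' is
   equivalent to `a >= y' provided a + 1 does not overflow, which the
   accompanying `A < X' bound guarantees here.  */

static int
demo_nonsharp_before (int a, int y)
{
  return a + 1 > y;		/* assuming a + 1 does not overflow */
}

static int
demo_nonsharp_after (int a, int y)
{
  return a >= y;
}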
6889 /* Fold a sum or difference of at least one multiplication.
6890 Returns the folded tree or NULL if no simplification could be made. */
6892 static tree
6893 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6894 tree arg0, tree arg1)
6896 tree arg00, arg01, arg10, arg11;
6897 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6899 /* (A * C) +- (B * C) -> (A+-B) * C.
6900 (A * C) +- A -> A * (C+-1).
6901 We are most concerned about the case where C is a constant,
6902 but other combinations show up during loop reduction. Since
6903 it is not difficult, try all four possibilities. */
6905 if (TREE_CODE (arg0) == MULT_EXPR)
6907 arg00 = TREE_OPERAND (arg0, 0);
6908 arg01 = TREE_OPERAND (arg0, 1);
6910 else if (TREE_CODE (arg0) == INTEGER_CST)
6912 arg00 = build_one_cst (type);
6913 arg01 = arg0;
6915 else
6917 /* We cannot generate constant 1 for fract. */
6918 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6919 return NULL_TREE;
6920 arg00 = arg0;
6921 arg01 = build_one_cst (type);
6923 if (TREE_CODE (arg1) == MULT_EXPR)
6925 arg10 = TREE_OPERAND (arg1, 0);
6926 arg11 = TREE_OPERAND (arg1, 1);
6928 else if (TREE_CODE (arg1) == INTEGER_CST)
6930 arg10 = build_one_cst (type);
6931 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6932 the purpose of this canonicalization. */
6933 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6934 && negate_expr_p (arg1)
6935 && code == PLUS_EXPR)
6937 arg11 = negate_expr (arg1);
6938 code = MINUS_EXPR;
6940 else
6941 arg11 = arg1;
6943 else
6945 /* We cannot generate constant 1 for fract. */
6946 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6947 return NULL_TREE;
6948 arg10 = arg1;
6949 arg11 = build_one_cst (type);
6951 same = NULL_TREE;
6953 if (operand_equal_p (arg01, arg11, 0))
6954 same = arg01, alt0 = arg00, alt1 = arg10;
6955 else if (operand_equal_p (arg00, arg10, 0))
6956 same = arg00, alt0 = arg01, alt1 = arg11;
6957 else if (operand_equal_p (arg00, arg11, 0))
6958 same = arg00, alt0 = arg01, alt1 = arg10;
6959 else if (operand_equal_p (arg01, arg10, 0))
6960 same = arg01, alt0 = arg00, alt1 = arg11;
6962 /* No identical multiplicands; see if we can find a common
6963 power-of-two factor in non-power-of-two multiplies. This
6964 can help in multi-dimensional array access. */
6965 else if (tree_fits_shwi_p (arg01)
6966 && tree_fits_shwi_p (arg11))
6968 HOST_WIDE_INT int01, int11, tmp;
6969 bool swap = false;
6970 tree maybe_same;
6971 int01 = tree_to_shwi (arg01);
6972 int11 = tree_to_shwi (arg11);
6974 /* Move min of absolute values to int11. */
6975 if (absu_hwi (int01) < absu_hwi (int11))
6977 tmp = int01, int01 = int11, int11 = tmp;
6978 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6979 maybe_same = arg01;
6980 swap = true;
6982 else
6983 maybe_same = arg11;
6985 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6986 /* The remainder should not be a constant, otherwise we
6987 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6988 increase the number of multiplications necessary. */
6989 && TREE_CODE (arg10) != INTEGER_CST)
6991 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6992 build_int_cst (TREE_TYPE (arg00),
6993 int01 / int11));
6994 alt1 = arg10;
6995 same = maybe_same;
6996 if (swap)
6997 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7001 if (same)
7002 return fold_build2_loc (loc, MULT_EXPR, type,
7003 fold_build2_loc (loc, code, type,
7004 fold_convert_loc (loc, type, alt0),
7005 fold_convert_loc (loc, type, alt1)),
7006 fold_convert_loc (loc, type, same));
7008 return NULL_TREE;
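/* A minimal sketch on host integers (the demo_* functions are
   hypothetical, not part of GCC): the common-factor rewrite trades
   two multiplications for one, assuming wrapping overflow or no
   overflow as discussed above.  */

static int
demo_plusminus_mult_before (int a, int b, int c)
{
  return a * c + b * c;
}

static int
demo_plusminus_mult_after (int a, int b, int c)
{
  return (a + b) * c;		/* (A * C) + (B * C) -> (A + B) * C */
}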
7011 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7012 specified by EXPR into the buffer PTR of length LEN bytes.
7013 Return the number of bytes placed in the buffer, or zero
7014 upon failure. */
7016 static int
7017 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7019 tree type = TREE_TYPE (expr);
7020 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7021 int byte, offset, word, words;
7022 unsigned char value;
7024 if ((off == -1 && total_bytes > len)
7025 || off >= total_bytes)
7026 return 0;
7027 if (off == -1)
7028 off = 0;
7029 words = total_bytes / UNITS_PER_WORD;
7031 for (byte = 0; byte < total_bytes; byte++)
7033 int bitpos = byte * BITS_PER_UNIT;
7034 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7035 number of bytes. */
7036 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7038 if (total_bytes > UNITS_PER_WORD)
7040 word = byte / UNITS_PER_WORD;
7041 if (WORDS_BIG_ENDIAN)
7042 word = (words - 1) - word;
7043 offset = word * UNITS_PER_WORD;
7044 if (BYTES_BIG_ENDIAN)
7045 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7046 else
7047 offset += byte % UNITS_PER_WORD;
7049 else
7050 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7051 if (offset >= off
7052 && offset - off < len)
7053 ptr[offset - off] = value;
7055 return MIN (len, total_bytes - off);
7059 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7060 specified by EXPR into the buffer PTR of length LEN bytes.
7061 Return the number of bytes placed in the buffer, or zero
7062 upon failure. */
7064 static int
7065 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7067 tree type = TREE_TYPE (expr);
7068 machine_mode mode = TYPE_MODE (type);
7069 int total_bytes = GET_MODE_SIZE (mode);
7070 FIXED_VALUE_TYPE value;
7071 tree i_value, i_type;
7073 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7074 return 0;
7076 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7078 if (NULL_TREE == i_type
7079 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT
7080 return 0;
7082 value = TREE_FIXED_CST (expr);
7083 i_value = double_int_to_tree (i_type, value.data);
7085 return native_encode_int (i_value, ptr, len, off);
7089 /* Subroutine of native_encode_expr. Encode the REAL_CST
7090 specified by EXPR into the buffer PTR of length LEN bytes.
7091 Return the number of bytes placed in the buffer, or zero
7092 upon failure. */
7094 static int
7095 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7097 tree type = TREE_TYPE (expr);
7098 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7099 int byte, offset, word, words, bitpos;
7100 unsigned char value;
7102 /* There are always 32 bits in each long, no matter the size of
7103 the host's long. We handle floating point representations with
7104 up to 192 bits. */
7105 long tmp[6];
7107 if ((off == -1 && total_bytes > len)
7108 || off >= total_bytes)
7109 return 0;
7110 if (off == -1)
7111 off = 0;
7112 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7114 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7116 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7117 bitpos += BITS_PER_UNIT)
7119 byte = (bitpos / BITS_PER_UNIT) & 3;
7120 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7122 if (UNITS_PER_WORD < 4)
7124 word = byte / UNITS_PER_WORD;
7125 if (WORDS_BIG_ENDIAN)
7126 word = (words - 1) - word;
7127 offset = word * UNITS_PER_WORD;
7128 if (BYTES_BIG_ENDIAN)
7129 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7130 else
7131 offset += byte % UNITS_PER_WORD;
7133 else
7134 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7135 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7136 if (offset >= off
7137 && offset - off < len)
7138 ptr[offset - off] = value;
7140 return MIN (len, total_bytes - off);
7143 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7144 specified by EXPR into the buffer PTR of length LEN bytes.
7145 Return the number of bytes placed in the buffer, or zero
7146 upon failure. */
7148 static int
7149 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7151 int rsize, isize;
7152 tree part;
7154 part = TREE_REALPART (expr);
7155 rsize = native_encode_expr (part, ptr, len, off);
7156 if (off == -1
7157 && rsize == 0)
7158 return 0;
7159 part = TREE_IMAGPART (expr);
7160 if (off != -1)
7161 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7162 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7163 if (off == -1
7164 && isize != rsize)
7165 return 0;
7166 return rsize + isize;
7170 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7171 specified by EXPR into the buffer PTR of length LEN bytes.
7172 Return the number of bytes placed in the buffer, or zero
7173 upon failure. */
7175 static int
7176 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7178 unsigned i, count;
7179 int size, offset;
7180 tree itype, elem;
7182 offset = 0;
7183 count = VECTOR_CST_NELTS (expr);
7184 itype = TREE_TYPE (TREE_TYPE (expr));
7185 size = GET_MODE_SIZE (TYPE_MODE (itype));
7186 for (i = 0; i < count; i++)
7188 if (off >= size)
7190 off -= size;
7191 continue;
7193 elem = VECTOR_CST_ELT (expr, i);
7194 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7195 if ((off == -1 && res != size)
7196 || res == 0)
7197 return 0;
7198 offset += res;
7199 if (offset >= len)
7200 return offset;
7201 if (off != -1)
7202 off = 0;
7204 return offset;
7208 /* Subroutine of native_encode_expr. Encode the STRING_CST
7209 specified by EXPR into the buffer PTR of length LEN bytes.
7210 Return the number of bytes placed in the buffer, or zero
7211 upon failure. */
7213 static int
7214 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7216 tree type = TREE_TYPE (expr);
7217 HOST_WIDE_INT total_bytes;
7219 if (TREE_CODE (type) != ARRAY_TYPE
7220 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7221 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7222 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7223 return 0;
7224 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7225 if ((off == -1 && total_bytes > len)
7226 || off >= total_bytes)
7227 return 0;
7228 if (off == -1)
7229 off = 0;
7230 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7232 int written = 0;
7233 if (off < TREE_STRING_LENGTH (expr))
7235 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7236 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7238 memset (ptr + written, 0,
7239 MIN (total_bytes - written, len - written));
7241 else
7242 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7243 return MIN (total_bytes - off, len);
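/* E.g. the STRING_CST "ab" stored in a char[4] object encodes as
   'a', 'b', '\0', '\0': bytes past TREE_STRING_LENGTH are
   zero-filled up to the size of the array type. */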
7247 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7248 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7249 buffer PTR of length LEN bytes. If OFF is not -1 then start
7250 the encoding at byte offset OFF and encode at most LEN bytes.
7251 Return the number of bytes placed in the buffer, or zero upon failure. */
7253 int
7254 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7256 switch (TREE_CODE (expr))
7258 case INTEGER_CST:
7259 return native_encode_int (expr, ptr, len, off);
7261 case REAL_CST:
7262 return native_encode_real (expr, ptr, len, off);
7264 case FIXED_CST:
7265 return native_encode_fixed (expr, ptr, len, off);
7267 case COMPLEX_CST:
7268 return native_encode_complex (expr, ptr, len, off);
7270 case VECTOR_CST:
7271 return native_encode_vector (expr, ptr, len, off);
7273 case STRING_CST:
7274 return native_encode_string (expr, ptr, len, off);
7276 default:
7277 return 0;
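/* A typical use is the encode/interpret round-trip in
   fold_view_convert_expr below, sketched as:

     unsigned char buf[64];
     int n = native_encode_expr (cst, buf, sizeof buf);
     if (n != 0)
       result = native_interpret_expr (new_type, buf, n);

   (sketch only; BUF, N and RESULT are illustrative names). */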
7282 /* Subroutine of native_interpret_expr. Interpret the contents of
7283 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7284 If the buffer cannot be interpreted, return NULL_TREE. */
7286 static tree
7287 native_interpret_int (tree type, const unsigned char *ptr, int len)
7289 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7291 if (total_bytes > len
7292 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7293 return NULL_TREE;
7295 wide_int result = wi::from_buffer (ptr, total_bytes);
7297 return wide_int_to_tree (type, result);
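/* E.g. the bytes 2a 00 00 00 interpret as the INTEGER_CST 42 for a
   32-bit type on a little-endian target; wi::from_buffer reads the
   bytes in target order. */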
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7305 static tree
7306 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7308 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7309 double_int result;
7310 FIXED_VALUE_TYPE fixed_value;
7312 if (total_bytes > len
7313 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7314 return NULL_TREE;
7316 result = double_int::from_buffer (ptr, total_bytes);
7317 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7319 return build_fixed (type, fixed_value);
7323 /* Subroutine of native_interpret_expr. Interpret the contents of
7324 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7325 If the buffer cannot be interpreted, return NULL_TREE. */
7327 static tree
7328 native_interpret_real (tree type, const unsigned char *ptr, int len)
7330 machine_mode mode = TYPE_MODE (type);
7331 int total_bytes = GET_MODE_SIZE (mode);
7332 int byte, offset, word, words, bitpos;
7333 unsigned char value;
7334 /* There are always 32 bits in each long, no matter the size of
7335 the host's long. We handle floating point representations with
7336 up to 192 bits. */
7337 REAL_VALUE_TYPE r;
7338 long tmp[6];
7340 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7341 if (total_bytes > len || total_bytes > 24)
7342 return NULL_TREE;
7343 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7345 memset (tmp, 0, sizeof (tmp));
7346 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7347 bitpos += BITS_PER_UNIT)
7349 byte = (bitpos / BITS_PER_UNIT) & 3;
7350 if (UNITS_PER_WORD < 4)
7352 word = byte / UNITS_PER_WORD;
7353 if (WORDS_BIG_ENDIAN)
7354 word = (words - 1) - word;
7355 offset = word * UNITS_PER_WORD;
7356 if (BYTES_BIG_ENDIAN)
7357 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7358 else
7359 offset += byte % UNITS_PER_WORD;
7361 else
7362 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7363 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7365 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7368 real_from_target (&r, tmp, mode);
7369 return build_real (type, r);
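/* This is the inverse of native_encode_real: the same byte/word
   shuffle reassembles the target image into 32-bit groups for
   real_from_target, so an encode followed by an interpret
   round-trips the constant. */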
7373 /* Subroutine of native_interpret_expr. Interpret the contents of
7374 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7375 If the buffer cannot be interpreted, return NULL_TREE. */
7377 static tree
7378 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7380 tree etype, rpart, ipart;
7381 int size;
7383 etype = TREE_TYPE (type);
7384 size = GET_MODE_SIZE (TYPE_MODE (etype));
7385 if (size * 2 > len)
7386 return NULL_TREE;
7387 rpart = native_interpret_expr (etype, ptr, size);
7388 if (!rpart)
7389 return NULL_TREE;
7390 ipart = native_interpret_expr (etype, ptr+size, size);
7391 if (!ipart)
7392 return NULL_TREE;
7393 return build_complex (type, rpart, ipart);
7397 /* Subroutine of native_interpret_expr. Interpret the contents of
7398 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7399 If the buffer cannot be interpreted, return NULL_TREE. */
7401 static tree
7402 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7404 tree etype, elem;
7405 int i, size, count;
7406 tree *elements;
7408 etype = TREE_TYPE (type);
7409 size = GET_MODE_SIZE (TYPE_MODE (etype));
7410 count = TYPE_VECTOR_SUBPARTS (type);
7411 if (size * count > len)
7412 return NULL_TREE;
7414 elements = XALLOCAVEC (tree, count);
7415 for (i = count - 1; i >= 0; i--)
7417 elem = native_interpret_expr (etype, ptr+(i*size), size);
7418 if (!elem)
7419 return NULL_TREE;
7420 elements[i] = elem;
7422 return build_vector (type, elements);
7426 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7427 the buffer PTR of length LEN as a constant of type TYPE. For
7428 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7429 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7430 return NULL_TREE. */
7432 tree
7433 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7435 switch (TREE_CODE (type))
7437 case INTEGER_TYPE:
7438 case ENUMERAL_TYPE:
7439 case BOOLEAN_TYPE:
7440 case POINTER_TYPE:
7441 case REFERENCE_TYPE:
7442 return native_interpret_int (type, ptr, len);
7444 case REAL_TYPE:
7445 return native_interpret_real (type, ptr, len);
7447 case FIXED_POINT_TYPE:
7448 return native_interpret_fixed (type, ptr, len);
7450 case COMPLEX_TYPE:
7451 return native_interpret_complex (type, ptr, len);
7453 case VECTOR_TYPE:
7454 return native_interpret_vector (type, ptr, len);
7456 default:
7457 return NULL_TREE;
7461 /* Returns true if we can interpret the contents of a native encoding
7462 as TYPE. */
7464 static bool
7465 can_native_interpret_type_p (tree type)
7467 switch (TREE_CODE (type))
7469 case INTEGER_TYPE:
7470 case ENUMERAL_TYPE:
7471 case BOOLEAN_TYPE:
7472 case POINTER_TYPE:
7473 case REFERENCE_TYPE:
7474 case FIXED_POINT_TYPE:
7475 case REAL_TYPE:
7476 case COMPLEX_TYPE:
7477 case VECTOR_TYPE:
7478 return true;
7479 default:
7480 return false;
7484 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7485 TYPE at compile-time. If we're unable to perform the conversion
7486 return NULL_TREE. */
7488 static tree
7489 fold_view_convert_expr (tree type, tree expr)
7491 /* We support up to 512-bit values (for V8DFmode). */
7492 unsigned char buffer[64];
7493 int len;
7495 /* Check that the host and target are sane. */
7496 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7497 return NULL_TREE;
7499 len = native_encode_expr (expr, buffer, sizeof (buffer));
7500 if (len == 0)
7501 return NULL_TREE;
7503 return native_interpret_expr (type, buffer, len);
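/* E.g. VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216
   (0x3f800000) on a target with IEEE single precision floats,
   assuming int and float share the same 4-byte size. */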
7506 /* Build an expression for the address of T. Folds away INDIRECT_REF
7507 to avoid confusing the gimplify process. */
7509 tree
7510 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7512 /* The size of the object is not relevant when talking about its address. */
7513 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7514 t = TREE_OPERAND (t, 0);
7516 if (TREE_CODE (t) == INDIRECT_REF)
7518 t = TREE_OPERAND (t, 0);
7520 if (TREE_TYPE (t) != ptrtype)
7521 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7523 else if (TREE_CODE (t) == MEM_REF
7524 && integer_zerop (TREE_OPERAND (t, 1)))
7525 return TREE_OPERAND (t, 0);
7526 else if (TREE_CODE (t) == MEM_REF
7527 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7528 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7529 TREE_OPERAND (t, 0),
7530 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7531 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7533 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7535 if (TREE_TYPE (t) != ptrtype)
7536 t = fold_convert_loc (loc, ptrtype, t);
7538 else
7539 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7541 return t;
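/* E.g. &*p simplifies to p, and &MEM_REF[p, 0] likewise to p,
   instead of wrapping the reference in a fresh ADDR_EXPR. */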
7544 /* Build an expression for the address of T. */
7546 tree
7547 build_fold_addr_expr_loc (location_t loc, tree t)
7549 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7551 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7554 static bool vec_cst_ctor_to_array (tree, tree *);
7556 /* Fold a unary expression of code CODE and type TYPE with operand
7557 OP0. Return the folded expression if folding is successful.
7558 Otherwise, return NULL_TREE. */
7560 tree
7561 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7563 tree tem;
7564 tree arg0;
7565 enum tree_code_class kind = TREE_CODE_CLASS (code);
7567 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7568 && TREE_CODE_LENGTH (code) == 1);
7570 tem = generic_simplify (loc, code, type, op0);
7571 if (tem)
7572 return tem;
7574 arg0 = op0;
7575 if (arg0)
7577 if (CONVERT_EXPR_CODE_P (code)
7578 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7580 /* Don't use STRIP_NOPS, because signedness of argument type
7581 matters. */
7582 STRIP_SIGN_NOPS (arg0);
7584 else
7586 /* Strip any conversions that don't change the mode. This
7587 is safe for every expression, except for a comparison
7588 expression because its signedness is derived from its
7589 operands.
7591 Note that this is done as an internal manipulation within
7592 the constant folder, in order to find the simplest
7593 representation of the arguments so that their form can be
7594 studied. In any case, the appropriate type conversions
7595 should be put back in the tree that will get out of the
7596 constant folder. */
7597 STRIP_NOPS (arg0);
7601 if (TREE_CODE_CLASS (code) == tcc_unary)
7603 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7604 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7605 fold_build1_loc (loc, code, type,
7606 fold_convert_loc (loc, TREE_TYPE (op0),
7607 TREE_OPERAND (arg0, 1))));
7608 else if (TREE_CODE (arg0) == COND_EXPR)
7610 tree arg01 = TREE_OPERAND (arg0, 1);
7611 tree arg02 = TREE_OPERAND (arg0, 2);
7612 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7613 arg01 = fold_build1_loc (loc, code, type,
7614 fold_convert_loc (loc,
7615 TREE_TYPE (op0), arg01));
7616 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7617 arg02 = fold_build1_loc (loc, code, type,
7618 fold_convert_loc (loc,
7619 TREE_TYPE (op0), arg02));
7620 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7621 arg01, arg02);
7623 /* If this was a conversion, and all we did was to move it
7624 inside the COND_EXPR, bring it back out. But leave it if
7625 it is a conversion from integer to integer and the
7626 result precision is no wider than a word since such a
7627 conversion is cheap and may be optimized away by combine,
7628 while it couldn't if it were outside the COND_EXPR. Then return
7629 so we don't get into an infinite recursion loop taking the
7630 conversion out and then back in. */
7632 if ((CONVERT_EXPR_CODE_P (code)
7633 || code == NON_LVALUE_EXPR)
7634 && TREE_CODE (tem) == COND_EXPR
7635 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7636 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7637 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7638 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7639 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7640 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7641 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7642 && (INTEGRAL_TYPE_P
7643 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7644 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7645 || flag_syntax_only))
7646 tem = build1_loc (loc, code, type,
7647 build3 (COND_EXPR,
7648 TREE_TYPE (TREE_OPERAND
7649 (TREE_OPERAND (tem, 1), 0)),
7650 TREE_OPERAND (tem, 0),
7651 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7652 TREE_OPERAND (TREE_OPERAND (tem, 2),
7653 0)));
7654 return tem;
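/* E.g. a negation distributes as -(a ? b : c) -> a ? -b : -c; for
   conversions the code above pulls the operation back out of the
   COND_EXPR (unless it is a cheap integer conversion) so the fold
   cannot recurse indefinitely. */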
7658 switch (code)
7660 case NON_LVALUE_EXPR:
7661 if (!maybe_lvalue_p (op0))
7662 return fold_convert_loc (loc, type, op0);
7663 return NULL_TREE;
7665 CASE_CONVERT:
7666 case FLOAT_EXPR:
7667 case FIX_TRUNC_EXPR:
7668 if (COMPARISON_CLASS_P (op0))
7670 /* If we have (type) (a CMP b) and type is an integral type, return
7671 new expression involving the new type. Canonicalize
7672 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7673 non-integral type.
7674 Do not fold the result as that would not simplify further, also
7675 folding again results in recursions. */
7676 if (TREE_CODE (type) == BOOLEAN_TYPE)
7677 return build2_loc (loc, TREE_CODE (op0), type,
7678 TREE_OPERAND (op0, 0),
7679 TREE_OPERAND (op0, 1));
7680 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7681 && TREE_CODE (type) != VECTOR_TYPE)
7682 return build3_loc (loc, COND_EXPR, type, op0,
7683 constant_boolean_node (true, type),
7684 constant_boolean_node (false, type));
7687 /* Handle (T *)&A.B.C for A being of type T and B and C
7688 living at offset zero. This occurs frequently in
7689 C++ upcasting and then accessing the base. */
7690 if (TREE_CODE (op0) == ADDR_EXPR
7691 && POINTER_TYPE_P (type)
7692 && handled_component_p (TREE_OPERAND (op0, 0)))
7694 HOST_WIDE_INT bitsize, bitpos;
7695 tree offset;
7696 machine_mode mode;
7697 int unsignedp, volatilep;
7698 tree base = TREE_OPERAND (op0, 0);
7699 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7700 &mode, &unsignedp, &volatilep, false);
7701 /* If the reference was to a (constant) zero offset, we can use
7702 the address of the base if it has the same base type
7703 as the result type and the pointer type is unqualified. */
7704 if (! offset && bitpos == 0
7705 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7706 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7707 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7708 return fold_convert_loc (loc, type,
7709 build_fold_addr_expr_loc (loc, base));
7712 if (TREE_CODE (op0) == MODIFY_EXPR
7713 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7714 /* Detect assigning a bitfield. */
7715 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7716 && DECL_BIT_FIELD
7717 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7719 /* Don't leave an assignment inside a conversion
7720 unless assigning a bitfield. */
7721 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7722 /* First do the assignment, then return converted constant. */
7723 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7724 TREE_NO_WARNING (tem) = 1;
7725 TREE_USED (tem) = 1;
7726 return tem;
7729 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7730 constants (if x has signed type, the sign bit cannot be set
7731 in c). This folds extension into the BIT_AND_EXPR.
7732 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7733 very likely don't have maximal range for their precision and this
7734 transformation effectively doesn't preserve non-maximal ranges. */
7735 if (TREE_CODE (type) == INTEGER_TYPE
7736 && TREE_CODE (op0) == BIT_AND_EXPR
7737 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7739 tree and_expr = op0;
7740 tree and0 = TREE_OPERAND (and_expr, 0);
7741 tree and1 = TREE_OPERAND (and_expr, 1);
7742 int change = 0;
7744 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7745 || (TYPE_PRECISION (type)
7746 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7747 change = 1;
7748 else if (TYPE_PRECISION (TREE_TYPE (and1))
7749 <= HOST_BITS_PER_WIDE_INT
7750 && tree_fits_uhwi_p (and1))
7752 unsigned HOST_WIDE_INT cst;
7754 cst = tree_to_uhwi (and1);
7755 cst &= HOST_WIDE_INT_M1U
7756 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7757 change = (cst == 0);
7758 #ifdef LOAD_EXTEND_OP
7759 if (change
7760 && !flag_syntax_only
7761 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7762 == ZERO_EXTEND))
7764 tree uns = unsigned_type_for (TREE_TYPE (and0));
7765 and0 = fold_convert_loc (loc, uns, and0);
7766 and1 = fold_convert_loc (loc, uns, and1);
7768 #endif
7770 if (change)
7772 tem = force_fit_type (type, wi::to_widest (and1), 0,
7773 TREE_OVERFLOW (and1));
7774 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7775 fold_convert_loc (loc, type, and0), tem);
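/* E.g. (unsigned long) (x & 0xff) with unsigned int X becomes
   (unsigned long) x & 0xff, folding the widening into the mask. */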
7779 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7780 when one of the new casts will fold away. Conservatively we assume
7781 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7782 if (POINTER_TYPE_P (type)
7783 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7784 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7785 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7786 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7787 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7789 tree arg00 = TREE_OPERAND (arg0, 0);
7790 tree arg01 = TREE_OPERAND (arg0, 1);
7792 return fold_build_pointer_plus_loc
7793 (loc, fold_convert_loc (loc, type, arg00), arg01);
7796 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7797 of the same precision, and X is an integer type not narrower than
7798 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7799 if (INTEGRAL_TYPE_P (type)
7800 && TREE_CODE (op0) == BIT_NOT_EXPR
7801 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7802 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7803 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7805 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7806 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7807 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7808 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7809 fold_convert_loc (loc, type, tem));
7812 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7813 type of X and Y (integer types only). */
7814 if (INTEGRAL_TYPE_P (type)
7815 && TREE_CODE (op0) == MULT_EXPR
7816 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7817 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7819 /* Be careful not to introduce new overflows. */
7820 tree mult_type;
7821 if (TYPE_OVERFLOW_WRAPS (type))
7822 mult_type = type;
7823 else
7824 mult_type = unsigned_type_for (type);
7826 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7828 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7829 fold_convert_loc (loc, mult_type,
7830 TREE_OPERAND (op0, 0)),
7831 fold_convert_loc (loc, mult_type,
7832 TREE_OPERAND (op0, 1)));
7833 return fold_convert_loc (loc, type, tem);
7837 tem = fold_convert_const (code, type, arg0);
7838 return tem ? tem : NULL_TREE;
7840 case ADDR_SPACE_CONVERT_EXPR:
7841 if (integer_zerop (arg0))
7842 return fold_convert_const (code, type, arg0);
7843 return NULL_TREE;
7845 case FIXED_CONVERT_EXPR:
7846 tem = fold_convert_const (code, type, arg0);
7847 return tem ? tem : NULL_TREE;
7849 case VIEW_CONVERT_EXPR:
7850 if (TREE_CODE (op0) == MEM_REF)
7851 return fold_build2_loc (loc, MEM_REF, type,
7852 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7854 return fold_view_convert_expr (type, op0);
7856 case NEGATE_EXPR:
7857 tem = fold_negate_expr (loc, arg0);
7858 if (tem)
7859 return fold_convert_loc (loc, type, tem);
7860 return NULL_TREE;
7862 case ABS_EXPR:
7863 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7864 return fold_abs_const (arg0, type);
7865 /* Convert fabs((double)float) into (double)fabsf(float). */
7866 else if (TREE_CODE (arg0) == NOP_EXPR
7867 && TREE_CODE (type) == REAL_TYPE)
7869 tree targ0 = strip_float_extensions (arg0);
7870 if (targ0 != arg0)
7871 return fold_convert_loc (loc, type,
7872 fold_build1_loc (loc, ABS_EXPR,
7873 TREE_TYPE (targ0),
7874 targ0));
7876 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7877 else if (TREE_CODE (arg0) == ABS_EXPR)
7878 return arg0;
7880 /* Strip sign ops from argument. */
7881 if (TREE_CODE (type) == REAL_TYPE)
7883 tem = fold_strip_sign_ops (arg0);
7884 if (tem)
7885 return fold_build1_loc (loc, ABS_EXPR, type,
7886 fold_convert_loc (loc, type, tem));
7888 return NULL_TREE;
7890 case CONJ_EXPR:
7891 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7892 return fold_convert_loc (loc, type, arg0);
7893 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7895 tree itype = TREE_TYPE (type);
7896 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7897 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7898 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7899 negate_expr (ipart));
7901 if (TREE_CODE (arg0) == COMPLEX_CST)
7903 tree itype = TREE_TYPE (type);
7904 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
7905 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
7906 return build_complex (type, rpart, negate_expr (ipart));
7908 if (TREE_CODE (arg0) == CONJ_EXPR)
7909 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7910 return NULL_TREE;
7912 case BIT_NOT_EXPR:
7913 if (TREE_CODE (arg0) == INTEGER_CST)
7914 return fold_not_const (arg0, type);
7915 /* Convert ~ (-A) to A - 1. */
7916 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7917 return fold_build2_loc (loc, MINUS_EXPR, type,
7918 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
7919 build_int_cst (type, 1));
7920 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7921 else if (INTEGRAL_TYPE_P (type)
7922 && ((TREE_CODE (arg0) == MINUS_EXPR
7923 && integer_onep (TREE_OPERAND (arg0, 1)))
7924 || (TREE_CODE (arg0) == PLUS_EXPR
7925 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7926 return fold_build1_loc (loc, NEGATE_EXPR, type,
7927 fold_convert_loc (loc, type,
7928 TREE_OPERAND (arg0, 0)));
7929 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7930 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7931 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7932 fold_convert_loc (loc, type,
7933 TREE_OPERAND (arg0, 0)))))
7934 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7935 fold_convert_loc (loc, type,
7936 TREE_OPERAND (arg0, 1)));
7937 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7938 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7939 fold_convert_loc (loc, type,
7940 TREE_OPERAND (arg0, 1)))))
7941 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7942 fold_convert_loc (loc, type,
7943 TREE_OPERAND (arg0, 0)), tem);
7944 /* Perform BIT_NOT_EXPR on each element individually. */
7945 else if (TREE_CODE (arg0) == VECTOR_CST)
7947 tree *elements;
7948 tree elem;
7949 unsigned count = VECTOR_CST_NELTS (arg0), i;
7951 elements = XALLOCAVEC (tree, count);
7952 for (i = 0; i < count; i++)
7954 elem = VECTOR_CST_ELT (arg0, i);
7955 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
7956 if (elem == NULL_TREE)
7957 break;
7958 elements[i] = elem;
7960 if (i == count)
7961 return build_vector (type, elements);
7963 else if (COMPARISON_CLASS_P (arg0)
7964 && (VECTOR_TYPE_P (type)
7965 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
7967 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
7968 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
7969 HONOR_NANS (TYPE_MODE (op_type)));
7970 if (subcode != ERROR_MARK)
7971 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
7972 TREE_OPERAND (arg0, 1));
7976 return NULL_TREE;
7978 case TRUTH_NOT_EXPR:
7979 /* Note that the operand of this must be an int
7980 and its values must be 0 or 1.
7981 ("true" is a fixed value perhaps depending on the language,
7982 but we don't handle values other than 1 correctly yet.) */
7983 tem = fold_truth_not_expr (loc, arg0);
7984 if (!tem)
7985 return NULL_TREE;
7986 return fold_convert_loc (loc, type, tem);
7988 case REALPART_EXPR:
7989 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7990 return fold_convert_loc (loc, type, arg0);
7991 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7992 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
7993 TREE_OPERAND (arg0, 1));
7994 if (TREE_CODE (arg0) == COMPLEX_CST)
7995 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
7996 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7998 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7999 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8000 fold_build1_loc (loc, REALPART_EXPR, itype,
8001 TREE_OPERAND (arg0, 0)),
8002 fold_build1_loc (loc, REALPART_EXPR, itype,
8003 TREE_OPERAND (arg0, 1)));
8004 return fold_convert_loc (loc, type, tem);
8006 if (TREE_CODE (arg0) == CONJ_EXPR)
8008 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8009 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8010 TREE_OPERAND (arg0, 0));
8011 return fold_convert_loc (loc, type, tem);
8013 if (TREE_CODE (arg0) == CALL_EXPR)
8015 tree fn = get_callee_fndecl (arg0);
8016 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8017 switch (DECL_FUNCTION_CODE (fn))
8019 CASE_FLT_FN (BUILT_IN_CEXPI):
8020 fn = mathfn_built_in (type, BUILT_IN_COS);
8021 if (fn)
8022 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8023 break;
8025 default:
8026 break;
8029 return NULL_TREE;
8031 case IMAGPART_EXPR:
8032 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8033 return build_zero_cst (type);
8034 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8035 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8036 TREE_OPERAND (arg0, 0));
8037 if (TREE_CODE (arg0) == COMPLEX_CST)
8038 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8039 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8041 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8042 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8043 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8044 TREE_OPERAND (arg0, 0)),
8045 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8046 TREE_OPERAND (arg0, 1)));
8047 return fold_convert_loc (loc, type, tem);
8049 if (TREE_CODE (arg0) == CONJ_EXPR)
8051 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8052 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8053 return fold_convert_loc (loc, type, negate_expr (tem));
8055 if (TREE_CODE (arg0) == CALL_EXPR)
8057 tree fn = get_callee_fndecl (arg0);
8058 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8059 switch (DECL_FUNCTION_CODE (fn))
8061 CASE_FLT_FN (BUILT_IN_CEXPI):
8062 fn = mathfn_built_in (type, BUILT_IN_SIN);
8063 if (fn)
8064 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8065 break;
8067 default:
8068 break;
8071 return NULL_TREE;
8073 case INDIRECT_REF:
8074 /* Fold *&X to X if X is an lvalue. */
8075 if (TREE_CODE (op0) == ADDR_EXPR)
8077 tree op00 = TREE_OPERAND (op0, 0);
8078 if ((TREE_CODE (op00) == VAR_DECL
8079 || TREE_CODE (op00) == PARM_DECL
8080 || TREE_CODE (op00) == RESULT_DECL)
8081 && !TREE_READONLY (op00))
8082 return op00;
8084 return NULL_TREE;
8086 case VEC_UNPACK_LO_EXPR:
8087 case VEC_UNPACK_HI_EXPR:
8088 case VEC_UNPACK_FLOAT_LO_EXPR:
8089 case VEC_UNPACK_FLOAT_HI_EXPR:
8091 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8092 tree *elts;
8093 enum tree_code subcode;
8095 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8096 if (TREE_CODE (arg0) != VECTOR_CST)
8097 return NULL_TREE;
8099 elts = XALLOCAVEC (tree, nelts * 2);
8100 if (!vec_cst_ctor_to_array (arg0, elts))
8101 return NULL_TREE;
8103 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8104 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8105 elts += nelts;
8107 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8108 subcode = NOP_EXPR;
8109 else
8110 subcode = FLOAT_EXPR;
8112 for (i = 0; i < nelts; i++)
8114 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8115 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8116 return NULL_TREE;
8119 return build_vector (type, elts);
8122 case REDUC_MIN_EXPR:
8123 case REDUC_MAX_EXPR:
8124 case REDUC_PLUS_EXPR:
8126 unsigned int nelts, i;
8127 tree *elts;
8128 enum tree_code subcode;
8130 if (TREE_CODE (op0) != VECTOR_CST)
8131 return NULL_TREE;
8132 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
8134 elts = XALLOCAVEC (tree, nelts);
8135 if (!vec_cst_ctor_to_array (op0, elts))
8136 return NULL_TREE;
8138 switch (code)
8140 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8141 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8142 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8143 default: gcc_unreachable ();
8146 for (i = 1; i < nelts; i++)
8148 elts[0] = const_binop (subcode, elts[0], elts[i]);
8149 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8150 return NULL_TREE;
8153 return elts[0];
8156 default:
8157 return NULL_TREE;
8158 } /* switch (code) */
8162 /* If the operation was a conversion do _not_ mark a resulting constant
8163 with TREE_OVERFLOW if the original constant was not. These conversions
8164 have implementation defined behavior and retaining the TREE_OVERFLOW
8165 flag here would confuse later passes such as VRP. */
8166 tree
8167 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8168 tree type, tree op0)
8170 tree res = fold_unary_loc (loc, code, type, op0);
8171 if (res
8172 && TREE_CODE (res) == INTEGER_CST
8173 && TREE_CODE (op0) == INTEGER_CST
8174 && CONVERT_EXPR_CODE_P (code))
8175 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8177 return res;
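/* E.g. folding (int) 0x80000000u yields INT_MIN with TREE_OVERFLOW
   cleared, since unsigned-to-signed conversion is merely
   implementation-defined rather than undefined (assuming 32-bit
   int, for illustration). */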
8180 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8181 operands OP0 and OP1. LOC is the location of the resulting expression.
8182 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8183 Return the folded expression if folding is successful. Otherwise,
8184 return NULL_TREE. */
8185 static tree
8186 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8187 tree arg0, tree arg1, tree op0, tree op1)
8189 tree tem;
8191 /* We only do these simplifications if we are optimizing. */
8192 if (!optimize)
8193 return NULL_TREE;
8195 /* Check for things like (A || B) && (A || C). We can convert this
8196 to A || (B && C). Note that either operator can be any of the four
8197 truth and/or operations and the transformation will still be
8198 valid. Also note that we only care about order for the
8199 ANDIF and ORIF operators. If B contains side effects, this
8200 might change the truth-value of A. */
8201 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8202 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8203 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8204 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8205 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8206 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8208 tree a00 = TREE_OPERAND (arg0, 0);
8209 tree a01 = TREE_OPERAND (arg0, 1);
8210 tree a10 = TREE_OPERAND (arg1, 0);
8211 tree a11 = TREE_OPERAND (arg1, 1);
8212 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8213 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8214 && (code == TRUTH_AND_EXPR
8215 || code == TRUTH_OR_EXPR));
8217 if (operand_equal_p (a00, a10, 0))
8218 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8219 fold_build2_loc (loc, code, type, a01, a11));
8220 else if (commutative && operand_equal_p (a00, a11, 0))
8221 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8222 fold_build2_loc (loc, code, type, a01, a10));
8223 else if (commutative && operand_equal_p (a01, a10, 0))
8224 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8225 fold_build2_loc (loc, code, type, a00, a11));
8227 /* This case is tricky because we must either have commutative
8228 operators or else A10 must not have side-effects. */
8230 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8231 && operand_equal_p (a01, a11, 0))
8232 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8233 fold_build2_loc (loc, code, type, a00, a10),
8234 a01);
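/* E.g. (a || b) && (a || c) becomes a || (b && c) when B has no
   side effects, so A is tested only once. */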
8237 /* See if we can build a range comparison. */
8238 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8239 return tem;
8241 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8242 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8244 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8245 if (tem)
8246 return fold_build2_loc (loc, code, type, tem, arg1);
8249 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8250 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8252 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8253 if (tem)
8254 return fold_build2_loc (loc, code, type, arg0, tem);
8257 /* Check for the possibility of merging component references. If our
8258 lhs is another similar operation, try to merge its rhs with our
8259 rhs. Then try to merge our lhs and rhs. */
8260 if (TREE_CODE (arg0) == code
8261 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8262 TREE_OPERAND (arg0, 1), arg1)))
8263 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8265 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8266 return tem;
8268 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8269 && (code == TRUTH_AND_EXPR
8270 || code == TRUTH_ANDIF_EXPR
8271 || code == TRUTH_OR_EXPR
8272 || code == TRUTH_ORIF_EXPR))
8274 enum tree_code ncode, icode;
8276 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8277 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8278 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8280 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8281 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8282 We don't want to pack more than two leaves into a non-IF AND/OR
8283 expression.
8284 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8285 equal to IF-CODE, then we don't want to add right-hand operand.
8286 If the inner right-hand side of left-hand operand has
8287 side-effects, or isn't simple, then we can't add to it,
8288 as otherwise we might destroy the if-sequence. */
8289 if (TREE_CODE (arg0) == icode
8290 && simple_operand_p_2 (arg1)
8291 /* Needed for sequence points to handle trapping and
8292 side-effects. */
8293 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8295 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8296 arg1);
8297 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8298 tem);
8300 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8301 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8302 else if (TREE_CODE (arg1) == icode
8303 && simple_operand_p_2 (arg0)
8304 /* Needed for sequence points to handle trapping and
8305 side-effects. */
8306 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8308 tem = fold_build2_loc (loc, ncode, type,
8309 arg0, TREE_OPERAND (arg1, 0));
8310 return fold_build2_loc (loc, icode, type, tem,
8311 TREE_OPERAND (arg1, 1));
8313 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8314 into (A OR B).
8315 For sequence point consistency, we need to check for trapping
8316 and side-effects. */
8317 else if (code == icode && simple_operand_p_2 (arg0)
8318 && simple_operand_p_2 (arg1))
8319 return fold_build2_loc (loc, ncode, type, arg0, arg1);
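/* E.g. on targets where LOGICAL_OP_NON_SHORT_CIRCUIT holds,
   (a && b) && c can be repacked as a && (b AND c) with a
   non-short-circuit inner AND, provided B and C cannot trap and
   have no side effects. */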
8322 return NULL_TREE;
8325 /* Fold a binary expression of code CODE and type TYPE with operands
8326 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8327 Return the folded expression if folding is successful. Otherwise,
8328 return NULL_TREE. */
8330 static tree
8331 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8333 enum tree_code compl_code;
8335 if (code == MIN_EXPR)
8336 compl_code = MAX_EXPR;
8337 else if (code == MAX_EXPR)
8338 compl_code = MIN_EXPR;
8339 else
8340 gcc_unreachable ();
8342 /* MIN (MAX (a, b), b) == b. */
8343 if (TREE_CODE (op0) == compl_code
8344 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8345 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8347 /* MIN (MAX (b, a), b) == b. */
8348 if (TREE_CODE (op0) == compl_code
8349 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8350 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8351 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8353 /* MIN (a, MAX (a, b)) == a. */
8354 if (TREE_CODE (op1) == compl_code
8355 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8356 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8357 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8359 /* MIN (a, MAX (b, a)) == a. */
8360 if (TREE_CODE (op1) == compl_code
8361 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8362 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8363 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8365 return NULL_TREE;
8368 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8369 by changing CODE to reduce the magnitude of constants involved in
8370 ARG0 of the comparison.
8371 Returns a canonicalized comparison tree if a simplification was
8372 possible, otherwise returns NULL_TREE.
8373 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8374 valid if signed overflow is undefined. */
8376 static tree
8377 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8378 tree arg0, tree arg1,
8379 bool *strict_overflow_p)
8381 enum tree_code code0 = TREE_CODE (arg0);
8382 tree t, cst0 = NULL_TREE;
8383 int sgn0;
8384 bool swap = false;
8386 /* Match A +- CST code arg1 and CST code arg1. We can change the
8387 first form only if overflow is undefined. */
8388 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8389 /* In principle pointers also have undefined overflow behavior,
8390 but that causes problems elsewhere. */
8391 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8392 && (code0 == MINUS_EXPR
8393 || code0 == PLUS_EXPR)
8394 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8395 || code0 == INTEGER_CST))
8396 return NULL_TREE;
8398 /* Identify the constant in arg0 and its sign. */
8399 if (code0 == INTEGER_CST)
8400 cst0 = arg0;
8401 else
8402 cst0 = TREE_OPERAND (arg0, 1);
8403 sgn0 = tree_int_cst_sgn (cst0);
8405 /* Overflowed constants and zero will cause problems. */
8406 if (integer_zerop (cst0)
8407 || TREE_OVERFLOW (cst0))
8408 return NULL_TREE;
8410 /* See if we can reduce the magnitude of the constant in
8411 arg0 by changing the comparison code. */
8412 if (code0 == INTEGER_CST)
8414 /* CST <= arg1 -> CST-1 < arg1. */
8415 if (code == LE_EXPR && sgn0 == 1)
8416 code = LT_EXPR;
8417 /* -CST < arg1 -> -CST-1 <= arg1. */
8418 else if (code == LT_EXPR && sgn0 == -1)
8419 code = LE_EXPR;
8420 /* CST > arg1 -> CST-1 >= arg1. */
8421 else if (code == GT_EXPR && sgn0 == 1)
8422 code = GE_EXPR;
8423 /* -CST >= arg1 -> -CST-1 > arg1. */
8424 else if (code == GE_EXPR && sgn0 == -1)
8425 code = GT_EXPR;
8426 else
8427 return NULL_TREE;
8428 /* arg1 code' CST' might be more canonical. */
8429 swap = true;
8431 else
8433 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8434 if (code == LT_EXPR
8435 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8436 code = LE_EXPR;
8437 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8438 else if (code == GT_EXPR
8439 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8440 code = GE_EXPR;
8441 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8442 else if (code == LE_EXPR
8443 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8444 code = LT_EXPR;
8445 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8446 else if (code == GE_EXPR
8447 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8448 code = GT_EXPR;
8449 else
8450 return NULL_TREE;
8451 *strict_overflow_p = true;
8454 /* Now build the constant reduced in magnitude. But not if that
8455 would produce one outside of its type's range. */
8456 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8457 && ((sgn0 == 1
8458 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8459 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8460 || (sgn0 == -1
8461 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8462 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8463 /* We cannot swap the comparison here as that would cause us to
8464 endlessly recurse. */
8465 return NULL_TREE;
8467 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8468 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8469 if (code0 != INTEGER_CST)
8470 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8471 t = fold_convert (TREE_TYPE (arg1), t);
8473 /* If swapping might yield a more canonical form, do so. */
8474 if (swap)
8475 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8476 else
8477 return fold_build2_loc (loc, code, type, t, arg1);
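/* E.g. x - 5 < y canonicalizes to x - 4 <= y (valid only when
   signed overflow is undefined), and the constant-first form
   7 <= y becomes y > 6 via the final swap. */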
8480 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8481 overflow further. Try to decrease the magnitude of constants involved
8482 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8483 and put sole constants at the second argument position.
8484 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8486 static tree
8487 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8488 tree arg0, tree arg1)
8490 tree t;
8491 bool strict_overflow_p;
8492 const char * const warnmsg = G_("assuming signed overflow does not occur "
8493 "when reducing constant in comparison");
8495 /* Try canonicalization by simplifying arg0. */
8496 strict_overflow_p = false;
8497 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8498 &strict_overflow_p);
8499 if (t)
8501 if (strict_overflow_p)
8502 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8503 return t;
8506 /* Try canonicalization by simplifying arg1 using the swapped
8507 comparison. */
8508 code = swap_tree_comparison (code);
8509 strict_overflow_p = false;
8510 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8511 &strict_overflow_p);
8512 if (t && strict_overflow_p)
8513 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8514 return t;
8517 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8518 space. This is used to avoid issuing overflow warnings for
8519 expressions like &p->x which cannot wrap. */
8521 static bool
8522 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8524 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8525 return true;
8527 if (bitpos < 0)
8528 return true;
8530 wide_int wi_offset;
8531 int precision = TYPE_PRECISION (TREE_TYPE (base));
8532 if (offset == NULL_TREE)
8533 wi_offset = wi::zero (precision);
8534 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8535 return true;
8536 else
8537 wi_offset = offset;
8539 bool overflow;
8540 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8541 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8542 if (overflow)
8543 return true;
8545 if (!wi::fits_uhwi_p (total))
8546 return true;
8548 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8549 if (size <= 0)
8550 return true;
8552 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8553 array. */
8554 if (TREE_CODE (base) == ADDR_EXPR)
8556 HOST_WIDE_INT base_size;
8558 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8559 if (base_size > 0 && size < base_size)
8560 size = base_size;
8563 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
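/* E.g. given char a[8], the address computation &a[0] + 4 cannot
   wrap: the total offset 4 does not exceed the 8-byte object size,
   so this returns false and no overflow warning is issued. */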
8566 /* Return the least significant HOST_WIDE_INT bits of T, an
8567 INTEGER_CST of sizetype kind. This makes sure to properly
8568 sign-extend the constant. */
8570 static HOST_WIDE_INT
8571 size_low_cst (const_tree t)
8573 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8574 int prec = TYPE_PRECISION (TREE_TYPE (t));
8575 if (prec < HOST_BITS_PER_WIDE_INT)
8576 return sext_hwi (w, prec);
8577 return w;
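/* E.g. with a 32-bit sizetype and a 64-bit HOST_WIDE_INT, the
   sizetype constant 0xfffffffc is returned as -4 rather than
   4294967292. */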
8580 /* Subroutine of fold_binary. This routine performs all of the
8581 transformations that are common to the equality/inequality
8582 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8583 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8584 fold_binary itself should call fold_binary instead. Fold a comparison with
8585 tree code CODE and type TYPE with operands OP0 and OP1. Return
8586 the folded comparison or NULL_TREE. */
8588 static tree
8589 fold_comparison (location_t loc, enum tree_code code, tree type,
8590 tree op0, tree op1)
8592 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8593 tree arg0, arg1, tem;
8595 arg0 = op0;
8596 arg1 = op1;
8598 STRIP_SIGN_NOPS (arg0);
8599 STRIP_SIGN_NOPS (arg1);
8601 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8602 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8603 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8605 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8606 && TREE_CODE (arg1) == INTEGER_CST
8607 && !TREE_OVERFLOW (arg1))
8609 const enum tree_code
8610 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8611 tree const1 = TREE_OPERAND (arg0, 1);
8612 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8613 tree variable = TREE_OPERAND (arg0, 0);
8614 tree new_const = int_const_binop (reverse_op, const2, const1);
8616 /* If the constant operation overflowed this can be
8617 simplified as a comparison against INT_MAX/INT_MIN. */
8618 if (TREE_OVERFLOW (new_const)
8619 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8621 int const1_sgn = tree_int_cst_sgn (const1);
8622 enum tree_code code2 = code;
8624 /* Get the sign of the constant on the lhs if the
8625 operation were VARIABLE + CONST1. */
8626 if (TREE_CODE (arg0) == MINUS_EXPR)
8627 const1_sgn = -const1_sgn;
8629 /* The sign of the constant determines if we overflowed
8630 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8631 Canonicalize to the INT_MIN overflow by swapping the comparison
8632 if necessary. */
8633 if (const1_sgn == -1)
8634 code2 = swap_tree_comparison (code);
8636 /* We can now look at the canonicalized case
8637 VARIABLE + 1 CODE2 INT_MIN
8638 and decide on the result. */
8639 switch (code2)
8641 case EQ_EXPR:
8642 case LT_EXPR:
8643 case LE_EXPR:
8644 return
8645 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8647 case NE_EXPR:
8648 case GE_EXPR:
8649 case GT_EXPR:
8650 return
8651 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8653 default:
8654 gcc_unreachable ();
8657 else
8659 if (!equality_code)
8660 fold_overflow_warning ("assuming signed overflow does not occur "
8661 "when changing X +- C1 cmp C2 to "
8662 "X cmp C2 -+ C1",
8663 WARN_STRICT_OVERFLOW_COMPARISON);
8664 return fold_build2_loc (loc, code, type, variable, new_const);
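/* E.g. x + 5 < 10 becomes x < 5; and when the rewritten constant
   overflows, as in x + 1 < INT_MIN, the comparison collapses to a
   constant (false here), assuming signed overflow is undefined. */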
8668 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8669 if (TREE_CODE (arg0) == MINUS_EXPR
8670 && equality_code
8671 && integer_zerop (arg1))
8673 /* ??? The transformation is valid for the other operators if overflow
8674 is undefined for the type, but performing it here badly interacts
8675 with the transformation in fold_cond_expr_with_comparison which
8676 attempts to synthesize ABS_EXPR. */
8677 if (!equality_code)
8678 fold_overflow_warning ("assuming signed overflow does not occur "
8679 "when changing X - Y cmp 0 to X cmp Y",
8680 WARN_STRICT_OVERFLOW_COMPARISON);
8681 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8682 TREE_OPERAND (arg0, 1));
8685 /* For comparisons of pointers we can decompose it to a compile time
8686 comparison of the base objects and the offsets into the object.
8687 This requires at least one operand being an ADDR_EXPR or a
8688 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8689 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8690 && (TREE_CODE (arg0) == ADDR_EXPR
8691 || TREE_CODE (arg1) == ADDR_EXPR
8692 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8693 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8695 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8696 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8697 machine_mode mode;
8698 int volatilep, unsignedp;
8699 bool indirect_base0 = false, indirect_base1 = false;
8701 /* Get base and offset for the access. Strip ADDR_EXPR for
8702 get_inner_reference, but put it back by stripping INDIRECT_REF
8703 off the base object if possible. indirect_baseN will be true
8704 if baseN is not an address but refers to the object itself. */
8705 base0 = arg0;
8706 if (TREE_CODE (arg0) == ADDR_EXPR)
8708 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8709 &bitsize, &bitpos0, &offset0, &mode,
8710 &unsignedp, &volatilep, false);
8711 if (TREE_CODE (base0) == INDIRECT_REF)
8712 base0 = TREE_OPERAND (base0, 0);
8713 else
8714 indirect_base0 = true;
8716 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8718 base0 = TREE_OPERAND (arg0, 0);
8719 STRIP_SIGN_NOPS (base0);
8720 if (TREE_CODE (base0) == ADDR_EXPR)
8722 base0 = TREE_OPERAND (base0, 0);
8723 indirect_base0 = true;
8725 offset0 = TREE_OPERAND (arg0, 1);
8726 if (tree_fits_shwi_p (offset0))
8728 HOST_WIDE_INT off = size_low_cst (offset0);
8729 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8730 * BITS_PER_UNIT)
8731 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8733 bitpos0 = off * BITS_PER_UNIT;
8734 offset0 = NULL_TREE;
8739 base1 = arg1;
8740 if (TREE_CODE (arg1) == ADDR_EXPR)
8742 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8743 &bitsize, &bitpos1, &offset1, &mode,
8744 &unsignedp, &volatilep, false);
8745 if (TREE_CODE (base1) == INDIRECT_REF)
8746 base1 = TREE_OPERAND (base1, 0);
8747 else
8748 indirect_base1 = true;
8750 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8752 base1 = TREE_OPERAND (arg1, 0);
8753 STRIP_SIGN_NOPS (base1);
8754 if (TREE_CODE (base1) == ADDR_EXPR)
8756 base1 = TREE_OPERAND (base1, 0);
8757 indirect_base1 = true;
8759 offset1 = TREE_OPERAND (arg1, 1);
8760 if (tree_fits_shwi_p (offset1))
8762 HOST_WIDE_INT off = size_low_cst (offset1);
8763 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8764 * BITS_PER_UNIT)
8765 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8767 bitpos1 = off * BITS_PER_UNIT;
8768 offset1 = NULL_TREE;
8773 /* A local variable can never be pointed to by
8774 the default SSA name of an incoming parameter. */
8775 if ((TREE_CODE (arg0) == ADDR_EXPR
8776 && indirect_base0
8777 && TREE_CODE (base0) == VAR_DECL
8778 && auto_var_in_fn_p (base0, current_function_decl)
8779 && !indirect_base1
8780 && TREE_CODE (base1) == SSA_NAME
8781 && SSA_NAME_IS_DEFAULT_DEF (base1)
8782 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8783 || (TREE_CODE (arg1) == ADDR_EXPR
8784 && indirect_base1
8785 && TREE_CODE (base1) == VAR_DECL
8786 && auto_var_in_fn_p (base1, current_function_decl)
8787 && !indirect_base0
8788 && TREE_CODE (base0) == SSA_NAME
8789 && SSA_NAME_IS_DEFAULT_DEF (base0)
8790 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8792 if (code == NE_EXPR)
8793 return constant_boolean_node (1, type);
8794 else if (code == EQ_EXPR)
8795 return constant_boolean_node (0, type);
8797 /* If we have equivalent bases we might be able to simplify. */
8798 else if (indirect_base0 == indirect_base1
8799 && operand_equal_p (base0, base1, 0))
8801 /* We can fold this expression to a constant if the non-constant
8802 offset parts are equal. */
8803 if ((offset0 == offset1
8804 || (offset0 && offset1
8805 && operand_equal_p (offset0, offset1, 0)))
8806 && (code == EQ_EXPR
8807 || code == NE_EXPR
8808 || (indirect_base0 && DECL_P (base0))
8809 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8812 if (!equality_code
8813 && bitpos0 != bitpos1
8814 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8815 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8816 fold_overflow_warning (("assuming pointer wraparound does not "
8817 "occur when comparing P +- C1 with "
8818 "P +- C2"),
8819 WARN_STRICT_OVERFLOW_CONDITIONAL);
8821 switch (code)
8823 case EQ_EXPR:
8824 return constant_boolean_node (bitpos0 == bitpos1, type);
8825 case NE_EXPR:
8826 return constant_boolean_node (bitpos0 != bitpos1, type);
8827 case LT_EXPR:
8828 return constant_boolean_node (bitpos0 < bitpos1, type);
8829 case LE_EXPR:
8830 return constant_boolean_node (bitpos0 <= bitpos1, type);
8831 case GE_EXPR:
8832 return constant_boolean_node (bitpos0 >= bitpos1, type);
8833 case GT_EXPR:
8834 return constant_boolean_node (bitpos0 > bitpos1, type);
8835 default:;
8838 /* We can simplify the comparison to a comparison of the variable
8839 offset parts if the constant offset parts are equal.
8840 Be careful to use signed sizetype here because otherwise we
8841 mess with array offsets in the wrong way. This is possible
8842 because pointer arithmetic is restricted to remain within an
8843 object and overflow on pointer differences is undefined as of
8844 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8845 else if (bitpos0 == bitpos1
8846 && (equality_code
8847 || (indirect_base0 && DECL_P (base0))
8848 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8850 /* By converting to signed sizetype we cover middle-end pointer
8851 arithmetic which operates on unsigned pointer types of size
8852 type size and ARRAY_REF offsets which are properly sign or
8853 zero extended from their type in case it is narrower than
8854 sizetype. */
8855 if (offset0 == NULL_TREE)
8856 offset0 = build_int_cst (ssizetype, 0);
8857 else
8858 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8859 if (offset1 == NULL_TREE)
8860 offset1 = build_int_cst (ssizetype, 0);
8861 else
8862 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8864 if (!equality_code
8865 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8866 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8867 fold_overflow_warning (("assuming pointer wraparound does not "
8868 "occur when comparing P +- C1 with "
8869 "P +- C2"),
8870 WARN_STRICT_OVERFLOW_COMPARISON);
8872 return fold_build2_loc (loc, code, type, offset0, offset1);
8875 /* For non-equal bases we can simplify if they are addresses
8876 of local binding decls or constants. */
8877 else if (indirect_base0 && indirect_base1
8878 /* We know that !operand_equal_p (base0, base1, 0)
8879 because the if condition was false. But make
8880 sure two decls are not the same. */
8881 && base0 != base1
8882 && TREE_CODE (arg0) == ADDR_EXPR
8883 && TREE_CODE (arg1) == ADDR_EXPR
8884 && (((TREE_CODE (base0) == VAR_DECL
8885 || TREE_CODE (base0) == PARM_DECL)
8886 && (targetm.binds_local_p (base0)
8887 || CONSTANT_CLASS_P (base1)))
8888 || CONSTANT_CLASS_P (base0))
8889 && (((TREE_CODE (base1) == VAR_DECL
8890 || TREE_CODE (base1) == PARM_DECL)
8891 && (targetm.binds_local_p (base1)
8892 || CONSTANT_CLASS_P (base0)))
8893 || CONSTANT_CLASS_P (base1)))
8895 if (code == EQ_EXPR)
8896 return omit_two_operands_loc (loc, type, boolean_false_node,
8897 arg0, arg1);
8898 else if (code == NE_EXPR)
8899 return omit_two_operands_loc (loc, type, boolean_true_node,
8900 arg0, arg1);
8902 /* For equal offsets we can simplify to a comparison of the
8903 base addresses. */
8904 else if (bitpos0 == bitpos1
8905 && (indirect_base0
8906 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8907 && (indirect_base1
8908 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8909 && ((offset0 == offset1)
8910 || (offset0 && offset1
8911 && operand_equal_p (offset0, offset1, 0))))
8913 if (indirect_base0)
8914 base0 = build_fold_addr_expr_loc (loc, base0);
8915 if (indirect_base1)
8916 base1 = build_fold_addr_expr_loc (loc, base1);
8917 return fold_build2_loc (loc, code, type, base0, base1);
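/* E.g. &s.a == &s.b for distinct members of the same struct S folds
   to false by comparing the members' bit positions within the
   common base object. */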
8921 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8922 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8923 the resulting offset is smaller in absolute value than the
8924 original one and has the same sign. */
8925 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8926 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8927 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8928 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8929 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8930 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8931 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8933 tree const1 = TREE_OPERAND (arg0, 1);
8934 tree const2 = TREE_OPERAND (arg1, 1);
8935 tree variable1 = TREE_OPERAND (arg0, 0);
8936 tree variable2 = TREE_OPERAND (arg1, 0);
8937 tree cst;
8938 const char * const warnmsg = G_("assuming signed overflow does not "
8939 "occur when combining constants around "
8940 "a comparison");
8942 /* Put the constant on the side where it doesn't overflow and is
8943 of lower absolute value than before and of the same sign. */
8944 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8945 ? MINUS_EXPR : PLUS_EXPR,
8946 const2, const1);
8947 if (!TREE_OVERFLOW (cst)
8948 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8949 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8951 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8952 return fold_build2_loc (loc, code, type,
8953 variable1,
8954 fold_build2_loc (loc, TREE_CODE (arg1),
8955 TREE_TYPE (arg1),
8956 variable2, cst));
8959 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8960 ? MINUS_EXPR : PLUS_EXPR,
8961 const1, const2);
8962 if (!TREE_OVERFLOW (cst)
8963 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8964 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8966 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8967 return fold_build2_loc (loc, code, type,
8968 fold_build2_loc (loc, TREE_CODE (arg0),
8969 TREE_TYPE (arg0),
8970 variable1, cst),
8971 variable2);
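/* Worked example (illustrative): for signed int x and y, the
   comparison "x + 3 < y + 7" matches with const1 = 3 and
   const2 = 7; cst = 7 - 3 = 4 does not overflow, has the same
   sign as const2 and a smaller absolute value, so the result is
   the equivalent but cheaper "x < y + 4". */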
8975 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8976 signed arithmetic case. That form is created by the compiler
8977 often enough for folding it to be of value. One example is in
8978 computing loop trip counts after Operator Strength Reduction. */
8979 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8980 && TREE_CODE (arg0) == MULT_EXPR
8981 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8982 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8983 && integer_zerop (arg1))
8985 tree const1 = TREE_OPERAND (arg0, 1);
8986 tree const2 = arg1; /* zero */
8987 tree variable1 = TREE_OPERAND (arg0, 0);
8988 enum tree_code cmp_code = code;
8990 /* Handle unfolded multiplication by zero. */
8991 if (integer_zerop (const1))
8992 return fold_build2_loc (loc, cmp_code, type, const1, const2);
8994 fold_overflow_warning (("assuming signed overflow does not occur when "
8995 "eliminating multiplication in comparison "
8996 "with zero"),
8997 WARN_STRICT_OVERFLOW_COMPARISON);
8999 /* If const1 is negative we swap the sense of the comparison. */
9000 if (tree_int_cst_sgn (const1) < 0)
9001 cmp_code = swap_tree_comparison (cmp_code);
9003 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
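/* E.g. (illustrative): with undefined signed overflow,
   "x * 4 > 0" folds to "x > 0", while "x * -2 > 0" takes the
   swap above and folds to "x < 0". */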
9006 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9007 if (tem)
9008 return tem;
9010 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9012 tree targ0 = strip_float_extensions (arg0);
9013 tree targ1 = strip_float_extensions (arg1);
9014 tree newtype = TREE_TYPE (targ0);
9016 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9017 newtype = TREE_TYPE (targ1);
9019 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9020 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9021 return fold_build2_loc (loc, code, type,
9022 fold_convert_loc (loc, newtype, targ0),
9023 fold_convert_loc (loc, newtype, targ1));
9025 /* (-a) CMP (-b) -> b CMP a */
9026 if (TREE_CODE (arg0) == NEGATE_EXPR
9027 && TREE_CODE (arg1) == NEGATE_EXPR)
9028 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9029 TREE_OPERAND (arg0, 0));
9031 if (TREE_CODE (arg1) == REAL_CST)
9033 REAL_VALUE_TYPE cst;
9034 cst = TREE_REAL_CST (arg1);
9036 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9037 if (TREE_CODE (arg0) == NEGATE_EXPR)
9038 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9039 TREE_OPERAND (arg0, 0),
9040 build_real (TREE_TYPE (arg1),
9041 real_value_negate (&cst)));
9043 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9044 /* a CMP (-0) -> a CMP 0 */
9045 if (REAL_VALUE_MINUS_ZERO (cst))
9046 return fold_build2_loc (loc, code, type, arg0,
9047 build_real (TREE_TYPE (arg1), dconst0));
9049 /* x != NaN is always true, other ops are always false. */
9050 if (REAL_VALUE_ISNAN (cst)
9051 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9053 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9054 return omit_one_operand_loc (loc, type, tem, arg0);
9057 /* Fold comparisons against infinity. */
9058 if (REAL_VALUE_ISINF (cst)
9059 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9061 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9062 if (tem != NULL_TREE)
9063 return tem;
9067 /* If this is a comparison of a real constant with a PLUS_EXPR
9068 or a MINUS_EXPR of a real constant, we can convert it into a
9069 comparison with a revised real constant as long as no overflow
9070 occurs when unsafe_math_optimizations are enabled. */
9071 if (flag_unsafe_math_optimizations
9072 && TREE_CODE (arg1) == REAL_CST
9073 && (TREE_CODE (arg0) == PLUS_EXPR
9074 || TREE_CODE (arg0) == MINUS_EXPR)
9075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9076 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9077 ? MINUS_EXPR : PLUS_EXPR,
9078 arg1, TREE_OPERAND (arg0, 1)))
9079 && !TREE_OVERFLOW (tem))
9080 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9082 /* Likewise, we can simplify a comparison of a real constant with
9083 a MINUS_EXPR whose first operand is also a real constant, i.e.
9084 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9085 floating-point types only if -fassociative-math is set. */
9086 if (flag_associative_math
9087 && TREE_CODE (arg1) == REAL_CST
9088 && TREE_CODE (arg0) == MINUS_EXPR
9089 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9090 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9091 arg1))
9092 && !TREE_OVERFLOW (tem))
9093 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9094 TREE_OPERAND (arg0, 1), tem);
9096 /* Fold comparisons against built-in math functions. */
9097 if (TREE_CODE (arg1) == REAL_CST
9098 && flag_unsafe_math_optimizations
9099 && ! flag_errno_math)
9101 enum built_in_function fcode = builtin_mathfn_code (arg0);
9103 if (fcode != END_BUILTINS)
9105 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9106 if (tem != NULL_TREE)
9107 return tem;
9112 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9113 && CONVERT_EXPR_P (arg0))
9115 /* If we are widening one operand of an integer comparison,
9116 see if the other operand is similarly being widened. Perhaps we
9117 can do the comparison in the narrower type. */
9118 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9119 if (tem)
9120 return tem;
9122 /* Or if we are changing signedness. */
9123 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9124 if (tem)
9125 return tem;
9128 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9129 constant, we can simplify it. */
9130 if (TREE_CODE (arg1) == INTEGER_CST
9131 && (TREE_CODE (arg0) == MIN_EXPR
9132 || TREE_CODE (arg0) == MAX_EXPR)
9133 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9135 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9136 if (tem)
9137 return tem;
9140 /* Simplify comparison of something with itself. (For IEEE
9141 floating-point, we can only do some of these simplifications.) */
9142 if (operand_equal_p (arg0, arg1, 0))
9144 switch (code)
9146 case EQ_EXPR:
9147 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9148 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9149 return constant_boolean_node (1, type);
9150 break;
9152 case GE_EXPR:
9153 case LE_EXPR:
9154 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9155 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9156 return constant_boolean_node (1, type);
9157 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9159 case NE_EXPR:
9160 /* For NE, we can only do this simplification if integer
9161 or we don't honor IEEE floating point NaNs. */
9162 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9163 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9164 break;
9165 /* ... fall through ... */
9166 case GT_EXPR:
9167 case LT_EXPR:
9168 return constant_boolean_node (0, type);
9169 default:
9170 gcc_unreachable ();
9174 /* If we are comparing an expression that just has comparisons
9175 of two integer values, arithmetic expressions of those comparisons,
9176 and constants, we can simplify it. There are only three cases
9177 to check: the two values can either be equal, the first can be
9178 greater, or the second can be greater. Fold the expression for
9179 those three values. Since each value must be 0 or 1, we have
9180 eight possibilities, each of which corresponds to the constant 0
9181 or 1 or one of the six possible comparisons.
9183 This handles common cases like (a > b) == 0 but also handles
9184 expressions like ((x > y) - (y > x)) > 0, which supposedly
9185 occur in macroized code. */
9187 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9189 tree cval1 = 0, cval2 = 0;
9190 int save_p = 0;
9192 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9193 /* Don't handle degenerate cases here; they should already
9194 have been handled anyway. */
9195 && cval1 != 0 && cval2 != 0
9196 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9197 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9198 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9199 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9200 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9201 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9202 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9204 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9205 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9207 /* We can't just pass T to eval_subst in case cval1 or cval2
9208 was the same as ARG1. */
9210 tree high_result
9211 = fold_build2_loc (loc, code, type,
9212 eval_subst (loc, arg0, cval1, maxval,
9213 cval2, minval),
9214 arg1);
9215 tree equal_result
9216 = fold_build2_loc (loc, code, type,
9217 eval_subst (loc, arg0, cval1, maxval,
9218 cval2, maxval),
9219 arg1);
9220 tree low_result
9221 = fold_build2_loc (loc, code, type,
9222 eval_subst (loc, arg0, cval1, minval,
9223 cval2, maxval),
9224 arg1);
9226 /* All three of these results should be 0 or 1. Confirm they are.
9227 Then use those values to select the proper code to use. */
9229 if (TREE_CODE (high_result) == INTEGER_CST
9230 && TREE_CODE (equal_result) == INTEGER_CST
9231 && TREE_CODE (low_result) == INTEGER_CST)
9233 /* Make a 3-bit mask with the high-order bit being the
9234 value for `>', the next for `=', and the low for `<'. */
9235 switch ((integer_onep (high_result) * 4)
9236 + (integer_onep (equal_result) * 2)
9237 + integer_onep (low_result))
9239 case 0:
9240 /* Always false. */
9241 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9242 case 1:
9243 code = LT_EXPR;
9244 break;
9245 case 2:
9246 code = EQ_EXPR;
9247 break;
9248 case 3:
9249 code = LE_EXPR;
9250 break;
9251 case 4:
9252 code = GT_EXPR;
9253 break;
9254 case 5:
9255 code = NE_EXPR;
9256 break;
9257 case 6:
9258 code = GE_EXPR;
9259 break;
9260 case 7:
9261 /* Always true. */
9262 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9265 if (save_p)
9267 tem = save_expr (build2 (code, type, cval1, cval2));
9268 SET_EXPR_LOCATION (tem, loc);
9269 return tem;
9271 return fold_build2_loc (loc, code, type, cval1, cval2);
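/* Worked example (illustrative): for "(a > b) == 0", substituting
   (max, min), (max, max) and (min, max) for (a, b) yields
   high_result = 0, equal_result = 1 and low_result = 1, i.e.
   mask 0*4 + 1*2 + 1 = 3, so CODE becomes LE_EXPR and the whole
   expression folds to "a <= b". */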
9276 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9277 into a single range test. */
9278 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9279 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9280 && TREE_CODE (arg1) == INTEGER_CST
9281 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9282 && !integer_zerop (TREE_OPERAND (arg0, 1))
9283 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9284 && !TREE_OVERFLOW (arg1))
9286 tem = fold_div_compare (loc, code, type, arg0, arg1);
9287 if (tem != NULL_TREE)
9288 return tem;
9291 /* Fold ~X op ~Y as Y op X. */
9292 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9293 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9295 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9296 return fold_build2_loc (loc, code, type,
9297 fold_convert_loc (loc, cmp_type,
9298 TREE_OPERAND (arg1, 0)),
9299 TREE_OPERAND (arg0, 0));
9302 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9303 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9304 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9306 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9307 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9308 TREE_OPERAND (arg0, 0),
9309 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9310 fold_convert_loc (loc, cmp_type, arg1)));
9313 return NULL_TREE;
9317 /* Subroutine of fold_binary. Optimize complex multiplications of the
9318 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9319 argument EXPR represents the expression "z" of type TYPE. */
9321 static tree
9322 fold_mult_zconjz (location_t loc, tree type, tree expr)
9324 tree itype = TREE_TYPE (type);
9325 tree rpart, ipart, tem;
9327 if (TREE_CODE (expr) == COMPLEX_EXPR)
9329 rpart = TREE_OPERAND (expr, 0);
9330 ipart = TREE_OPERAND (expr, 1);
9332 else if (TREE_CODE (expr) == COMPLEX_CST)
9334 rpart = TREE_REALPART (expr);
9335 ipart = TREE_IMAGPART (expr);
9337 else
9339 expr = save_expr (expr);
9340 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9341 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9344 rpart = save_expr (rpart);
9345 ipart = save_expr (ipart);
9346 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9347 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9348 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9349 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9350 build_zero_cst (itype));
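/* Worked example (illustrative): for z = 3 + 4i this builds
   COMPLEX_EXPR <3*3 + 4*4, 0>, i.e. z * conj(z)
   = (3 + 4i) * (3 - 4i) = 25 + 0i. */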
9354 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9355 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9356 guarantees that P and N have the same least significant log2(M) bits.
9357 N is not otherwise constrained. In particular, N is not normalized to
9358 0 <= N < M as is common. In general, the precise value of P is unknown.
9359 M is chosen as large as possible such that constant N can be determined.
9361 Returns M and sets *RESIDUE to N.
9363 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9364 account. This is not always possible due to PR 35705.
9367 static unsigned HOST_WIDE_INT
9368 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9369 bool allow_func_align)
9371 enum tree_code code;
9373 *residue = 0;
9375 code = TREE_CODE (expr);
9376 if (code == ADDR_EXPR)
9378 unsigned int bitalign;
9379 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9380 *residue /= BITS_PER_UNIT;
9381 return bitalign / BITS_PER_UNIT;
9383 else if (code == POINTER_PLUS_EXPR)
9385 tree op0, op1;
9386 unsigned HOST_WIDE_INT modulus;
9387 enum tree_code inner_code;
9389 op0 = TREE_OPERAND (expr, 0);
9390 STRIP_NOPS (op0);
9391 modulus = get_pointer_modulus_and_residue (op0, residue,
9392 allow_func_align);
9394 op1 = TREE_OPERAND (expr, 1);
9395 STRIP_NOPS (op1);
9396 inner_code = TREE_CODE (op1);
9397 if (inner_code == INTEGER_CST)
9399 *residue += TREE_INT_CST_LOW (op1);
9400 return modulus;
9402 else if (inner_code == MULT_EXPR)
9404 op1 = TREE_OPERAND (op1, 1);
9405 if (TREE_CODE (op1) == INTEGER_CST)
9407 unsigned HOST_WIDE_INT align;
9409 /* Compute the greatest power-of-2 divisor of op1. */
9410 align = TREE_INT_CST_LOW (op1);
9411 align &= -align;
9413 /* If align is non-zero and less than modulus, replace
9414 modulus with align. If align is 0, then either op1 is 0
9415 or the greatest power-of-2 divisor of op1 doesn't fit in an
9416 unsigned HOST_WIDE_INT. In either case, no additional
9417 constraint is imposed. */
9418 if (align)
9419 modulus = MIN (modulus, align);
9421 return modulus;
9426 /* If we get here, we were unable to determine anything useful about the
9427 expression. */
9428 return 1;
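/* Illustrative example (the variable name and its alignment are
   hypothetical): for "static char buf[32]" aligned to 16 bytes,
   the expression "&buf p+ 5" recurses on &buf (modulus 16,
   residue 0) and then adds the INTEGER_CST, giving modulus 16
   and residue 5: every possible value of the pointer agrees
   with 5 in its low log2(16) = 4 bits. */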
9431 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9432 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9434 static bool
9435 vec_cst_ctor_to_array (tree arg, tree *elts)
9437 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9439 if (TREE_CODE (arg) == VECTOR_CST)
9441 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9442 elts[i] = VECTOR_CST_ELT (arg, i);
9444 else if (TREE_CODE (arg) == CONSTRUCTOR)
9446 constructor_elt *elt;
9448 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9449 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9450 return false;
9451 else
9452 elts[i] = elt->value;
9454 else
9455 return false;
9456 for (; i < nelts; i++)
9457 elts[i]
9458 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9459 return true;
9462 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9463 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9464 NULL_TREE otherwise. */
9466 static tree
9467 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9469 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9470 tree *elts;
9471 bool need_ctor = false;
9473 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9474 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9475 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9476 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9477 return NULL_TREE;
9479 elts = XALLOCAVEC (tree, nelts * 3);
9480 if (!vec_cst_ctor_to_array (arg0, elts)
9481 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9482 return NULL_TREE;
9484 for (i = 0; i < nelts; i++)
9486 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9487 need_ctor = true;
9488 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9491 if (need_ctor)
9493 vec<constructor_elt, va_gc> *v;
9494 vec_alloc (v, nelts);
9495 for (i = 0; i < nelts; i++)
9496 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9497 return build_constructor (type, v);
9499 else
9500 return build_vector (type, &elts[2 * nelts]);
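/* Worked example (illustrative): for four-element vectors
   arg0 = {1, 2, 3, 4}, arg1 = {5, 6, 7, 8} and
   sel = {0, 5, 2, 7}, ELTS holds the eight input elements
   followed by the selected result {1, 6, 3, 8}, which is
   returned as a VECTOR_CST because every element is constant. */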
9503 /* Try to fold a pointer difference of type TYPE between two address
9504 expressions of array references AREF0 and AREF1 using location LOC. Return a
9505 simplified expression for the difference or NULL_TREE. */
9507 static tree
9508 fold_addr_of_array_ref_difference (location_t loc, tree type,
9509 tree aref0, tree aref1)
9511 tree base0 = TREE_OPERAND (aref0, 0);
9512 tree base1 = TREE_OPERAND (aref1, 0);
9513 tree base_offset = build_int_cst (type, 0);
9515 /* If the bases are array references as well, recurse. If the bases
9516 are pointer indirections compute the difference of the pointers.
9517 If the bases are equal, we are set. */
9518 if ((TREE_CODE (base0) == ARRAY_REF
9519 && TREE_CODE (base1) == ARRAY_REF
9520 && (base_offset
9521 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9522 || (INDIRECT_REF_P (base0)
9523 && INDIRECT_REF_P (base1)
9524 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9525 TREE_OPERAND (base0, 0),
9526 TREE_OPERAND (base1, 0))))
9527 || operand_equal_p (base0, base1, 0))
9529 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9530 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9531 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9532 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9533 return fold_build2_loc (loc, PLUS_EXPR, type,
9534 base_offset,
9535 fold_build2_loc (loc, MULT_EXPR, type,
9536 diff, esz));
9538 return NULL_TREE;
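/* Worked example (illustrative): for "int a[10]", the byte
   difference "&a[7] - &a[1]" recurses to the common base "a"
   and returns 0 + (7 - 1) * sizeof(int) = 24; the enclosing
   exact division by the element size then yields the familiar
   pointer difference 6. */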
9541 /* If the real or vector real constant CST of type TYPE has an exact
9542 inverse, return it, else return NULL. */
9544 static tree
9545 exact_inverse (tree type, tree cst)
9547 REAL_VALUE_TYPE r;
9548 tree unit_type, *elts;
9549 machine_mode mode;
9550 unsigned vec_nelts, i;
9552 switch (TREE_CODE (cst))
9554 case REAL_CST:
9555 r = TREE_REAL_CST (cst);
9557 if (exact_real_inverse (TYPE_MODE (type), &r))
9558 return build_real (type, r);
9560 return NULL_TREE;
9562 case VECTOR_CST:
9563 vec_nelts = VECTOR_CST_NELTS (cst);
9564 elts = XALLOCAVEC (tree, vec_nelts);
9565 unit_type = TREE_TYPE (type);
9566 mode = TYPE_MODE (unit_type);
9568 for (i = 0; i < vec_nelts; i++)
9570 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9571 if (!exact_real_inverse (mode, &r))
9572 return NULL_TREE;
9573 elts[i] = build_real (unit_type, r);
9576 return build_vector (type, elts);
9578 default:
9579 return NULL_TREE;
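/* Illustrative: a double REAL_CST of 0.25 folds to the exact
   inverse 4.0, whereas 3.0 yields NULL_TREE because 1/3 is not
   exactly representable in binary floating point. */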
9583 /* Mask out the tz least significant bits of X of type TYPE where
9584 tz is the number of trailing zeroes in Y. */
9585 static wide_int
9586 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9588 int tz = wi::ctz (y);
9589 if (tz > 0)
9590 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9591 return x;
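/* Worked example (illustrative): for y = 24 (binary 11000,
   ctz = 3) and x = 0b10111, the result is 0b10000: the three
   least significant bits of x are cleared. */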
9594 /* Return true when T is an address and is known to be nonzero.
9595 For floating point we further ensure that T is not denormal.
9596 Similar logic is present in nonzero_address in rtlanal.h.
9598 If the return value is based on the assumption that signed overflow
9599 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9600 change *STRICT_OVERFLOW_P. */
9602 static bool
9603 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9605 tree type = TREE_TYPE (t);
9606 enum tree_code code;
9608 /* Doing something useful for floating point would need more work. */
9609 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9610 return false;
9612 code = TREE_CODE (t);
9613 switch (TREE_CODE_CLASS (code))
9615 case tcc_unary:
9616 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9617 strict_overflow_p);
9618 case tcc_binary:
9619 case tcc_comparison:
9620 return tree_binary_nonzero_warnv_p (code, type,
9621 TREE_OPERAND (t, 0),
9622 TREE_OPERAND (t, 1),
9623 strict_overflow_p);
9624 case tcc_constant:
9625 case tcc_declaration:
9626 case tcc_reference:
9627 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9629 default:
9630 break;
9633 switch (code)
9635 case TRUTH_NOT_EXPR:
9636 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9637 strict_overflow_p);
9639 case TRUTH_AND_EXPR:
9640 case TRUTH_OR_EXPR:
9641 case TRUTH_XOR_EXPR:
9642 return tree_binary_nonzero_warnv_p (code, type,
9643 TREE_OPERAND (t, 0),
9644 TREE_OPERAND (t, 1),
9645 strict_overflow_p);
9647 case COND_EXPR:
9648 case CONSTRUCTOR:
9649 case OBJ_TYPE_REF:
9650 case ASSERT_EXPR:
9651 case ADDR_EXPR:
9652 case WITH_SIZE_EXPR:
9653 case SSA_NAME:
9654 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9656 case COMPOUND_EXPR:
9657 case MODIFY_EXPR:
9658 case BIND_EXPR:
9659 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9660 strict_overflow_p);
9662 case SAVE_EXPR:
9663 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9664 strict_overflow_p);
9666 case CALL_EXPR:
9668 tree fndecl = get_callee_fndecl (t);
9669 if (!fndecl) return false;
9670 if (flag_delete_null_pointer_checks && !flag_check_new
9671 && DECL_IS_OPERATOR_NEW (fndecl)
9672 && !TREE_NOTHROW (fndecl))
9673 return true;
9674 if (flag_delete_null_pointer_checks
9675 && lookup_attribute ("returns_nonnull",
9676 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9677 return true;
9678 return alloca_call_p (t);
9681 default:
9682 break;
9684 return false;
9687 /* Return true when T is an address and is known to be nonzero.
9688 Handle warnings about undefined signed overflow. */
9690 static bool
9691 tree_expr_nonzero_p (tree t)
9693 bool ret, strict_overflow_p;
9695 strict_overflow_p = false;
9696 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9697 if (strict_overflow_p)
9698 fold_overflow_warning (("assuming signed overflow does not occur when "
9699 "determining that expression is always "
9700 "non-zero"),
9701 WARN_STRICT_OVERFLOW_MISC);
9702 return ret;
9705 /* Fold a binary expression of code CODE and type TYPE with operands
9706 OP0 and OP1. LOC is the location of the resulting expression.
9707 Return the folded expression if folding is successful. Otherwise,
9708 return NULL_TREE. */
9710 tree
9711 fold_binary_loc (location_t loc,
9712 enum tree_code code, tree type, tree op0, tree op1)
9714 enum tree_code_class kind = TREE_CODE_CLASS (code);
9715 tree arg0, arg1, tem;
9716 tree t1 = NULL_TREE;
9717 bool strict_overflow_p;
9718 unsigned int prec;
9720 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9721 && TREE_CODE_LENGTH (code) == 2
9722 && op0 != NULL_TREE
9723 && op1 != NULL_TREE);
9725 arg0 = op0;
9726 arg1 = op1;
9728 /* Strip any conversions that don't change the mode. This is
9729 safe for every expression, except for a comparison expression
9730 because its signedness is derived from its operands. So, in
9731 the latter case, only strip conversions that don't change the
9732 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9733 preserved.
9735 Note that this is done as an internal manipulation within the
9736 constant folder, in order to find the simplest representation
9737 of the arguments so that their form can be studied. In any
9738 case, the appropriate type conversions should be put back in
9739 the tree that will get out of the constant folder. */
9741 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9743 STRIP_SIGN_NOPS (arg0);
9744 STRIP_SIGN_NOPS (arg1);
9746 else
9748 STRIP_NOPS (arg0);
9749 STRIP_NOPS (arg1);
9752 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9753 constant but we can't do arithmetic on them. */
9754 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9755 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9756 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9757 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9758 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9759 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9760 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9762 if (kind == tcc_binary)
9764 /* Make sure type and arg0 have the same saturating flag. */
9765 gcc_assert (TYPE_SATURATING (type)
9766 == TYPE_SATURATING (TREE_TYPE (arg0)));
9767 tem = const_binop (code, arg0, arg1);
9769 else if (kind == tcc_comparison)
9770 tem = fold_relational_const (code, type, arg0, arg1);
9771 else
9772 tem = NULL_TREE;
9774 if (tem != NULL_TREE)
9776 if (TREE_TYPE (tem) != type)
9777 tem = fold_convert_loc (loc, type, tem);
9778 return tem;
9782 /* If this is a commutative operation, and ARG0 is a constant, move it
9783 to ARG1 to reduce the number of tests below. */
9784 if (commutative_tree_code (code)
9785 && tree_swap_operands_p (arg0, arg1, true))
9786 return fold_build2_loc (loc, code, type, op1, op0);
9788 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9789 to ARG1 to reduce the number of tests below. */
9790 if (kind == tcc_comparison
9791 && tree_swap_operands_p (arg0, arg1, true))
9792 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9794 tem = generic_simplify (loc, code, type, op0, op1);
9795 if (tem)
9796 return tem;
9798 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9800 First check for cases where an arithmetic operation is applied to a
9801 compound, conditional, or comparison operation. Push the arithmetic
9802 operation inside the compound or conditional to see if any folding
9803 can then be done. Convert comparison to conditional for this purpose.
9804 This also optimizes non-constant cases that used to be done in
9805 expand_expr.
9807 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9808 where one of the operands is a comparison and the other is a comparison, a
9809 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9810 code below would make the expression more complex. Change it to a
9811 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9812 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9814 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9815 || code == EQ_EXPR || code == NE_EXPR)
9816 && TREE_CODE (type) != VECTOR_TYPE
9817 && ((truth_value_p (TREE_CODE (arg0))
9818 && (truth_value_p (TREE_CODE (arg1))
9819 || (TREE_CODE (arg1) == BIT_AND_EXPR
9820 && integer_onep (TREE_OPERAND (arg1, 1)))))
9821 || (truth_value_p (TREE_CODE (arg1))
9822 && (truth_value_p (TREE_CODE (arg0))
9823 || (TREE_CODE (arg0) == BIT_AND_EXPR
9824 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9826 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9827 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9828 : TRUTH_XOR_EXPR,
9829 boolean_type_node,
9830 fold_convert_loc (loc, boolean_type_node, arg0),
9831 fold_convert_loc (loc, boolean_type_node, arg1));
9833 if (code == EQ_EXPR)
9834 tem = invert_truthvalue_loc (loc, tem);
9836 return fold_convert_loc (loc, type, tem);
9839 if (TREE_CODE_CLASS (code) == tcc_binary
9840 || TREE_CODE_CLASS (code) == tcc_comparison)
9842 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9844 tem = fold_build2_loc (loc, code, type,
9845 fold_convert_loc (loc, TREE_TYPE (op0),
9846 TREE_OPERAND (arg0, 1)), op1);
9847 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9848 tem);
9850 if (TREE_CODE (arg1) == COMPOUND_EXPR
9851 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9853 tem = fold_build2_loc (loc, code, type, op0,
9854 fold_convert_loc (loc, TREE_TYPE (op1),
9855 TREE_OPERAND (arg1, 1)));
9856 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9857 tem);
9860 if (TREE_CODE (arg0) == COND_EXPR
9861 || TREE_CODE (arg0) == VEC_COND_EXPR
9862 || COMPARISON_CLASS_P (arg0))
9864 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9865 arg0, arg1,
9866 /*cond_first_p=*/1);
9867 if (tem != NULL_TREE)
9868 return tem;
9871 if (TREE_CODE (arg1) == COND_EXPR
9872 || TREE_CODE (arg1) == VEC_COND_EXPR
9873 || COMPARISON_CLASS_P (arg1))
9875 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9876 arg1, arg0,
9877 /*cond_first_p=*/0);
9878 if (tem != NULL_TREE)
9879 return tem;
9883 switch (code)
9885 case MEM_REF:
9886 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9887 if (TREE_CODE (arg0) == ADDR_EXPR
9888 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9890 tree iref = TREE_OPERAND (arg0, 0);
9891 return fold_build2 (MEM_REF, type,
9892 TREE_OPERAND (iref, 0),
9893 int_const_binop (PLUS_EXPR, arg1,
9894 TREE_OPERAND (iref, 1)));
9897 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9898 if (TREE_CODE (arg0) == ADDR_EXPR
9899 && handled_component_p (TREE_OPERAND (arg0, 0)))
9901 tree base;
9902 HOST_WIDE_INT coffset;
9903 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9904 &coffset);
9905 if (!base)
9906 return NULL_TREE;
9907 return fold_build2 (MEM_REF, type,
9908 build_fold_addr_expr (base),
9909 int_const_binop (PLUS_EXPR, arg1,
9910 size_int (coffset)));
9913 return NULL_TREE;
9915 case POINTER_PLUS_EXPR:
9916 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9917 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9918 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9919 return fold_convert_loc (loc, type,
9920 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9921 fold_convert_loc (loc, sizetype,
9922 arg1),
9923 fold_convert_loc (loc, sizetype,
9924 arg0)));
9926 /* PTR_CST +p CST -> CST1, i.e. the sum of two constants folds to a new constant. */
9927 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9928 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9929 fold_convert_loc (loc, type, arg1));
9931 return NULL_TREE;
9933 case PLUS_EXPR:
9934 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9936 /* X + (X / CST) * -CST is X % CST. */
9937 if (TREE_CODE (arg1) == MULT_EXPR
9938 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9939 && operand_equal_p (arg0,
9940 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9942 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9943 tree cst1 = TREE_OPERAND (arg1, 1);
9944 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9945 cst1, cst0);
9946 if (sum && integer_zerop (sum))
9947 return fold_convert_loc (loc, type,
9948 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9949 TREE_TYPE (arg0), arg0,
9950 cst0));
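/* E.g. (illustrative): "x + (x / 7) * -7" has cst0 = 7 and
   cst1 = -7, whose sum folds to zero, so the whole expression
   becomes "x % 7" (TRUNC_MOD_EXPR). */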
9954 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9955 one. Make sure the type is not saturating and has the signedness of
9956 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9957 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9958 if ((TREE_CODE (arg0) == MULT_EXPR
9959 || TREE_CODE (arg1) == MULT_EXPR)
9960 && !TYPE_SATURATING (type)
9961 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9962 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9963 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9965 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9966 if (tem)
9967 return tem;
9970 if (! FLOAT_TYPE_P (type))
9972 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9973 with a constant, and the two constants have no bits in common,
9974 we should treat this as a BIT_IOR_EXPR since this may produce more
9975 simplifications. */
9976 if (TREE_CODE (arg0) == BIT_AND_EXPR
9977 && TREE_CODE (arg1) == BIT_AND_EXPR
9978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9979 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9980 && wi::bit_and (TREE_OPERAND (arg0, 1),
9981 TREE_OPERAND (arg1, 1)) == 0)
9983 code = BIT_IOR_EXPR;
9984 goto bit_ior;
9987 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9988 (plus (plus (mult) (mult)) (foo)) so that we can
9989 take advantage of the factoring cases below. */
9990 if (TYPE_OVERFLOW_WRAPS (type)
9991 && (((TREE_CODE (arg0) == PLUS_EXPR
9992 || TREE_CODE (arg0) == MINUS_EXPR)
9993 && TREE_CODE (arg1) == MULT_EXPR)
9994 || ((TREE_CODE (arg1) == PLUS_EXPR
9995 || TREE_CODE (arg1) == MINUS_EXPR)
9996 && TREE_CODE (arg0) == MULT_EXPR)))
9998 tree parg0, parg1, parg, marg;
9999 enum tree_code pcode;
10001 if (TREE_CODE (arg1) == MULT_EXPR)
10002 parg = arg0, marg = arg1;
10003 else
10004 parg = arg1, marg = arg0;
10005 pcode = TREE_CODE (parg);
10006 parg0 = TREE_OPERAND (parg, 0);
10007 parg1 = TREE_OPERAND (parg, 1);
10008 STRIP_NOPS (parg0);
10009 STRIP_NOPS (parg1);
10011 if (TREE_CODE (parg0) == MULT_EXPR
10012 && TREE_CODE (parg1) != MULT_EXPR)
10013 return fold_build2_loc (loc, pcode, type,
10014 fold_build2_loc (loc, PLUS_EXPR, type,
10015 fold_convert_loc (loc, type,
10016 parg0),
10017 fold_convert_loc (loc, type,
10018 marg)),
10019 fold_convert_loc (loc, type, parg1));
10020 if (TREE_CODE (parg0) != MULT_EXPR
10021 && TREE_CODE (parg1) == MULT_EXPR)
10022 return
10023 fold_build2_loc (loc, PLUS_EXPR, type,
10024 fold_convert_loc (loc, type, parg0),
10025 fold_build2_loc (loc, pcode, type,
10026 fold_convert_loc (loc, type, marg),
10027 fold_convert_loc (loc, type,
10028 parg1)));
10031 else
10033 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10034 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10035 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10037 /* Likewise if the operands are reversed. */
10038 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10039 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10041 /* Convert X + -C into X - C. */
10042 if (TREE_CODE (arg1) == REAL_CST
10043 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10045 tem = fold_negate_const (arg1, type);
10046 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10047 return fold_build2_loc (loc, MINUS_EXPR, type,
10048 fold_convert_loc (loc, type, arg0),
10049 fold_convert_loc (loc, type, tem));
10052 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10053 to __complex__ ( x, y ). This is not the same for SNaNs or
10054 if signed zeros are involved. */
10055 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10056 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10057 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10059 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10060 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10061 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10062 bool arg0rz = false, arg0iz = false;
10063 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10064 || (arg0i && (arg0iz = real_zerop (arg0i))))
10066 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10067 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10068 if (arg0rz && arg1i && real_zerop (arg1i))
10070 tree rp = arg1r ? arg1r
10071 : build1 (REALPART_EXPR, rtype, arg1);
10072 tree ip = arg0i ? arg0i
10073 : build1 (IMAGPART_EXPR, rtype, arg0);
10074 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10076 else if (arg0iz && arg1r && real_zerop (arg1r))
10078 tree rp = arg0r ? arg0r
10079 : build1 (REALPART_EXPR, rtype, arg0);
10080 tree ip = arg1i ? arg1i
10081 : build1 (IMAGPART_EXPR, rtype, arg1);
10082 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10087 if (flag_unsafe_math_optimizations
10088 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10089 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10090 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10091 return tem;
10093 /* Convert x+x into x*2.0. */
10094 if (operand_equal_p (arg0, arg1, 0)
10095 && SCALAR_FLOAT_TYPE_P (type))
10096 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10097 build_real (type, dconst2));
10099 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10100 We associate floats only if the user has specified
10101 -fassociative-math. */
10102 if (flag_associative_math
10103 && TREE_CODE (arg1) == PLUS_EXPR
10104 && TREE_CODE (arg0) != MULT_EXPR)
10106 tree tree10 = TREE_OPERAND (arg1, 0);
10107 tree tree11 = TREE_OPERAND (arg1, 1);
10108 if (TREE_CODE (tree11) == MULT_EXPR
10109 && TREE_CODE (tree10) == MULT_EXPR)
10111 tree tree0;
10112 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10113 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10116 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10117 We associate floats only if the user has specified
10118 -fassociative-math. */
10119 if (flag_associative_math
10120 && TREE_CODE (arg0) == PLUS_EXPR
10121 && TREE_CODE (arg1) != MULT_EXPR)
10123 tree tree00 = TREE_OPERAND (arg0, 0);
10124 tree tree01 = TREE_OPERAND (arg0, 1);
10125 if (TREE_CODE (tree01) == MULT_EXPR
10126 && TREE_CODE (tree00) == MULT_EXPR)
10128 tree tree0;
10129 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10130 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10135 bit_rotate:
10136 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10137 is a rotate of A by C1 bits. */
10138 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10139 is a rotate of A by B bits. */
10141 enum tree_code code0, code1;
10142 tree rtype;
10143 code0 = TREE_CODE (arg0);
10144 code1 = TREE_CODE (arg1);
10145 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10146 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10147 && operand_equal_p (TREE_OPERAND (arg0, 0),
10148 TREE_OPERAND (arg1, 0), 0)
10149 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10150 TYPE_UNSIGNED (rtype))
10151 /* Only create rotates in complete modes. Other cases are not
10152 expanded properly. */
10153 && (element_precision (rtype)
10154 == element_precision (TYPE_MODE (rtype))))
10156 tree tree01, tree11;
10157 enum tree_code code01, code11;
10159 tree01 = TREE_OPERAND (arg0, 1);
10160 tree11 = TREE_OPERAND (arg1, 1);
10161 STRIP_NOPS (tree01);
10162 STRIP_NOPS (tree11);
10163 code01 = TREE_CODE (tree01);
10164 code11 = TREE_CODE (tree11);
10165 if (code01 == INTEGER_CST
10166 && code11 == INTEGER_CST
10167 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10168 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10170 tem = build2_loc (loc, LROTATE_EXPR,
10171 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10172 TREE_OPERAND (arg0, 0),
10173 code0 == LSHIFT_EXPR ? tree01 : tree11);
10174 return fold_convert_loc (loc, type, tem);
10176 else if (code11 == MINUS_EXPR)
10178 tree tree110, tree111;
10179 tree110 = TREE_OPERAND (tree11, 0);
10180 tree111 = TREE_OPERAND (tree11, 1);
10181 STRIP_NOPS (tree110);
10182 STRIP_NOPS (tree111);
10183 if (TREE_CODE (tree110) == INTEGER_CST
10184 && 0 == compare_tree_int (tree110,
10185 element_precision
10186 (TREE_TYPE (TREE_OPERAND
10187 (arg0, 0))))
10188 && operand_equal_p (tree01, tree111, 0))
10189 return
10190 fold_convert_loc (loc, type,
10191 build2 ((code0 == LSHIFT_EXPR
10192 ? LROTATE_EXPR
10193 : RROTATE_EXPR),
10194 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10195 TREE_OPERAND (arg0, 0), tree01));
10197 else if (code01 == MINUS_EXPR)
10199 tree tree010, tree011;
10200 tree010 = TREE_OPERAND (tree01, 0);
10201 tree011 = TREE_OPERAND (tree01, 1);
10202 STRIP_NOPS (tree010);
10203 STRIP_NOPS (tree011);
10204 if (TREE_CODE (tree010) == INTEGER_CST
10205 && 0 == compare_tree_int (tree010,
10206 element_precision
10207 (TREE_TYPE (TREE_OPERAND
10208 (arg0, 0))))
10209 && operand_equal_p (tree11, tree011, 0))
10210 return fold_convert_loc
10211 (loc, type,
10212 build2 ((code0 != LSHIFT_EXPR
10213 ? LROTATE_EXPR
10214 : RROTATE_EXPR),
10215 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10216 TREE_OPERAND (arg0, 0), tree11));
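/* Worked examples (illustrative): for a 32-bit unsigned x,
   "(x << 3) + (x >> 29)" satisfies 3 + 29 == 32 and becomes a
   left-rotate of x by 3; "(x << n) + (x >> (32 - n))" matches
   the MINUS_EXPR form above and likewise becomes a left-rotate
   by n. */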
10221 associate:
10222 /* In most languages, we can't associate operations on floats through
10223 parentheses. Rather than remember where the parentheses were, we
10224 don't associate floats at all, unless the user has specified
10225 -fassociative-math.
10226 And, we need to make sure type is not saturating. */
10228 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10229 && !TYPE_SATURATING (type))
10231 tree var0, con0, lit0, minus_lit0;
10232 tree var1, con1, lit1, minus_lit1;
10233 tree atype = type;
10234 bool ok = true;
10236 /* Split both trees into variables, constants, and literals. Then
10237 associate each group together, the constants with literals,
10238 then the result with variables. This increases the chances of
10239 literals being recombined later and of generating relocatable
10240 expressions for the sum of a constant and literal. */
10241 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10242 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10243 code == MINUS_EXPR);
10245 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10246 if (code == MINUS_EXPR)
10247 code = PLUS_EXPR;
10249 /* With undefined overflow prefer doing association in a type
10250 which wraps on overflow, if that is one of the operand types. */
10251 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10252 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10254 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10255 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10256 atype = TREE_TYPE (arg0);
10257 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10258 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10259 atype = TREE_TYPE (arg1);
10260 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10263 /* With undefined overflow we can only associate constants with one
10264 variable, and constants whose association doesn't overflow. */
10265 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10266 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10268 if (var0 && var1)
10270 tree tmp0 = var0;
10271 tree tmp1 = var1;
10273 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10274 tmp0 = TREE_OPERAND (tmp0, 0);
10275 if (CONVERT_EXPR_P (tmp0)
10276 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10277 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10278 <= TYPE_PRECISION (atype)))
10279 tmp0 = TREE_OPERAND (tmp0, 0);
10280 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10281 tmp1 = TREE_OPERAND (tmp1, 0);
10282 if (CONVERT_EXPR_P (tmp1)
10283 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10284 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10285 <= TYPE_PRECISION (atype)))
10286 tmp1 = TREE_OPERAND (tmp1, 0);
10287 /* The only case we can still associate with two variables
10288 is if they are the same, modulo negation and bit-pattern
10289 preserving conversions. */
10290 if (!operand_equal_p (tmp0, tmp1, 0))
10291 ok = false;
10295 /* Only do something if we found more than two objects. Otherwise,
10296 nothing has changed and we risk infinite recursion. */
10297 if (ok
10298 && (2 < ((var0 != 0) + (var1 != 0)
10299 + (con0 != 0) + (con1 != 0)
10300 + (lit0 != 0) + (lit1 != 0)
10301 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10303 bool any_overflows = false;
10304 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10305 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10306 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10307 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10308 var0 = associate_trees (loc, var0, var1, code, atype);
10309 con0 = associate_trees (loc, con0, con1, code, atype);
10310 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10311 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10312 code, atype);
10314 /* Preserve the MINUS_EXPR if the negative part of the literal is
10315 greater than the positive part. Otherwise, the multiplicative
10316 folding code (i.e. extract_muldiv) may be fooled in case
10317 unsigned constants are subtracted, like in the following
10318 example: ((X*2 + 4) - 8U)/2. */
10319 if (minus_lit0 && lit0)
10321 if (TREE_CODE (lit0) == INTEGER_CST
10322 && TREE_CODE (minus_lit0) == INTEGER_CST
10323 && tree_int_cst_lt (lit0, minus_lit0))
10325 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10326 MINUS_EXPR, atype);
10327 lit0 = 0;
10329 else
10331 lit0 = associate_trees (loc, lit0, minus_lit0,
10332 MINUS_EXPR, atype);
10333 minus_lit0 = 0;
10337 /* Don't introduce overflows through reassociation. */
10338 if (!any_overflows
10339 && ((lit0 && TREE_OVERFLOW (lit0))
10340 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10341 return NULL_TREE;
10343 if (minus_lit0)
10345 if (con0 == 0)
10346 return
10347 fold_convert_loc (loc, type,
10348 associate_trees (loc, var0, minus_lit0,
10349 MINUS_EXPR, atype));
10350 else
10352 con0 = associate_trees (loc, con0, minus_lit0,
10353 MINUS_EXPR, atype);
10354 return
10355 fold_convert_loc (loc, type,
10356 associate_trees (loc, var0, con0,
10357 PLUS_EXPR, atype));
10361 con0 = associate_trees (loc, con0, lit0, code, atype);
10362 return
10363 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10364 code, atype));
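/* Worked example (illustrative): for unsigned x and y,
   "(x + 1) + (y + 2)" splits into variables x, y and literals
   1, 2; associating gives var0 = x + y and lit0 = 3, producing
   "(x + y) + 3" from four objects. */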
10368 return NULL_TREE;
10370 case MINUS_EXPR:
10371 /* Pointer simplifications for subtraction, simple reassociations. */
10372 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10374 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10375 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10376 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10378 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10379 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10380 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10381 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10382 return fold_build2_loc (loc, PLUS_EXPR, type,
10383 fold_build2_loc (loc, MINUS_EXPR, type,
10384 arg00, arg10),
10385 fold_build2_loc (loc, MINUS_EXPR, type,
10386 arg01, arg11));
10388 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10389 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10391 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10392 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10393 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10394 fold_convert_loc (loc, type, arg1));
10395 if (tmp)
10396 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10398 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10399 simplifies. */
10400 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10402 tree arg10 = fold_convert_loc (loc, type,
10403 TREE_OPERAND (arg1, 0));
10404 tree arg11 = fold_convert_loc (loc, type,
10405 TREE_OPERAND (arg1, 1));
10406 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10407 fold_convert_loc (loc, type, arg0),
10408 arg10);
10409 if (tmp)
10410 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10413 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10414 if (TREE_CODE (arg0) == NEGATE_EXPR
10415 && negate_expr_p (arg1)
10416 && reorder_operands_p (arg0, arg1))
10417 return fold_build2_loc (loc, MINUS_EXPR, type,
10418 fold_convert_loc (loc, type,
10419 negate_expr (arg1)),
10420 fold_convert_loc (loc, type,
10421 TREE_OPERAND (arg0, 0)));
10422 /* Convert -A - 1 to ~A. */
10423 if (TREE_CODE (arg0) == NEGATE_EXPR
10424 && integer_each_onep (arg1)
10425 && !TYPE_OVERFLOW_TRAPS (type))
10426 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10427 fold_convert_loc (loc, type,
10428 TREE_OPERAND (arg0, 0)));
10430 /* Convert -1 - A to ~A. */
10431 if (TREE_CODE (type) != COMPLEX_TYPE
10432 && integer_all_onesp (arg0))
10433 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10436 /* X - (X / Y) * Y is X % Y. */
10437 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10438 && TREE_CODE (arg1) == MULT_EXPR
10439 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10440 && operand_equal_p (arg0,
10441 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10442 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10443 TREE_OPERAND (arg1, 1), 0))
10444 return
10445 fold_convert_loc (loc, type,
10446 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10447 arg0, TREE_OPERAND (arg1, 1)));
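/* E.g. (illustrative): "x - (x / 7) * 7" folds to "x % 7"
   (TRUNC_MOD_EXPR), for signed as well as unsigned x. */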
10449 if (! FLOAT_TYPE_P (type))
10451 if (integer_zerop (arg0))
10452 return negate_expr (fold_convert_loc (loc, type, arg1));
10454 /* Fold A - (A & B) into ~B & A. */
10455 if (!TREE_SIDE_EFFECTS (arg0)
10456 && TREE_CODE (arg1) == BIT_AND_EXPR)
10458 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10460 tree arg10 = fold_convert_loc (loc, type,
10461 TREE_OPERAND (arg1, 0));
10462 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10463 fold_build1_loc (loc, BIT_NOT_EXPR,
10464 type, arg10),
10465 fold_convert_loc (loc, type, arg0));
10467 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10469 tree arg11 = fold_convert_loc (loc,
10470 type, TREE_OPERAND (arg1, 1));
10471 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10472 fold_build1_loc (loc, BIT_NOT_EXPR,
10473 type, arg11),
10474 fold_convert_loc (loc, type, arg0));
10478 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10479 any power of 2 minus 1. */
10480 if (TREE_CODE (arg0) == BIT_AND_EXPR
10481 && TREE_CODE (arg1) == BIT_AND_EXPR
10482 && operand_equal_p (TREE_OPERAND (arg0, 0),
10483 TREE_OPERAND (arg1, 0), 0))
10485 tree mask0 = TREE_OPERAND (arg0, 1);
10486 tree mask1 = TREE_OPERAND (arg1, 1);
10487 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10489 if (operand_equal_p (tem, mask1, 0))
10491 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10492 TREE_OPERAND (arg0, 0), mask1);
10493 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
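/* Worked example (illustrative): with B = 7 (a power of 2 minus
   one), "(a & ~7) - (a & 7)" becomes "(a ^ 7) - 7"; for a = 13
   both forms evaluate to 3 (8 - 5 and 10 - 7 respectively). */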
10498 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10499 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10502 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10503 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10504 (-ARG1 + ARG0) reduces to -ARG1. */
10505 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10506 return negate_expr (fold_convert_loc (loc, type, arg1));
10508 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10509 __complex__ ( x, -y ). This is not the same for SNaNs or if
10510 signed zeros are involved. */
10511 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10512 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10513 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10515 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10516 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10517 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10518 bool arg0rz = false, arg0iz = false;
10519 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10520 || (arg0i && (arg0iz = real_zerop (arg0i))))
10522 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10523 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10524 if (arg0rz && arg1i && real_zerop (arg1i))
10526 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10527 arg1r ? arg1r
10528 : build1 (REALPART_EXPR, rtype, arg1));
10529 tree ip = arg0i ? arg0i
10530 : build1 (IMAGPART_EXPR, rtype, arg0);
10531 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10533 else if (arg0iz && arg1r && real_zerop (arg1r))
10535 tree rp = arg0r ? arg0r
10536 : build1 (REALPART_EXPR, rtype, arg0);
10537 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10538 arg1i ? arg1i
10539 : build1 (IMAGPART_EXPR, rtype, arg1));
10540 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10545 /* A - B -> A + (-B) if B is easily negatable. */
10546 if (negate_expr_p (arg1)
10547 && ((FLOAT_TYPE_P (type)
10548 /* Avoid this transformation if B is a positive REAL_CST. */
10549 && (TREE_CODE (arg1) != REAL_CST
10550 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10551 || INTEGRAL_TYPE_P (type)))
10552 return fold_build2_loc (loc, PLUS_EXPR, type,
10553 fold_convert_loc (loc, type, arg0),
10554 fold_convert_loc (loc, type,
10555 negate_expr (arg1)));
10557 /* Try folding difference of addresses. */
10559 HOST_WIDE_INT diff;
10561 if ((TREE_CODE (arg0) == ADDR_EXPR
10562 || TREE_CODE (arg1) == ADDR_EXPR)
10563 && ptr_difference_const (arg0, arg1, &diff))
10564 return build_int_cst_type (type, diff);
10567 /* Fold &a[i] - &a[j] to i-j. */
10568 if (TREE_CODE (arg0) == ADDR_EXPR
10569 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10570 && TREE_CODE (arg1) == ADDR_EXPR
10571 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10573 tree tem = fold_addr_of_array_ref_difference (loc, type,
10574 TREE_OPERAND (arg0, 0),
10575 TREE_OPERAND (arg1, 0));
10576 if (tem)
10577 return tem;
10580 if (FLOAT_TYPE_P (type)
10581 && flag_unsafe_math_optimizations
10582 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10583 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10584 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10585 return tem;
10587 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10588 one. Make sure the type is not saturating and has the signedness of
10589 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10590 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10591 if ((TREE_CODE (arg0) == MULT_EXPR
10592 || TREE_CODE (arg1) == MULT_EXPR)
10593 && !TYPE_SATURATING (type)
10594 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10595 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10596 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10598 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10599 if (tem)
10600 return tem;
10603 goto associate;
10605 case MULT_EXPR:
10606 /* (-A) * (-B) -> A * B */
10607 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10608 return fold_build2_loc (loc, MULT_EXPR, type,
10609 fold_convert_loc (loc, type,
10610 TREE_OPERAND (arg0, 0)),
10611 fold_convert_loc (loc, type,
10612 negate_expr (arg1)));
10613 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10614 return fold_build2_loc (loc, MULT_EXPR, type,
10615 fold_convert_loc (loc, type,
10616 negate_expr (arg0)),
10617 fold_convert_loc (loc, type,
10618 TREE_OPERAND (arg1, 0)));
10620 if (! FLOAT_TYPE_P (type))
10622 /* Transform x * -1 into -x. Make sure to do the negation
10623 on the original operand with conversions not stripped
10624 because we can only strip non-sign-changing conversions. */
10625 if (integer_minus_onep (arg1))
10626 return fold_convert_loc (loc, type, negate_expr (op0));
10627 /* Transform x * -C into -x * C if x is easily negatable. */
10628 if (TREE_CODE (arg1) == INTEGER_CST
10629 && tree_int_cst_sgn (arg1) == -1
10630 && negate_expr_p (arg0)
10631 && (tem = negate_expr (arg1)) != arg1
10632 && !TREE_OVERFLOW (tem))
10633 return fold_build2_loc (loc, MULT_EXPR, type,
10634 fold_convert_loc (loc, type,
10635 negate_expr (arg0)),
10636 tem);
10638 /* (a * (1 << b)) is (a << b) */
10639 if (TREE_CODE (arg1) == LSHIFT_EXPR
10640 && integer_onep (TREE_OPERAND (arg1, 0)))
10641 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10642 TREE_OPERAND (arg1, 1));
10643 if (TREE_CODE (arg0) == LSHIFT_EXPR
10644 && integer_onep (TREE_OPERAND (arg0, 0)))
10645 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10646 TREE_OPERAND (arg0, 1));
10648 /* (A + A) * C -> A * 2 * C */
10649 if (TREE_CODE (arg0) == PLUS_EXPR
10650 && TREE_CODE (arg1) == INTEGER_CST
10651 && operand_equal_p (TREE_OPERAND (arg0, 0),
10652 TREE_OPERAND (arg0, 1), 0))
10653 return fold_build2_loc (loc, MULT_EXPR, type,
10654 omit_one_operand_loc (loc, type,
10655 TREE_OPERAND (arg0, 0),
10656 TREE_OPERAND (arg0, 1)),
10657 fold_build2_loc (loc, MULT_EXPR, type,
10658 build_int_cst (type, 2) , arg1));
10660 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10661 sign-changing only. */
10662 if (TREE_CODE (arg1) == INTEGER_CST
10663 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10664 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10665 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10667 strict_overflow_p = false;
10668 if (TREE_CODE (arg1) == INTEGER_CST
10669 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10670 &strict_overflow_p)))
10672 if (strict_overflow_p)
10673 fold_overflow_warning (("assuming signed overflow does not "
10674 "occur when simplifying "
10675 "multiplication"),
10676 WARN_STRICT_OVERFLOW_MISC);
10677 return fold_convert_loc (loc, type, tem);
10680 /* Optimize z * conj(z) for integer complex numbers. */
10681 if (TREE_CODE (arg0) == CONJ_EXPR
10682 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10683 return fold_mult_zconjz (loc, type, arg1);
10684 if (TREE_CODE (arg1) == CONJ_EXPR
10685 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10686 return fold_mult_zconjz (loc, type, arg0);
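/* Sketch of the fold above: with z == a + b*i,
     z * conj (z)  ->  (a*a + b*b) + 0*i
   so the general complex multiply is replaced by two scalar
   multiplies and an add.  */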
10688 else
10690 /* Maybe fold x * 0 to 0. The expressions aren't the same
10691 when x is NaN, since x * 0 is also NaN. Nor are they the
10692 same in modes with signed zeros, since multiplying a
10693 negative value by 0 gives -0, not +0. */
10694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10695 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10696 && real_zerop (arg1))
10697 return omit_one_operand_loc (loc, type, arg1, arg0);
10698 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10699 Likewise for complex arithmetic with signed zeros. */
10700 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10701 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10702 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10703 && real_onep (arg1))
10704 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10706 /* Transform x * -1.0 into -x. */
10707 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10708 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10709 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10710 && real_minus_onep (arg1))
10711 return fold_convert_loc (loc, type, negate_expr (arg0));
10713 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10714 the result for floating point types due to rounding, so it is applied
10715 only if -fassociative-math was specified. */
10716 if (flag_associative_math
10717 && TREE_CODE (arg0) == RDIV_EXPR
10718 && TREE_CODE (arg1) == REAL_CST
10719 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10721 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10722 arg1);
10723 if (tem)
10724 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10725 TREE_OPERAND (arg0, 1));
10728 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10729 if (operand_equal_p (arg0, arg1, 0))
10731 tree tem = fold_strip_sign_ops (arg0);
10732 if (tem != NULL_TREE)
10734 tem = fold_convert_loc (loc, type, tem);
10735 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10739 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10740 This is not the same for NaNs or if signed zeros are
10741 involved. */
10742 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10743 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10744 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10745 && TREE_CODE (arg1) == COMPLEX_CST
10746 && real_zerop (TREE_REALPART (arg1)))
10748 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10749 if (real_onep (TREE_IMAGPART (arg1)))
10750 return
10751 fold_build2_loc (loc, COMPLEX_EXPR, type,
10752 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10753 rtype, arg0)),
10754 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10755 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10756 return
10757 fold_build2_loc (loc, COMPLEX_EXPR, type,
10758 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10759 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10760 rtype, arg0)));
10763 /* Optimize z * conj(z) for floating point complex numbers.
10764 Guarded by flag_unsafe_math_optimizations as non-finite
10765 imaginary components don't produce scalar results. */
10766 if (flag_unsafe_math_optimizations
10767 && TREE_CODE (arg0) == CONJ_EXPR
10768 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10769 return fold_mult_zconjz (loc, type, arg1);
10770 if (flag_unsafe_math_optimizations
10771 && TREE_CODE (arg1) == CONJ_EXPR
10772 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10773 return fold_mult_zconjz (loc, type, arg0);
10775 if (flag_unsafe_math_optimizations)
10777 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10778 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10780 /* Optimizations of root(...)*root(...). */
10781 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10783 tree rootfn, arg;
10784 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10785 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10787 /* Optimize sqrt(x)*sqrt(x) as x. */
10788 if (BUILTIN_SQRT_P (fcode0)
10789 && operand_equal_p (arg00, arg10, 0)
10790 && ! HONOR_SNANS (TYPE_MODE (type)))
10791 return arg00;
10793 /* Optimize root(x)*root(y) as root(x*y). */
10794 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10795 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10796 return build_call_expr_loc (loc, rootfn, 1, arg);
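/* Worked examples for the root folds above (both live under
   -funsafe-math-optimizations, the first also needs no SNaNs):
     sqrt (x) * sqrt (x)      ->  x
     sqrt (2.0) * sqrt (8.0)  ->  sqrt (16.0)  ==  4.0  */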
10799 /* Optimize expN(x)*expN(y) as expN(x+y). */
10800 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10802 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10803 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10804 CALL_EXPR_ARG (arg0, 0),
10805 CALL_EXPR_ARG (arg1, 0));
10806 return build_call_expr_loc (loc, expfn, 1, arg);
10809 /* Optimizations of pow(...)*pow(...). */
10810 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10811 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10812 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10814 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10815 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10816 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10817 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10819 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10820 if (operand_equal_p (arg01, arg11, 0))
10822 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10823 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10824 arg00, arg10);
10825 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10828 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10829 if (operand_equal_p (arg00, arg10, 0))
10831 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10832 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10833 arg01, arg11);
10834 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
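/* E.g.  pow (x, 3.0) * pow (y, 3.0)  ->  pow (x * y, 3.0)  and
   pow (x, 2.0) * pow (x, 3.0)  ->  pow (x, 2.0 + 3.0)  ==  pow (x, 5.0).  */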
10838 /* Optimize tan(x)*cos(x) as sin(x). */
10839 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10840 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10841 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10842 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10843 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10844 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10845 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10846 CALL_EXPR_ARG (arg1, 0), 0))
10848 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10850 if (sinfn != NULL_TREE)
10851 return build_call_expr_loc (loc, sinfn, 1,
10852 CALL_EXPR_ARG (arg0, 0));
10855 /* Optimize x*pow(x,c) as pow(x,c+1). */
10856 if (fcode1 == BUILT_IN_POW
10857 || fcode1 == BUILT_IN_POWF
10858 || fcode1 == BUILT_IN_POWL)
10860 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10861 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10862 if (TREE_CODE (arg11) == REAL_CST
10863 && !TREE_OVERFLOW (arg11)
10864 && operand_equal_p (arg0, arg10, 0))
10866 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10867 REAL_VALUE_TYPE c;
10868 tree arg;
10870 c = TREE_REAL_CST (arg11);
10871 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10872 arg = build_real (type, c);
10873 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10877 /* Optimize pow(x,c)*x as pow(x,c+1). */
10878 if (fcode0 == BUILT_IN_POW
10879 || fcode0 == BUILT_IN_POWF
10880 || fcode0 == BUILT_IN_POWL)
10882 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10883 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10884 if (TREE_CODE (arg01) == REAL_CST
10885 && !TREE_OVERFLOW (arg01)
10886 && operand_equal_p (arg1, arg00, 0))
10888 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10889 REAL_VALUE_TYPE c;
10890 tree arg;
10892 c = TREE_REAL_CST (arg01);
10893 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10894 arg = build_real (type, c);
10895 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10899 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10900 if (!in_gimple_form
10901 && optimize
10902 && operand_equal_p (arg0, arg1, 0))
10904 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10906 if (powfn)
10908 tree arg = build_real (type, dconst2);
10909 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10914 goto associate;
10916 case BIT_IOR_EXPR:
10917 bit_ior:
10918 /* ~X | X is -1. */
10919 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10920 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10922 t1 = build_zero_cst (type);
10923 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10924 return omit_one_operand_loc (loc, type, t1, arg1);
10927 /* X | ~X is -1. */
10928 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10929 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10931 t1 = build_zero_cst (type);
10932 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10933 return omit_one_operand_loc (loc, type, t1, arg0);
10936 /* Canonicalize (X & C1) | C2. */
10937 if (TREE_CODE (arg0) == BIT_AND_EXPR
10938 && TREE_CODE (arg1) == INTEGER_CST
10939 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10941 int width = TYPE_PRECISION (type), w;
10942 wide_int c1 = TREE_OPERAND (arg0, 1);
10943 wide_int c2 = arg1;
10945 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10946 if ((c1 & c2) == c1)
10947 return omit_one_operand_loc (loc, type, arg1,
10948 TREE_OPERAND (arg0, 0));
10950 wide_int msk = wi::mask (width, false,
10951 TYPE_PRECISION (TREE_TYPE (arg1)));
10953 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10954 if (msk.and_not (c1 | c2) == 0)
10955 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10956 TREE_OPERAND (arg0, 0), arg1);
10958 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10959 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10960 mode which allows further optimizations. */
10961 c1 &= msk;
10962 c2 &= msk;
10963 wide_int c3 = c1.and_not (c2);
10964 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10966 wide_int mask = wi::mask (w, false,
10967 TYPE_PRECISION (type));
10968 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10970 c3 = mask;
10971 break;
10975 if (c3 != c1)
10976 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10977 fold_build2_loc (loc, BIT_AND_EXPR, type,
10978 TREE_OPERAND (arg0, 0),
10979 wide_int_to_tree (type,
10980 c3)),
10981 arg1);
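/* Worked 8-bit examples of the canonicalization above:
     (x & 0x07) | 0x0f  ->  0x0f              (C1 & C2 == C1)
     (x & 0xf0) | 0x0f  ->  x | 0x0f          (C1 | C2 == ~0)
     (x & 0xf3) | 0x03  ->  (x & 0xf0) | 0x03 (drop C2's bits from C1,
					       since no mode mask results).  */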
10984 /* (X & ~Y) | (~X & Y) is X ^ Y */
10985 if (TREE_CODE (arg0) == BIT_AND_EXPR
10986 && TREE_CODE (arg1) == BIT_AND_EXPR)
10988 tree a0, a1, l0, l1, n0, n1;
10990 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10991 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10993 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10994 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10996 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10997 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10999 if ((operand_equal_p (n0, a0, 0)
11000 && operand_equal_p (n1, a1, 0))
11001 || (operand_equal_p (n0, a1, 0)
11002 && operand_equal_p (n1, a0, 0)))
11003 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11006 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11007 if (t1 != NULL_TREE)
11008 return t1;
11010 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11012 This results in more efficient code for machines without a NAND
11013 instruction. Combine will canonicalize to the first form,
11014 which will allow use of NAND instructions provided by the
11015 backend if they exist. */
11016 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11017 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11019 return
11020 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11021 build2 (BIT_AND_EXPR, type,
11022 fold_convert_loc (loc, type,
11023 TREE_OPERAND (arg0, 0)),
11024 fold_convert_loc (loc, type,
11025 TREE_OPERAND (arg1, 0))));
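/* De Morgan in action:  ~a | ~b  ==  ~(a & b);  the latter form is a
   single NAND on targets that provide one.  */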
11028 /* See if this can be simplified into a rotate first. If that
11029 is unsuccessful continue in the association code. */
11030 goto bit_rotate;
11032 case BIT_XOR_EXPR:
11033 /* ~X ^ X is -1. */
11034 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11035 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11037 t1 = build_zero_cst (type);
11038 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11039 return omit_one_operand_loc (loc, type, t1, arg1);
11042 /* X ^ ~X is -1. */
11043 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11044 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11046 t1 = build_zero_cst (type);
11047 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11048 return omit_one_operand_loc (loc, type, t1, arg0);
11051 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11052 with a constant, and the two constants have no bits in common,
11053 we should treat this as a BIT_IOR_EXPR since this may produce more
11054 simplifications. */
11055 if (TREE_CODE (arg0) == BIT_AND_EXPR
11056 && TREE_CODE (arg1) == BIT_AND_EXPR
11057 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11058 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11059 && wi::bit_and (TREE_OPERAND (arg0, 1),
11060 TREE_OPERAND (arg1, 1)) == 0)
11062 code = BIT_IOR_EXPR;
11063 goto bit_ior;
11066 /* (X | Y) ^ X -> Y & ~X. */
11067 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11068 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11070 tree t2 = TREE_OPERAND (arg0, 1);
11071 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11072 arg1);
11073 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11074 fold_convert_loc (loc, type, t2),
11075 fold_convert_loc (loc, type, t1));
11076 return t1;
11079 /* (Y | X) ^ X -> Y & ~X. */
11080 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11081 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11083 tree t2 = TREE_OPERAND (arg0, 0);
11084 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11085 arg1);
11086 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11087 fold_convert_loc (loc, type, t2),
11088 fold_convert_loc (loc, type, t1));
11089 return t1;
11092 /* X ^ (X | Y) -> Y & ~X. */
11093 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11094 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11096 tree t2 = TREE_OPERAND (arg1, 1);
11097 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11098 arg0);
11099 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11100 fold_convert_loc (loc, type, t2),
11101 fold_convert_loc (loc, type, t1));
11102 return t1;
11105 /* X ^ (Y | X) -> Y & ~X. */
11106 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11107 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11109 tree t2 = TREE_OPERAND (arg1, 0);
11110 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11111 arg0);
11112 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11113 fold_convert_loc (loc, type, t2),
11114 fold_convert_loc (loc, type, t1));
11115 return t1;
11118 /* Convert ~X ^ ~Y to X ^ Y. */
11119 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11120 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11121 return fold_build2_loc (loc, code, type,
11122 fold_convert_loc (loc, type,
11123 TREE_OPERAND (arg0, 0)),
11124 fold_convert_loc (loc, type,
11125 TREE_OPERAND (arg1, 0)));
11127 /* Convert ~X ^ C to X ^ ~C. */
11128 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11129 && TREE_CODE (arg1) == INTEGER_CST)
11130 return fold_build2_loc (loc, code, type,
11131 fold_convert_loc (loc, type,
11132 TREE_OPERAND (arg0, 0)),
11133 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11135 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11136 if (TREE_CODE (arg0) == BIT_AND_EXPR
11137 && INTEGRAL_TYPE_P (type)
11138 && integer_onep (TREE_OPERAND (arg0, 1))
11139 && integer_onep (arg1))
11140 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11141 build_zero_cst (TREE_TYPE (arg0)));
11143 /* Fold (X & Y) ^ Y as ~X & Y. */
11144 if (TREE_CODE (arg0) == BIT_AND_EXPR
11145 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11147 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11148 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11149 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11150 fold_convert_loc (loc, type, arg1));
11152 /* Fold (X & Y) ^ X as ~Y & X. */
11153 if (TREE_CODE (arg0) == BIT_AND_EXPR
11154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11155 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11157 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11158 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11159 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11160 fold_convert_loc (loc, type, arg1));
11162 /* Fold X ^ (X & Y) as X & ~Y. */
11163 if (TREE_CODE (arg1) == BIT_AND_EXPR
11164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11166 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11167 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11168 fold_convert_loc (loc, type, arg0),
11169 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11171 /* Fold X ^ (Y & X) as ~Y & X. */
11172 if (TREE_CODE (arg1) == BIT_AND_EXPR
11173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11174 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11176 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11177 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11178 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11179 fold_convert_loc (loc, type, arg0));
11182 /* See if this can be simplified into a rotate first. If that
11183 is unsuccessful continue in the association code. */
11184 goto bit_rotate;
11186 case BIT_AND_EXPR:
11187 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11188 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11189 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11190 || (TREE_CODE (arg0) == EQ_EXPR
11191 && integer_zerop (TREE_OPERAND (arg0, 1))))
11192 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11193 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11195 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11196 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11197 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11198 || (TREE_CODE (arg1) == EQ_EXPR
11199 && integer_zerop (TREE_OPERAND (arg1, 1))))
11200 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11201 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11203 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11204 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11205 && INTEGRAL_TYPE_P (type)
11206 && integer_onep (TREE_OPERAND (arg0, 1))
11207 && integer_onep (arg1))
11209 tree tem2;
11210 tem = TREE_OPERAND (arg0, 0);
11211 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11212 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11213 tem, tem2);
11214 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11215 build_zero_cst (TREE_TYPE (tem)));
11217 /* Fold ~X & 1 as (X & 1) == 0. */
11218 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11219 && INTEGRAL_TYPE_P (type)
11220 && integer_onep (arg1))
11222 tree tem2;
11223 tem = TREE_OPERAND (arg0, 0);
11224 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11225 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11226 tem, tem2);
11227 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11228 build_zero_cst (TREE_TYPE (tem)));
11230 /* Fold !X & 1 as X == 0. */
11231 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11232 && integer_onep (arg1))
11234 tem = TREE_OPERAND (arg0, 0);
11235 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11236 build_zero_cst (TREE_TYPE (tem)));
11239 /* Fold (X ^ Y) & Y as ~X & Y. */
11240 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11241 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11243 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11244 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11245 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11246 fold_convert_loc (loc, type, arg1));
11248 /* Fold (X ^ Y) & X as ~Y & X. */
11249 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11250 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11251 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11253 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11254 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11255 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11256 fold_convert_loc (loc, type, arg1));
11258 /* Fold X & (X ^ Y) as X & ~Y. */
11259 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11260 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11262 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11263 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11264 fold_convert_loc (loc, type, arg0),
11265 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11267 /* Fold X & (Y ^ X) as ~Y & X. */
11268 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11269 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11270 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11272 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11273 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11274 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11275 fold_convert_loc (loc, type, arg0));
11278 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11279 multiple of 1 << CST. */
11280 if (TREE_CODE (arg1) == INTEGER_CST)
11282 wide_int cst1 = arg1;
11283 wide_int ncst1 = -cst1;
11284 if ((cst1 & ncst1) == ncst1
11285 && multiple_of_p (type, arg0,
11286 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11287 return fold_convert_loc (loc, type, arg0);
11290 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11291 bits from CST2. */
11292 if (TREE_CODE (arg1) == INTEGER_CST
11293 && TREE_CODE (arg0) == MULT_EXPR
11294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11296 wide_int warg1 = arg1;
11297 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11299 if (masked == 0)
11300 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11301 arg0, arg1);
11302 else if (masked != warg1)
11304 /* Avoid the transform if arg1 is a mask of some
11305 mode which allows further optimizations. */
11306 int pop = wi::popcount (warg1);
11307 if (!(pop >= BITS_PER_UNIT
11308 && exact_log2 (pop) != -1
11309 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11310 return fold_build2_loc (loc, code, type, op0,
11311 wide_int_to_tree (type, masked));
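/* E.g. x * 4 always has its two low bits clear, so
     (x * 4) & 3     ->  0
     (x * 4) & 0x07  ->  (x * 4) & 0x04
   unless the tightened constant would spoil a mode-sized mask.  */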
11315 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11316 ((A & N) + B) & M -> (A + B) & M
11317 Similarly if (N & M) == 0,
11318 ((A | N) + B) & M -> (A + B) & M
11319 and for - instead of + (or unary - instead of +)
11320 and/or ^ instead of |.
11321 If B is constant and (B & M) == 0, fold into A & M. */
11322 if (TREE_CODE (arg1) == INTEGER_CST)
11324 wide_int cst1 = arg1;
11325 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11326 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11327 && (TREE_CODE (arg0) == PLUS_EXPR
11328 || TREE_CODE (arg0) == MINUS_EXPR
11329 || TREE_CODE (arg0) == NEGATE_EXPR)
11330 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11331 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11333 tree pmop[2];
11334 int which = 0;
11335 wide_int cst0;
11337 /* Now we know that arg0 is (C + D) or (C - D) or
11338 -C and arg1 (M) is == (1LL << cst) - 1.
11339 Store C into PMOP[0] and D into PMOP[1]. */
11340 pmop[0] = TREE_OPERAND (arg0, 0);
11341 pmop[1] = NULL;
11342 if (TREE_CODE (arg0) != NEGATE_EXPR)
11344 pmop[1] = TREE_OPERAND (arg0, 1);
11345 which = 1;
11348 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11349 which = -1;
11351 for (; which >= 0; which--)
11352 switch (TREE_CODE (pmop[which]))
11354 case BIT_AND_EXPR:
11355 case BIT_IOR_EXPR:
11356 case BIT_XOR_EXPR:
11357 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11358 != INTEGER_CST)
11359 break;
11360 cst0 = TREE_OPERAND (pmop[which], 1);
11361 cst0 &= cst1;
11362 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11364 if (cst0 != cst1)
11365 break;
11367 else if (cst0 != 0)
11368 break;
11369 /* If C or D is of the form (A & N) where
11370 (N & M) == M, or of the form (A | N) or
11371 (A ^ N) where (N & M) == 0, replace it with A. */
11372 pmop[which] = TREE_OPERAND (pmop[which], 0);
11373 break;
11374 case INTEGER_CST:
11375 /* If C or D is a N where (N & M) == 0, it can be
11376 omitted (assumed 0). */
11377 if ((TREE_CODE (arg0) == PLUS_EXPR
11378 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11379 && (cst1 & pmop[which]) == 0)
11380 pmop[which] = NULL;
11381 break;
11382 default:
11383 break;
11386 /* Only build anything new if we optimized one or both arguments
11387 above. */
11388 if (pmop[0] != TREE_OPERAND (arg0, 0)
11389 || (TREE_CODE (arg0) != NEGATE_EXPR
11390 && pmop[1] != TREE_OPERAND (arg0, 1)))
11392 tree utype = TREE_TYPE (arg0);
11393 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11395 /* Perform the operations in a type that has defined
11396 overflow behavior. */
11397 utype = unsigned_type_for (TREE_TYPE (arg0));
11398 if (pmop[0] != NULL)
11399 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11400 if (pmop[1] != NULL)
11401 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11404 if (TREE_CODE (arg0) == NEGATE_EXPR)
11405 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11406 else if (TREE_CODE (arg0) == PLUS_EXPR)
11408 if (pmop[0] != NULL && pmop[1] != NULL)
11409 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11410 pmop[0], pmop[1]);
11411 else if (pmop[0] != NULL)
11412 tem = pmop[0];
11413 else if (pmop[1] != NULL)
11414 tem = pmop[1];
11415 else
11416 return build_int_cst (type, 0);
11418 else if (pmop[0] == NULL)
11419 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11420 else
11421 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11422 pmop[0], pmop[1]);
11423 /* TEM is now the new binary +, - or unary - replacement. */
11424 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11425 fold_convert_loc (loc, utype, arg1));
11426 return fold_convert_loc (loc, type, tem);
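/* Worked example with M == 0xff == (1 << 8) - 1:
     ((a & 0x1ff) + b) & 0xff  ->  (a + b) & 0xff   (N & M == M)
     ((a | 0x300) + b) & 0xff  ->  (a + b) & 0xff   (N & M == 0)
   with the arithmetic performed in an unsigned type when signed
   overflow would otherwise be undefined.  */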
11431 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11432 if (t1 != NULL_TREE)
11433 return t1;
11434 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11435 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11436 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11438 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11440 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11441 if (mask == -1)
11442 return
11443 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11446 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11448 This results in more efficient code for machines without a NOR
11449 instruction. Combine will canonicalize to the first form,
11450 which will allow use of NOR instructions provided by the
11451 backend if they exist. */
11452 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11453 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11455 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11456 build2 (BIT_IOR_EXPR, type,
11457 fold_convert_loc (loc, type,
11458 TREE_OPERAND (arg0, 0)),
11459 fold_convert_loc (loc, type,
11460 TREE_OPERAND (arg1, 0))));
11463 /* If arg0 is derived from the address of an object or function, we may
11464 be able to fold this expression using the object or function's
11465 alignment. */
11466 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11468 unsigned HOST_WIDE_INT modulus, residue;
11469 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11471 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11472 integer_onep (arg1));
11474 /* This works because modulus is a power of 2. If this weren't the
11475 case, we'd have to replace it by its greatest power-of-2
11476 divisor: modulus & -modulus. */
11477 if (low < modulus)
11478 return build_int_cst (type, residue & low);
11481 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11482 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11483 if the new mask might be further optimized. */
11484 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11485 || TREE_CODE (arg0) == RSHIFT_EXPR)
11486 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11487 && TREE_CODE (arg1) == INTEGER_CST
11488 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11489 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11490 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11491 < TYPE_PRECISION (TREE_TYPE (arg0))))
11493 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11494 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11495 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11496 tree shift_type = TREE_TYPE (arg0);
11498 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11499 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11500 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11501 && TYPE_PRECISION (TREE_TYPE (arg0))
11502 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11504 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11505 tree arg00 = TREE_OPERAND (arg0, 0);
11506 /* See if more bits can be proven as zero because of
11507 zero extension. */
11508 if (TREE_CODE (arg00) == NOP_EXPR
11509 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11511 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11512 if (TYPE_PRECISION (inner_type)
11513 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11514 && TYPE_PRECISION (inner_type) < prec)
11516 prec = TYPE_PRECISION (inner_type);
11517 /* See if we can shorten the right shift. */
11518 if (shiftc < prec)
11519 shift_type = inner_type;
11520 /* Otherwise X >> C1 is all zeros, so we'll optimize
11521 it into (X, 0) later on by making sure zerobits
11522 is all ones. */
11525 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11526 if (shiftc < prec)
11528 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11529 zerobits <<= prec - shiftc;
11531 /* For an arithmetic shift, if the sign bit could be set, zerobits
11532 can actually contain sign bits, so no transformation is
11533 possible unless MASK masks them all away. In that
11534 case the shift needs to be converted into a logical shift. */
11535 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11536 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11538 if ((mask & zerobits) == 0)
11539 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11540 else
11541 zerobits = 0;
11545 /* ((X << 16) & 0xff00) is (X, 0). */
11546 if ((mask & zerobits) == mask)
11547 return omit_one_operand_loc (loc, type,
11548 build_int_cst (type, 0), arg0);
11550 newmask = mask | zerobits;
11551 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11553 /* Only do the transformation if NEWMASK is some integer
11554 mode's mask. */
11555 for (prec = BITS_PER_UNIT;
11556 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11557 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11558 break;
11559 if (prec < HOST_BITS_PER_WIDE_INT
11560 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11562 tree newmaskt;
11564 if (shift_type != TREE_TYPE (arg0))
11566 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11567 fold_convert_loc (loc, shift_type,
11568 TREE_OPERAND (arg0, 0)),
11569 TREE_OPERAND (arg0, 1));
11570 tem = fold_convert_loc (loc, type, tem);
11572 else
11573 tem = op0;
11574 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11575 if (!tree_int_cst_equal (newmaskt, arg1))
11576 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11581 goto associate;
11583 case RDIV_EXPR:
11584 /* Don't touch a floating-point divide by zero unless the mode
11585 of the constant can represent infinity. */
11586 if (TREE_CODE (arg1) == REAL_CST
11587 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11588 && real_zerop (arg1))
11589 return NULL_TREE;
11591 /* Optimize A / A to 1.0 if we don't care about
11592 NaNs or Infinities. Skip the transformation
11593 for non-real operands. */
11594 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11595 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11596 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11597 && operand_equal_p (arg0, arg1, 0))
11599 tree r = build_real (TREE_TYPE (arg0), dconst1);
11601 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11604 /* The complex version of the above A / A optimization. */
11605 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11606 && operand_equal_p (arg0, arg1, 0))
11608 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11609 if (! HONOR_NANS (TYPE_MODE (elem_type))
11610 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11612 tree r = build_real (elem_type, dconst1);
11613 /* omit_two_operands will call fold_convert for us. */
11614 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11618 /* (-A) / (-B) -> A / B */
11619 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11620 return fold_build2_loc (loc, RDIV_EXPR, type,
11621 TREE_OPERAND (arg0, 0),
11622 negate_expr (arg1));
11623 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11624 return fold_build2_loc (loc, RDIV_EXPR, type,
11625 negate_expr (arg0),
11626 TREE_OPERAND (arg1, 0));
11628 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11629 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11630 && real_onep (arg1))
11631 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11633 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11634 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11635 && real_minus_onep (arg1))
11636 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11637 negate_expr (arg0)));
11639 /* If ARG1 is a constant, we can convert this to a multiply by the
11640 reciprocal. This does not have the same rounding properties,
11641 so only do this if -freciprocal-math. We can actually
11642 always safely do it if ARG1 is a power of two, but it's hard to
11643 tell if it is or not in a portable manner. */
11644 if (optimize
11645 && (TREE_CODE (arg1) == REAL_CST
11646 || (TREE_CODE (arg1) == COMPLEX_CST
11647 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11648 || (TREE_CODE (arg1) == VECTOR_CST
11649 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11651 if (flag_reciprocal_math
11652 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11653 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11654 /* Find the reciprocal if optimizing and the result is exact.
11655 TODO: Complex reciprocal not implemented. */
11656 if (TREE_CODE (arg1) != COMPLEX_CST)
11658 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11660 if (inverse)
11661 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
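/* E.g.  x / 2.0  ->  x * 0.5  is exact (0.5 is a power of two) and is
   done whenever optimizing, while  x / 3.0  ->  x * (1.0 / 3.0)
   changes the rounding and so requires -freciprocal-math.  */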
11664 /* Convert A/B/C to A/(B*C). */
11665 if (flag_reciprocal_math
11666 && TREE_CODE (arg0) == RDIV_EXPR)
11667 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11668 fold_build2_loc (loc, MULT_EXPR, type,
11669 TREE_OPERAND (arg0, 1), arg1));
11671 /* Convert A/(B/C) to (A/B)*C. */
11672 if (flag_reciprocal_math
11673 && TREE_CODE (arg1) == RDIV_EXPR)
11674 return fold_build2_loc (loc, MULT_EXPR, type,
11675 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11676 TREE_OPERAND (arg1, 0)),
11677 TREE_OPERAND (arg1, 1));
11679 /* Convert C1/(X*C2) into (C1/C2)/X. */
11680 if (flag_reciprocal_math
11681 && TREE_CODE (arg1) == MULT_EXPR
11682 && TREE_CODE (arg0) == REAL_CST
11683 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11685 tree tem = const_binop (RDIV_EXPR, arg0,
11686 TREE_OPERAND (arg1, 1));
11687 if (tem)
11688 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11689 TREE_OPERAND (arg1, 0));
11692 if (flag_unsafe_math_optimizations)
11694 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11695 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11697 /* Optimize sin(x)/cos(x) as tan(x). */
11698 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11699 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11700 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11701 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11702 CALL_EXPR_ARG (arg1, 0), 0))
11704 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11706 if (tanfn != NULL_TREE)
11707 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11710 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11711 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11712 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11713 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11714 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11715 CALL_EXPR_ARG (arg1, 0), 0))
11717 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11719 if (tanfn != NULL_TREE)
11721 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11722 CALL_EXPR_ARG (arg0, 0));
11723 return fold_build2_loc (loc, RDIV_EXPR, type,
11724 build_real (type, dconst1), tmp);
11728 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11729 NaNs or Infinities. */
11730 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11731 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11732 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11734 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11735 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11737 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11738 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11739 && operand_equal_p (arg00, arg01, 0))
11741 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11743 if (cosfn != NULL_TREE)
11744 return build_call_expr_loc (loc, cosfn, 1, arg00);
11748 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11749 NaNs or Infinities. */
11750 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11751 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11752 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11754 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11755 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11757 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11758 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11759 && operand_equal_p (arg00, arg01, 0))
11761 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11763 if (cosfn != NULL_TREE)
11765 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11766 return fold_build2_loc (loc, RDIV_EXPR, type,
11767 build_real (type, dconst1),
11768 tmp);
11773 /* Optimize pow(x,c)/x as pow(x,c-1). */
11774 if (fcode0 == BUILT_IN_POW
11775 || fcode0 == BUILT_IN_POWF
11776 || fcode0 == BUILT_IN_POWL)
11778 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11779 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11780 if (TREE_CODE (arg01) == REAL_CST
11781 && !TREE_OVERFLOW (arg01)
11782 && operand_equal_p (arg1, arg00, 0))
11784 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11785 REAL_VALUE_TYPE c;
11786 tree arg;
11788 c = TREE_REAL_CST (arg01);
11789 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11790 arg = build_real (type, c);
11791 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11795 /* Optimize a/root(b/c) into a*root(c/b). */
11796 if (BUILTIN_ROOT_P (fcode1))
11798 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11800 if (TREE_CODE (rootarg) == RDIV_EXPR)
11802 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11803 tree b = TREE_OPERAND (rootarg, 0);
11804 tree c = TREE_OPERAND (rootarg, 1);
11806 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11808 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11809 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11813 /* Optimize x/expN(y) into x*expN(-y). */
11814 if (BUILTIN_EXPONENT_P (fcode1))
11816 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11817 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11818 arg1 = build_call_expr_loc (loc,
11819 expfn, 1,
11820 fold_convert_loc (loc, type, arg));
11821 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11824 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11825 if (fcode1 == BUILT_IN_POW
11826 || fcode1 == BUILT_IN_POWF
11827 || fcode1 == BUILT_IN_POWL)
11829 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11830 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11831 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11832 tree neg11 = fold_convert_loc (loc, type,
11833 negate_expr (arg11));
11834 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11835 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11838 return NULL_TREE;
11840 case TRUNC_DIV_EXPR:
11841 /* Optimize (X & (-A)) / A where A is a power of 2,
11842 to X >> log2(A). */
11843 if (TREE_CODE (arg0) == BIT_AND_EXPR
11844 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11845 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11847 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11848 arg1, TREE_OPERAND (arg0, 1));
11849 if (sum && integer_zerop (sum)) {
11850 tree pow2 = build_int_cst (integer_type_node,
11851 wi::exact_log2 (arg1));
11852 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11853 TREE_OPERAND (arg0, 0), pow2);
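/* E.g. with A == 8:  (x & -8) / 8  ->  x >> 3;  x & -8 clears the
   three low bits, so the signed division is exact and agrees with
   the arithmetic right shift.  */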
11857 /* Fall through */
11859 case FLOOR_DIV_EXPR:
11860 /* Simplify A / (B << N) where A and B are positive and B is
11861 a power of 2, to A >> (N + log2(B)). */
11862 strict_overflow_p = false;
11863 if (TREE_CODE (arg1) == LSHIFT_EXPR
11864 && (TYPE_UNSIGNED (type)
11865 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11867 tree sval = TREE_OPERAND (arg1, 0);
11868 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11870 tree sh_cnt = TREE_OPERAND (arg1, 1);
11871 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11872 wi::exact_log2 (sval));
11874 if (strict_overflow_p)
11875 fold_overflow_warning (("assuming signed overflow does not "
11876 "occur when simplifying A / (B << N)"),
11877 WARN_STRICT_OVERFLOW_MISC);
11879 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11880 sh_cnt, pow2);
11881 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11882 fold_convert_loc (loc, type, arg0), sh_cnt);
11886 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11887 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11888 if (INTEGRAL_TYPE_P (type)
11889 && TYPE_UNSIGNED (type)
11890 && code == FLOOR_DIV_EXPR)
11891 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11893 /* Fall through */
11895 case ROUND_DIV_EXPR:
11896 case CEIL_DIV_EXPR:
11897 case EXACT_DIV_EXPR:
11898 if (integer_zerop (arg1))
11899 return NULL_TREE;
11900 /* X / -1 is -X. */
11901 if (!TYPE_UNSIGNED (type)
11902 && TREE_CODE (arg1) == INTEGER_CST
11903 && wi::eq_p (arg1, -1))
11904 return fold_convert_loc (loc, type, negate_expr (arg0));
11906 /* Convert -A / -B to A / B when the type is signed and overflow is
11907 undefined. */
11908 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11909 && TREE_CODE (arg0) == NEGATE_EXPR
11910 && negate_expr_p (arg1))
11912 if (INTEGRAL_TYPE_P (type))
11913 fold_overflow_warning (("assuming signed overflow does not occur "
11914 "when distributing negation across "
11915 "division"),
11916 WARN_STRICT_OVERFLOW_MISC);
11917 return fold_build2_loc (loc, code, type,
11918 fold_convert_loc (loc, type,
11919 TREE_OPERAND (arg0, 0)),
11920 fold_convert_loc (loc, type,
11921 negate_expr (arg1)));
11923 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11924 && TREE_CODE (arg1) == NEGATE_EXPR
11925 && negate_expr_p (arg0))
11927 if (INTEGRAL_TYPE_P (type))
11928 fold_overflow_warning (("assuming signed overflow does not occur "
11929 "when distributing negation across "
11930 "division"),
11931 WARN_STRICT_OVERFLOW_MISC);
11932 return fold_build2_loc (loc, code, type,
11933 fold_convert_loc (loc, type,
11934 negate_expr (arg0)),
11935 fold_convert_loc (loc, type,
11936 TREE_OPERAND (arg1, 0)));
11939 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11940 operation, EXACT_DIV_EXPR.
11942 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11943 At one time others generated faster code; it's not clear if they do
11944 after the last round of changes to the DIV code in expmed.c. */
11945 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11946 && multiple_of_p (type, arg0, arg1))
11947 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11949 strict_overflow_p = false;
11950 if (TREE_CODE (arg1) == INTEGER_CST
11951 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11952 &strict_overflow_p)))
11954 if (strict_overflow_p)
11955 fold_overflow_warning (("assuming signed overflow does not occur "
11956 "when simplifying division"),
11957 WARN_STRICT_OVERFLOW_MISC);
11958 return fold_convert_loc (loc, type, tem);
11961 return NULL_TREE;
11963 case CEIL_MOD_EXPR:
11964 case FLOOR_MOD_EXPR:
11965 case ROUND_MOD_EXPR:
11966 case TRUNC_MOD_EXPR:
11967 /* X % -1 is zero. */
11968 if (!TYPE_UNSIGNED (type)
11969 && TREE_CODE (arg1) == INTEGER_CST
11970 && wi::eq_p (arg1, -1))
11971 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11973 /* X % -C is the same as X % C. */
11974 if (code == TRUNC_MOD_EXPR
11975 && TYPE_SIGN (type) == SIGNED
11976 && TREE_CODE (arg1) == INTEGER_CST
11977 && !TREE_OVERFLOW (arg1)
11978 && wi::neg_p (arg1)
11979 && !TYPE_OVERFLOW_TRAPS (type)
11980 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11981 && !sign_bit_p (arg1, arg1))
11982 return fold_build2_loc (loc, code, type,
11983 fold_convert_loc (loc, type, arg0),
11984 fold_convert_loc (loc, type,
11985 negate_expr (arg1)));
11987 /* X % -Y is the same as X % Y. */
11988 if (code == TRUNC_MOD_EXPR
11989 && !TYPE_UNSIGNED (type)
11990 && TREE_CODE (arg1) == NEGATE_EXPR
11991 && !TYPE_OVERFLOW_TRAPS (type))
11992 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11993 fold_convert_loc (loc, type,
11994 TREE_OPERAND (arg1, 0)));
11996 strict_overflow_p = false;
11997 if (TREE_CODE (arg1) == INTEGER_CST
11998 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11999 &strict_overflow_p)))
12001 if (strict_overflow_p)
12002 fold_overflow_warning (("assuming signed overflow does not occur "
12003 "when simplifying modulus"),
12004 WARN_STRICT_OVERFLOW_MISC);
12005 return fold_convert_loc (loc, type, tem);
12008 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12009 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12010 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12011 && (TYPE_UNSIGNED (type)
12012 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12014 tree c = arg1;
12015 /* Also optimize A % (C << N) where C is a power of 2,
12016 to A & ((C << N) - 1). */
12017 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12018 c = TREE_OPERAND (arg1, 0);
12020 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12022 tree mask
12023 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12024 build_int_cst (TREE_TYPE (arg1), 1));
12025 if (strict_overflow_p)
12026 fold_overflow_warning (("assuming signed overflow does not "
12027 "occur when simplifying "
12028 "X % (power of two)"),
12029 WARN_STRICT_OVERFLOW_MISC);
12030 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12031 fold_convert_loc (loc, type, arg0),
12032 fold_convert_loc (loc, type, mask));
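/* The classic power-of-two rewrite, e.g. for unsigned x:
     x % 8         ->  x & 7
     x % (4 << n)  ->  x & ((4 << n) - 1).  */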
12036 return NULL_TREE;
12038 case LROTATE_EXPR:
12039 case RROTATE_EXPR:
12040 if (integer_all_onesp (arg0))
12041 return omit_one_operand_loc (loc, type, arg0, arg1);
12042 goto shift;
12044 case RSHIFT_EXPR:
12045 /* Optimize -1 >> x for arithmetic right shifts. */
12046 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12047 && tree_expr_nonnegative_p (arg1))
12048 return omit_one_operand_loc (loc, type, arg0, arg1);
12049 /* ... fall through ... */
12051 case LSHIFT_EXPR:
12052 shift:
12053 if (integer_zerop (arg1))
12054 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12055 if (integer_zerop (arg0))
12056 return omit_one_operand_loc (loc, type, arg0, arg1);
12058 /* Prefer vector1 << scalar to vector1 << vector2
12059 if vector2 is uniform. */
12060 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12061 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12062 return fold_build2_loc (loc, code, type, op0, tem);
12064 /* Since a negative shift count is not well-defined,
12065 don't try to compute it in the compiler. */
12066 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12067 return NULL_TREE;
12069 prec = element_precision (type);
12071 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12072 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12073 && tree_to_uhwi (arg1) < prec
12074 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12075 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12077 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12078 + tree_to_uhwi (arg1));
12080 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12081 being well defined. */
12082 if (low >= prec)
12084 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12085 low = low % prec;
12086 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12087 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12088 TREE_OPERAND (arg0, 0));
12089 else
12090 low = prec - 1;
12093 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12094 build_int_cst (TREE_TYPE (arg1), low));
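/* E.g.  (x << 3) << 4  ->  x << 7.  When the counts sum to the
   precision or more, rotates reduce the sum modulo the precision,
   unsigned shifts fold to zero, and a signed right shift is clamped
   to precision - 1.  */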
12097 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12098 into x & ((unsigned)-1 >> c) for unsigned types. */
12099 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12100 || (TYPE_UNSIGNED (type)
12101 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12102 && tree_fits_uhwi_p (arg1)
12103 && tree_to_uhwi (arg1) < prec
12104 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12105 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12107 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12108 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12109 tree lshift;
12110 tree arg00;
12112 if (low0 == low1)
12114 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12116 lshift = build_minus_one_cst (type);
12117 lshift = const_binop (code, lshift, arg1);
12119 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
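/* E.g. for a 32-bit unsigned x:
     (x >> 4) << 4  ->  x & 0xfffffff0
     (x << 4) >> 4  ->  x & 0x0fffffff.  */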
12123 /* Rewrite an LROTATE_EXPR by a constant into an
12124 RROTATE_EXPR by a new constant. */
12125 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12127 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12128 tem = const_binop (MINUS_EXPR, tem, arg1);
12129 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
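/* E.g. in a 32-bit type, a left-rotate by 3 is canonicalized to a
   right-rotate by 32 - 3 == 29.  */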
12132 /* If we have a rotate of a bit operation with the rotate count and
12133 the second operand of the bit operation both constant,
12134 permute the two operations. */
12135 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12136 && (TREE_CODE (arg0) == BIT_AND_EXPR
12137 || TREE_CODE (arg0) == BIT_IOR_EXPR
12138 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12139 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12140 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12141 fold_build2_loc (loc, code, type,
12142 TREE_OPERAND (arg0, 0), arg1),
12143 fold_build2_loc (loc, code, type,
12144 TREE_OPERAND (arg0, 1), arg1));
12146 /* Two consecutive rotates adding up to some integer
12147 multiple of the precision of the type can be ignored. */
12148 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12149 && TREE_CODE (arg0) == RROTATE_EXPR
12150 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12151 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12152 prec) == 0)
12153 return TREE_OPERAND (arg0, 0);
12155 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12156 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12157 if the latter can be further optimized. */
12158 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12159 && TREE_CODE (arg0) == BIT_AND_EXPR
12160 && TREE_CODE (arg1) == INTEGER_CST
12161 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12163 tree mask = fold_build2_loc (loc, code, type,
12164 fold_convert_loc (loc, type,
12165 TREE_OPERAND (arg0, 1)),
12166 arg1);
12167 tree shift = fold_build2_loc (loc, code, type,
12168 fold_convert_loc (loc, type,
12169 TREE_OPERAND (arg0, 0)),
12170 arg1);
12171 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12172 if (tem)
12173 return tem;
12176 return NULL_TREE;
12178 case MIN_EXPR:
12179 if (operand_equal_p (arg0, arg1, 0))
12180 return omit_one_operand_loc (loc, type, arg0, arg1);
12181 if (INTEGRAL_TYPE_P (type)
12182 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12183 return omit_one_operand_loc (loc, type, arg1, arg0);
12184 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12185 if (tem)
12186 return tem;
12187 goto associate;
12189 case MAX_EXPR:
12190 if (operand_equal_p (arg0, arg1, 0))
12191 return omit_one_operand_loc (loc, type, arg0, arg1);
12192 if (INTEGRAL_TYPE_P (type)
12193 && TYPE_MAX_VALUE (type)
12194 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12195 return omit_one_operand_loc (loc, type, arg1, arg0);
12196 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12197 if (tem)
12198 return tem;
12199 goto associate;
12201 case TRUTH_ANDIF_EXPR:
12202 /* Note that the operands of this must be ints
12203 and their values must be 0 or 1.
12204 ("true" is a fixed value perhaps depending on the language.) */
12205 /* If first arg is constant zero, return it. */
12206 if (integer_zerop (arg0))
12207 return fold_convert_loc (loc, type, arg0);
12208 case TRUTH_AND_EXPR:
12209 /* If either arg is constant true, drop it. */
12210 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12211 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12212 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12213 /* Preserve sequence points. */
12214 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12215 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12216 /* If second arg is constant zero, result is zero, but first arg
12217 must be evaluated. */
12218 if (integer_zerop (arg1))
12219 return omit_one_operand_loc (loc, type, arg1, arg0);
12220 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12221 case will be handled here. */
12222 if (integer_zerop (arg0))
12223 return omit_one_operand_loc (loc, type, arg0, arg1);
12225 /* !X && X is always false. */
12226 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12227 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12228 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12229 /* X && !X is always false. */
12230 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12231 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12232 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12234 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12235 means A >= Y && A != MAX, but in this case we know that
12236 A < X <= MAX. */
12238 if (!TREE_SIDE_EFFECTS (arg0)
12239 && !TREE_SIDE_EFFECTS (arg1))
12241 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12242 if (tem && !operand_equal_p (tem, arg0, 0))
12243 return fold_build2_loc (loc, code, type, tem, arg1);
12245 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12246 if (tem && !operand_equal_p (tem, arg1, 0))
12247 return fold_build2_loc (loc, code, type, arg0, tem);
12250 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12251 != NULL_TREE)
12252 return tem;
12254 return NULL_TREE;
12256 case TRUTH_ORIF_EXPR:
12257 /* Note that the operands of this must be ints
12258 and their values must be 0 or true.
12259 ("true" is a fixed value perhaps depending on the language.) */
12260 /* If first arg is constant true, return it. */
12261 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12262 return fold_convert_loc (loc, type, arg0);
12263 case TRUTH_OR_EXPR:
12264 /* If either arg is constant zero, drop it. */
12265 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12266 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12267 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12268 /* Preserve sequence points. */
12269 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12270 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12271 /* If second arg is constant true, result is true, but we must
12272 evaluate first arg. */
12273 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12274 return omit_one_operand_loc (loc, type, arg1, arg0);
12275 /* Likewise for first arg, but note this only occurs here for
12276 TRUTH_OR_EXPR. */
12277 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12278 return omit_one_operand_loc (loc, type, arg0, arg1);
12280 /* !X || X is always true. */
12281 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12283 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12284 /* X || !X is always true. */
12285 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12286 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12287 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12290 /* (X && !Y) || (!X && Y) is X ^ Y.  */
12290 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12291 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12293 tree a0, a1, l0, l1, n0, n1;
12295 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12296 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12298 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12299 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12301 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12302 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12304 if ((operand_equal_p (n0, a0, 0)
12305 && operand_equal_p (n1, a1, 0))
12306 || (operand_equal_p (n0, a1, 0)
12307 && operand_equal_p (n1, a0, 0)))
12308 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12311 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12312 != NULL_TREE)
12313 return tem;
12315 return NULL_TREE;
12317 case TRUTH_XOR_EXPR:
12318 /* If the second arg is constant zero, drop it. */
12319 if (integer_zerop (arg1))
12320 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12321 /* If the second arg is constant true, this is a logical inversion. */
12322 if (integer_onep (arg1))
12324 tem = invert_truthvalue_loc (loc, arg0);
12325 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12327 /* Identical arguments cancel to zero. */
12328 if (operand_equal_p (arg0, arg1, 0))
12329 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12331 /* !X ^ X is always true. */
12332 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12333 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12334 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12336 /* X ^ !X is always true. */
12337 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12338 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12339 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
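/* Taken together, the rules above fold e.g. b ^ 0 to b, b ^ 1 to !b,
   b ^ b to 0, and !b ^ b to 1.  */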
12341 return NULL_TREE;
12343 case EQ_EXPR:
12344 case NE_EXPR:
12345 STRIP_NOPS (arg0);
12346 STRIP_NOPS (arg1);
12348 tem = fold_comparison (loc, code, type, op0, op1);
12349 if (tem != NULL_TREE)
12350 return tem;
12352 /* bool_var != 0 becomes bool_var. */
12353 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12354 && code == NE_EXPR)
12355 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12357 /* bool_var == 1 becomes bool_var. */
12358 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12359 && code == EQ_EXPR)
12360 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12362 /* bool_var != 1 becomes !bool_var. */
12363 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12364 && code == NE_EXPR)
12365 return fold_convert_loc (loc, type,
12366 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12367 TREE_TYPE (arg0), arg0));
12369 /* bool_var == 0 becomes !bool_var. */
12370 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12371 && code == EQ_EXPR)
12372 return fold_convert_loc (loc, type,
12373 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12374 TREE_TYPE (arg0), arg0));
12376 /* !exp != 0 becomes !exp.  */
12377 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12378 && code == NE_EXPR)
12379 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
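/* E.g. for a _Bool b, the rules above fold "b != 0" and "b == 1"
   to b, and "b != 1" and "b == 0" to !b.  */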
12381 /* If this is an equality comparison of the address of two non-weak,
12382 unaliased symbols neither of which are extern (since we do not
12383 have access to attributes for externs), then we know the result. */
12384 if (TREE_CODE (arg0) == ADDR_EXPR
12385 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12386 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12387 && ! lookup_attribute ("alias",
12388 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12389 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12390 && TREE_CODE (arg1) == ADDR_EXPR
12391 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12392 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12393 && ! lookup_attribute ("alias",
12394 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12395 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12397 /* We know that we're looking at the address of two
12398 non-weak, unaliased, static _DECL nodes.
12400 It is both wasteful and incorrect to call operand_equal_p
12401 to compare the two ADDR_EXPR nodes. It is wasteful in that
12402 all we need to do is test pointer equality for the arguments
12403 to the two ADDR_EXPR nodes. It is incorrect to use
12404 operand_equal_p as that function is NOT equivalent to a
12405 C equality test. It can in fact return false for two
12406 objects which would test as equal using the C equality
12407 operator. */
12408 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12409 return constant_boolean_node (equal
12410 ? code == EQ_EXPR : code != EQ_EXPR,
12411 type);
12414 /* Similarly for a NEGATE_EXPR. */
12415 if (TREE_CODE (arg0) == NEGATE_EXPR
12416 && TREE_CODE (arg1) == INTEGER_CST
12417 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12418 arg1)))
12419 && TREE_CODE (tem) == INTEGER_CST
12420 && !TREE_OVERFLOW (tem))
12421 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
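/* E.g. "-x == 5" folds to "x == -5" by the NEGATE_EXPR rule above,
   provided negating the constant does not overflow.  */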
12423 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12424 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12425 && TREE_CODE (arg1) == INTEGER_CST
12426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12427 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12428 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12429 fold_convert_loc (loc,
12430 TREE_TYPE (arg0),
12431 arg1),
12432 TREE_OPERAND (arg0, 1)));
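/* E.g. "(x ^ 5) == 3" folds to "x == 6", since 5 ^ 3 == 6.  */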
12434 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12435 if ((TREE_CODE (arg0) == PLUS_EXPR
12436 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12437 || TREE_CODE (arg0) == MINUS_EXPR)
12438 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12439 0)),
12440 arg1, 0)
12441 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12442 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12444 tree val = TREE_OPERAND (arg0, 1);
12445 return omit_two_operands_loc (loc, type,
12446 fold_build2_loc (loc, code, type,
12447 val,
12448 build_int_cst (TREE_TYPE (val),
12449 0)),
12450 TREE_OPERAND (arg0, 0), arg1);
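/* E.g. "x + y == x" folds to "y == 0" for integral x and y, with
   omit_two_operands_loc preserving any side effects of x.  */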
12453 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12454 if (TREE_CODE (arg0) == MINUS_EXPR
12455 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12456 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12457 1)),
12458 arg1, 0)
12459 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12461 return omit_two_operands_loc (loc, type,
12462 code == NE_EXPR
12463 ? boolean_true_node : boolean_false_node,
12464 TREE_OPERAND (arg0, 1), arg1);
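/* E.g. "7 - x == x" would require 2 * x == 7, which no integer
   satisfies, so it folds to false (and "7 - x != x" to true).  */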
12467 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12468 if (TREE_CODE (arg0) == ABS_EXPR
12469 && (integer_zerop (arg1) || real_zerop (arg1)))
12470 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12472 /* If this is an EQ or NE comparison with zero and ARG0 is
12473 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12474 two operations, but the latter can be done in one less insn
12475 on machines that have only two-operand insns or on which a
12476 constant cannot be the first operand. */
12477 if (TREE_CODE (arg0) == BIT_AND_EXPR
12478 && integer_zerop (arg1))
12480 tree arg00 = TREE_OPERAND (arg0, 0);
12481 tree arg01 = TREE_OPERAND (arg0, 1);
12482 if (TREE_CODE (arg00) == LSHIFT_EXPR
12483 && integer_onep (TREE_OPERAND (arg00, 0)))
12485 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12486 arg01, TREE_OPERAND (arg00, 1));
12487 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12488 build_int_cst (TREE_TYPE (arg0), 1));
12489 return fold_build2_loc (loc, code, type,
12490 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12491 arg1);
12493 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12494 && integer_onep (TREE_OPERAND (arg01, 0)))
12496 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12497 arg00, TREE_OPERAND (arg01, 1));
12498 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12499 build_int_cst (TREE_TYPE (arg0), 1));
12500 return fold_build2_loc (loc, code, type,
12501 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12502 arg1);
12506 /* If this is an NE or EQ comparison of zero against the result of a
12507 signed MOD operation whose second operand is a power of 2, make
12508 the MOD operation unsigned since it is simpler and equivalent. */
12509 if (integer_zerop (arg1)
12510 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12511 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12512 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12513 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12514 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12515 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12517 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12518 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12519 fold_convert_loc (loc, newtype,
12520 TREE_OPERAND (arg0, 0)),
12521 fold_convert_loc (loc, newtype,
12522 TREE_OPERAND (arg0, 1)));
12524 return fold_build2_loc (loc, code, type, newmod,
12525 fold_convert_loc (loc, newtype, arg1));
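/* E.g. with signed x, "x % 8 == 0" becomes "(unsigned) x % 8u == 0";
   for a power-of-two divisor the two tests agree even when x is
   negative.  */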
12528 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12529 C1 is a valid shift constant, and C2 is a power of two, i.e.
12530 a single bit. */
12531 if (TREE_CODE (arg0) == BIT_AND_EXPR
12532 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12533 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12534 == INTEGER_CST
12535 && integer_pow2p (TREE_OPERAND (arg0, 1))
12536 && integer_zerop (arg1))
12538 tree itype = TREE_TYPE (arg0);
12539 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12540 prec = TYPE_PRECISION (itype);
12542 /* Check for a valid shift count. */
12543 if (wi::ltu_p (arg001, prec))
12545 tree arg01 = TREE_OPERAND (arg0, 1);
12546 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12547 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12548 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12549 can be rewritten as (X & (C2 << C1)) != 0. */
12550 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12552 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12553 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12554 return fold_build2_loc (loc, code, type, tem,
12555 fold_convert_loc (loc, itype, arg1));
12557 /* Otherwise, for signed (arithmetic) shifts,
12558 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12559 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12560 else if (!TYPE_UNSIGNED (itype))
12561 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12562 arg000, build_int_cst (itype, 0));
12563 /* Otherwise, for unsigned (logical) shifts,
12564 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12565 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12566 else
12567 return omit_one_operand_loc (loc, type,
12568 code == EQ_EXPR ? integer_one_node
12569 : integer_zero_node,
12570 arg000);
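/* E.g. for 32-bit int x, "((x >> 4) & 1) != 0" folds to
   "(x & 16) != 0", while "((x >> 31) & 2) != 0" cannot shift the
   mask back without overflow and instead folds to "x < 0".  */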
12574 /* If we have (A & C) == C where C is a power of 2, convert this into
12575 (A & C) != 0. Similarly for NE_EXPR. */
12576 if (TREE_CODE (arg0) == BIT_AND_EXPR
12577 && integer_pow2p (TREE_OPERAND (arg0, 1))
12578 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12579 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12580 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12581 integer_zero_node));
12583 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12584 bit, then fold the expression into A < 0 or A >= 0. */
12585 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12586 if (tem)
12587 return tem;
12589 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12590 Similarly for NE_EXPR. */
12591 if (TREE_CODE (arg0) == BIT_AND_EXPR
12592 && TREE_CODE (arg1) == INTEGER_CST
12593 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12595 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12596 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12597 TREE_OPERAND (arg0, 1));
12598 tree dandnotc
12599 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12600 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12601 notc);
12602 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12603 if (integer_nonzerop (dandnotc))
12604 return omit_one_operand_loc (loc, type, rslt, arg0);
12607 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12608 Similarly for NE_EXPR. */
12609 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12610 && TREE_CODE (arg1) == INTEGER_CST
12611 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12613 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12614 tree candnotd
12615 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12616 TREE_OPERAND (arg0, 1),
12617 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12618 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12619 if (integer_nonzerop (candnotd))
12620 return omit_one_operand_loc (loc, type, rslt, arg0);
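/* E.g. "(x & 3) == 4" folds to 0 since 4 & ~3 != 0, and likewise
   "(x | 4) == 3" folds to 0 since 4 & ~3 != 0; omit_one_operand_loc
   still preserves any side effects of the left-hand side.  */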
12623 /* If this is a comparison of a field, we may be able to simplify it. */
12624 if ((TREE_CODE (arg0) == COMPONENT_REF
12625 || TREE_CODE (arg0) == BIT_FIELD_REF)
12626 /* Handle the constant case even without -O
12627 to make sure the warnings are given. */
12628 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12630 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12631 if (t1)
12632 return t1;
12635 /* Optimize comparisons of strlen vs zero to a compare of the
12636 first character of the string vs zero. To wit,
12637 strlen(ptr) == 0 => *ptr == 0
12638 strlen(ptr) != 0 => *ptr != 0
12639 Other cases should reduce to one of these two (or a constant)
12640 due to the return value of strlen being unsigned. */
12641 if (TREE_CODE (arg0) == CALL_EXPR
12642 && integer_zerop (arg1))
12644 tree fndecl = get_callee_fndecl (arg0);
12646 if (fndecl
12647 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12648 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12649 && call_expr_nargs (arg0) == 1
12650 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12652 tree iref = build_fold_indirect_ref_loc (loc,
12653 CALL_EXPR_ARG (arg0, 0));
12654 return fold_build2_loc (loc, code, type, iref,
12655 build_int_cst (TREE_TYPE (iref), 0));
12659 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12660 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12661 if (TREE_CODE (arg0) == RSHIFT_EXPR
12662 && integer_zerop (arg1)
12663 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12665 tree arg00 = TREE_OPERAND (arg0, 0);
12666 tree arg01 = TREE_OPERAND (arg0, 1);
12667 tree itype = TREE_TYPE (arg00);
12668 if (wi::eq_p (arg01, element_precision (itype) - 1))
12670 if (TYPE_UNSIGNED (itype))
12672 itype = signed_type_for (itype);
12673 arg00 = fold_convert_loc (loc, itype, arg00);
12675 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12676 type, arg00, build_zero_cst (itype));
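/* E.g. for 32-bit x, "(x >> 31) != 0" folds to "x < 0", converting
   to the signed type first when x is unsigned.  */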
12680 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12681 if (integer_zerop (arg1)
12682 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12683 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12684 TREE_OPERAND (arg0, 1));
12686 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12687 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12688 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12689 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12690 build_zero_cst (TREE_TYPE (arg0)));
12691 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12692 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12693 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12694 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12695 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12696 build_zero_cst (TREE_TYPE (arg0)));
12698 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12699 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12700 && TREE_CODE (arg1) == INTEGER_CST
12701 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12702 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12703 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12704 TREE_OPERAND (arg0, 1), arg1));
12706 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12707 (X & C) == 0 when C is a single bit. */
12708 if (TREE_CODE (arg0) == BIT_AND_EXPR
12709 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12710 && integer_zerop (arg1)
12711 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12713 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12714 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12715 TREE_OPERAND (arg0, 1));
12716 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12717 type, tem,
12718 fold_convert_loc (loc, TREE_TYPE (arg0),
12719 arg1));
12722 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12723 constant C is a power of two, i.e. a single bit. */
12724 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12725 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12726 && integer_zerop (arg1)
12727 && integer_pow2p (TREE_OPERAND (arg0, 1))
12728 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12729 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12731 tree arg00 = TREE_OPERAND (arg0, 0);
12732 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12733 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12736 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12737 when C is a power of two, i.e. a single bit. */
12738 if (TREE_CODE (arg0) == BIT_AND_EXPR
12739 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12740 && integer_zerop (arg1)
12741 && integer_pow2p (TREE_OPERAND (arg0, 1))
12742 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12743 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12745 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12746 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12747 arg000, TREE_OPERAND (arg0, 1));
12748 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12749 tem, build_int_cst (TREE_TYPE (tem), 0));
12752 if (integer_zerop (arg1)
12753 && tree_expr_nonzero_p (arg0))
12755 tree res = constant_boolean_node (code == NE_EXPR, type);
12756 return omit_one_operand_loc (loc, type, res, arg0);
12759 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12760 if (TREE_CODE (arg0) == NEGATE_EXPR
12761 && TREE_CODE (arg1) == NEGATE_EXPR)
12762 return fold_build2_loc (loc, code, type,
12763 TREE_OPERAND (arg0, 0),
12764 fold_convert_loc (loc, TREE_TYPE (arg0),
12765 TREE_OPERAND (arg1, 0)));
12767 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
12768 if (TREE_CODE (arg0) == BIT_AND_EXPR
12769 && TREE_CODE (arg1) == BIT_AND_EXPR)
12771 tree arg00 = TREE_OPERAND (arg0, 0);
12772 tree arg01 = TREE_OPERAND (arg0, 1);
12773 tree arg10 = TREE_OPERAND (arg1, 0);
12774 tree arg11 = TREE_OPERAND (arg1, 1);
12775 tree itype = TREE_TYPE (arg0);
12777 if (operand_equal_p (arg01, arg11, 0))
12778 return fold_build2_loc (loc, code, type,
12779 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12780 fold_build2_loc (loc,
12781 BIT_XOR_EXPR, itype,
12782 arg00, arg10),
12783 arg01),
12784 build_zero_cst (itype));
12786 if (operand_equal_p (arg01, arg10, 0))
12787 return fold_build2_loc (loc, code, type,
12788 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12789 fold_build2_loc (loc,
12790 BIT_XOR_EXPR, itype,
12791 arg00, arg11),
12792 arg01),
12793 build_zero_cst (itype));
12795 if (operand_equal_p (arg00, arg11, 0))
12796 return fold_build2_loc (loc, code, type,
12797 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12798 fold_build2_loc (loc,
12799 BIT_XOR_EXPR, itype,
12800 arg01, arg10),
12801 arg00),
12802 build_zero_cst (itype));
12804 if (operand_equal_p (arg00, arg10, 0))
12805 return fold_build2_loc (loc, code, type,
12806 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12807 fold_build2_loc (loc,
12808 BIT_XOR_EXPR, itype,
12809 arg01, arg11),
12810 arg00),
12811 build_zero_cst (itype));
12814 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12815 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12817 tree arg00 = TREE_OPERAND (arg0, 0);
12818 tree arg01 = TREE_OPERAND (arg0, 1);
12819 tree arg10 = TREE_OPERAND (arg1, 0);
12820 tree arg11 = TREE_OPERAND (arg1, 1);
12821 tree itype = TREE_TYPE (arg0);
12823 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12824 operand_equal_p guarantees no side-effects so we don't need
12825 to use omit_one_operand on Z. */
12826 if (operand_equal_p (arg01, arg11, 0))
12827 return fold_build2_loc (loc, code, type, arg00,
12828 fold_convert_loc (loc, TREE_TYPE (arg00),
12829 arg10));
12830 if (operand_equal_p (arg01, arg10, 0))
12831 return fold_build2_loc (loc, code, type, arg00,
12832 fold_convert_loc (loc, TREE_TYPE (arg00),
12833 arg11));
12834 if (operand_equal_p (arg00, arg11, 0))
12835 return fold_build2_loc (loc, code, type, arg01,
12836 fold_convert_loc (loc, TREE_TYPE (arg01),
12837 arg10));
12838 if (operand_equal_p (arg00, arg10, 0))
12839 return fold_build2_loc (loc, code, type, arg01,
12840 fold_convert_loc (loc, TREE_TYPE (arg01),
12841 arg11));
12843 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12844 if (TREE_CODE (arg01) == INTEGER_CST
12845 && TREE_CODE (arg11) == INTEGER_CST)
12847 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12848 fold_convert_loc (loc, itype, arg11));
12849 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12850 return fold_build2_loc (loc, code, type, tem,
12851 fold_convert_loc (loc, itype, arg10));
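/* E.g. "(x ^ 1) == (y ^ 2)" folds to "(x ^ 3) == y",
   since 1 ^ 2 == 3.  */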
12855 /* Attempt to simplify equality/inequality comparisons of complex
12856 values. Only lower the comparison if the result is known or
12857 can be simplified to a single scalar comparison. */
12858 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12859 || TREE_CODE (arg0) == COMPLEX_CST)
12860 && (TREE_CODE (arg1) == COMPLEX_EXPR
12861 || TREE_CODE (arg1) == COMPLEX_CST))
12863 tree real0, imag0, real1, imag1;
12864 tree rcond, icond;
12866 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12868 real0 = TREE_OPERAND (arg0, 0);
12869 imag0 = TREE_OPERAND (arg0, 1);
12871 else
12873 real0 = TREE_REALPART (arg0);
12874 imag0 = TREE_IMAGPART (arg0);
12877 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12879 real1 = TREE_OPERAND (arg1, 0);
12880 imag1 = TREE_OPERAND (arg1, 1);
12882 else
12884 real1 = TREE_REALPART (arg1);
12885 imag1 = TREE_IMAGPART (arg1);
12888 rcond = fold_binary_loc (loc, code, type, real0, real1);
12889 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12891 if (integer_zerop (rcond))
12893 if (code == EQ_EXPR)
12894 return omit_two_operands_loc (loc, type, boolean_false_node,
12895 imag0, imag1);
12896 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12898 else
12900 if (code == NE_EXPR)
12901 return omit_two_operands_loc (loc, type, boolean_true_node,
12902 imag0, imag1);
12903 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12907 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12908 if (icond && TREE_CODE (icond) == INTEGER_CST)
12910 if (integer_zerop (icond))
12912 if (code == EQ_EXPR)
12913 return omit_two_operands_loc (loc, type, boolean_false_node,
12914 real0, real1);
12915 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12917 else
12919 if (code == NE_EXPR)
12920 return omit_two_operands_loc (loc, type, boolean_true_node,
12921 real0, real1);
12922 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12927 return NULL_TREE;
12929 case LT_EXPR:
12930 case GT_EXPR:
12931 case LE_EXPR:
12932 case GE_EXPR:
12933 tem = fold_comparison (loc, code, type, op0, op1);
12934 if (tem != NULL_TREE)
12935 return tem;
12937 /* Transform comparisons of the form X +- C CMP X. */
12938 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12939 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12940 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12941 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12942 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12943 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12945 tree arg01 = TREE_OPERAND (arg0, 1);
12946 enum tree_code code0 = TREE_CODE (arg0);
12947 int is_positive;
12949 if (TREE_CODE (arg01) == REAL_CST)
12950 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12951 else
12952 is_positive = tree_int_cst_sgn (arg01);
12954 /* (X - c) > X becomes false. */
12955 if (code == GT_EXPR
12956 && ((code0 == MINUS_EXPR && is_positive >= 0)
12957 || (code0 == PLUS_EXPR && is_positive <= 0)))
12959 if (TREE_CODE (arg01) == INTEGER_CST
12960 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12961 fold_overflow_warning (("assuming signed overflow does not "
12962 "occur when assuming that (X - c) > X "
12963 "is always false"),
12964 WARN_STRICT_OVERFLOW_ALL);
12965 return constant_boolean_node (0, type);
12968 /* Likewise (X + c) < X becomes false. */
12969 if (code == LT_EXPR
12970 && ((code0 == PLUS_EXPR && is_positive >= 0)
12971 || (code0 == MINUS_EXPR && is_positive <= 0)))
12973 if (TREE_CODE (arg01) == INTEGER_CST
12974 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12975 fold_overflow_warning (("assuming signed overflow does not "
12976 "occur when assuming that "
12977 "(X + c) < X is always false"),
12978 WARN_STRICT_OVERFLOW_ALL);
12979 return constant_boolean_node (0, type);
12982 /* Convert (X - c) <= X to true. */
12983 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12984 && code == LE_EXPR
12985 && ((code0 == MINUS_EXPR && is_positive >= 0)
12986 || (code0 == PLUS_EXPR && is_positive <= 0)))
12988 if (TREE_CODE (arg01) == INTEGER_CST
12989 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12990 fold_overflow_warning (("assuming signed overflow does not "
12991 "occur when assuming that "
12992 "(X - c) <= X is always true"),
12993 WARN_STRICT_OVERFLOW_ALL);
12994 return constant_boolean_node (1, type);
12997 /* Convert (X + c) >= X to true. */
12998 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12999 && code == GE_EXPR
13000 && ((code0 == PLUS_EXPR && is_positive >= 0)
13001 || (code0 == MINUS_EXPR && is_positive <= 0)))
13003 if (TREE_CODE (arg01) == INTEGER_CST
13004 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13005 fold_overflow_warning (("assuming signed overflow does not "
13006 "occur when assuming that "
13007 "(X + c) >= X is always true"),
13008 WARN_STRICT_OVERFLOW_ALL);
13009 return constant_boolean_node (1, type);
13012 if (TREE_CODE (arg01) == INTEGER_CST)
13014 /* Convert X + c > X and X - c < X to true for integers. */
13015 if (code == GT_EXPR
13016 && ((code0 == PLUS_EXPR && is_positive > 0)
13017 || (code0 == MINUS_EXPR && is_positive < 0)))
13019 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13020 fold_overflow_warning (("assuming signed overflow does "
13021 "not occur when assuming that "
13022 "(X + c) > X is always true"),
13023 WARN_STRICT_OVERFLOW_ALL);
13024 return constant_boolean_node (1, type);
13027 if (code == LT_EXPR
13028 && ((code0 == MINUS_EXPR && is_positive > 0)
13029 || (code0 == PLUS_EXPR && is_positive < 0)))
13031 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13032 fold_overflow_warning (("assuming signed overflow does "
13033 "not occur when assuming that "
13034 "(X - c) < X is always true"),
13035 WARN_STRICT_OVERFLOW_ALL);
13036 return constant_boolean_node (1, type);
13039 /* Convert X + c <= X and X - c >= X to false for integers. */
13040 if (code == LE_EXPR
13041 && ((code0 == PLUS_EXPR && is_positive > 0)
13042 || (code0 == MINUS_EXPR && is_positive < 0)))
13044 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13045 fold_overflow_warning (("assuming signed overflow does "
13046 "not occur when assuming that "
13047 "(X + c) <= X is always false"),
13048 WARN_STRICT_OVERFLOW_ALL);
13049 return constant_boolean_node (0, type);
13052 if (code == GE_EXPR
13053 && ((code0 == MINUS_EXPR && is_positive > 0)
13054 || (code0 == PLUS_EXPR && is_positive < 0)))
13056 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13057 fold_overflow_warning (("assuming signed overflow does "
13058 "not occur when assuming that "
13059 "(X - c) >= X is always false"),
13060 WARN_STRICT_OVERFLOW_ALL);
13061 return constant_boolean_node (0, type);
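/* E.g. when signed overflow is undefined, "x + 1 > x" folds to true
   and "x + 1 <= x" to false, with fold_overflow_warning recording
   the assumption for -Wstrict-overflow.  */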
13066 /* Comparisons with the highest or lowest possible integer of
13067 the specified precision will have known values. */
13069 tree arg1_type = TREE_TYPE (arg1);
13070 unsigned int prec = TYPE_PRECISION (arg1_type);
13072 if (TREE_CODE (arg1) == INTEGER_CST
13073 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13075 wide_int max = wi::max_value (arg1_type);
13076 wide_int signed_max = wi::max_value (prec, SIGNED);
13077 wide_int min = wi::min_value (arg1_type);
13079 if (wi::eq_p (arg1, max))
13080 switch (code)
13082 case GT_EXPR:
13083 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13085 case GE_EXPR:
13086 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13088 case LE_EXPR:
13089 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13091 case LT_EXPR:
13092 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13094 /* The GE_EXPR and LT_EXPR cases above are not normally
13095 reached because of previous transformations. */
13097 default:
13098 break;
13100 else if (wi::eq_p (arg1, max - 1))
13101 switch (code)
13103 case GT_EXPR:
13104 arg1 = const_binop (PLUS_EXPR, arg1,
13105 build_int_cst (TREE_TYPE (arg1), 1));
13106 return fold_build2_loc (loc, EQ_EXPR, type,
13107 fold_convert_loc (loc,
13108 TREE_TYPE (arg1), arg0),
13109 arg1);
13110 case LE_EXPR:
13111 arg1 = const_binop (PLUS_EXPR, arg1,
13112 build_int_cst (TREE_TYPE (arg1), 1));
13113 return fold_build2_loc (loc, NE_EXPR, type,
13114 fold_convert_loc (loc, TREE_TYPE (arg1),
13115 arg0),
13116 arg1);
13117 default:
13118 break;
13120 else if (wi::eq_p (arg1, min))
13121 switch (code)
13123 case LT_EXPR:
13124 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13126 case LE_EXPR:
13127 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13129 case GE_EXPR:
13130 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13132 case GT_EXPR:
13133 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13135 default:
13136 break;
13138 else if (wi::eq_p (arg1, min + 1))
13139 switch (code)
13141 case GE_EXPR:
13142 arg1 = const_binop (MINUS_EXPR, arg1,
13143 build_int_cst (TREE_TYPE (arg1), 1));
13144 return fold_build2_loc (loc, NE_EXPR, type,
13145 fold_convert_loc (loc,
13146 TREE_TYPE (arg1), arg0),
13147 arg1);
13148 case LT_EXPR:
13149 arg1 = const_binop (MINUS_EXPR, arg1,
13150 build_int_cst (TREE_TYPE (arg1), 1));
13151 return fold_build2_loc (loc, EQ_EXPR, type,
13152 fold_convert_loc (loc, TREE_TYPE (arg1),
13153 arg0),
13154 arg1);
13155 default:
13156 break;
13159 else if (wi::eq_p (arg1, signed_max)
13160 && TYPE_UNSIGNED (arg1_type)
13161 /* We will flip the signedness of the comparison operator
13162 associated with the mode of arg1, so the sign bit is
13163 specified by this mode. Check that arg1 is the signed
13164 max associated with this sign bit. */
13165 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13166 /* signed_type does not work on pointer types. */
13167 && INTEGRAL_TYPE_P (arg1_type))
13169 /* The following case also applies to X < signed_max+1
13170 and X >= signed_max+1 because of previous transformations. */
13171 if (code == LE_EXPR || code == GT_EXPR)
13173 tree st = signed_type_for (arg1_type);
13174 return fold_build2_loc (loc,
13175 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13176 type, fold_convert_loc (loc, st, arg0),
13177 build_int_cst (st, 0));
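/* E.g. for unsigned 32-bit x, "x <= 0x7fffffff" folds to
   "(int) x >= 0" via the sign-bit flip above, and "x >= 0" folds
   to true via the minimum-value case.  */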
13183 /* If we are comparing an ABS_EXPR with a constant, we can
13184 convert all the cases into explicit comparisons, but they may
13185 well not be faster than doing the ABS and one comparison.
13186 But ABS (X) <= C is a range comparison, which becomes a subtraction
13187 and a comparison, and is probably faster. */
13188 if (code == LE_EXPR
13189 && TREE_CODE (arg1) == INTEGER_CST
13190 && TREE_CODE (arg0) == ABS_EXPR
13191 && ! TREE_SIDE_EFFECTS (arg0)
13192 && (0 != (tem = negate_expr (arg1)))
13193 && TREE_CODE (tem) == INTEGER_CST
13194 && !TREE_OVERFLOW (tem))
13195 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13196 build2 (GE_EXPR, type,
13197 TREE_OPERAND (arg0, 0), tem),
13198 build2 (LE_EXPR, type,
13199 TREE_OPERAND (arg0, 0), arg1));
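/* E.g. "ABS_EXPR <x> <= 5" becomes "x >= -5 && x <= 5" by the rule
   above.  */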
13201 /* Convert ABS_EXPR<x> >= 0 to true. */
13202 strict_overflow_p = false;
13203 if (code == GE_EXPR
13204 && (integer_zerop (arg1)
13205 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13206 && real_zerop (arg1)))
13207 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13209 if (strict_overflow_p)
13210 fold_overflow_warning (("assuming signed overflow does not occur "
13211 "when simplifying comparison of "
13212 "absolute value and zero"),
13213 WARN_STRICT_OVERFLOW_CONDITIONAL);
13214 return omit_one_operand_loc (loc, type,
13215 constant_boolean_node (true, type),
13216 arg0);
13219 /* Convert ABS_EXPR<x> < 0 to false. */
13220 strict_overflow_p = false;
13221 if (code == LT_EXPR
13222 && (integer_zerop (arg1) || real_zerop (arg1))
13223 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13225 if (strict_overflow_p)
13226 fold_overflow_warning (("assuming signed overflow does not occur "
13227 "when simplifying comparison of "
13228 "absolute value and zero"),
13229 WARN_STRICT_OVERFLOW_CONDITIONAL);
13230 return omit_one_operand_loc (loc, type,
13231 constant_boolean_node (false, type),
13232 arg0);
13235 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13236 and similarly for >= into !=. */
13237 if ((code == LT_EXPR || code == GE_EXPR)
13238 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13239 && TREE_CODE (arg1) == LSHIFT_EXPR
13240 && integer_onep (TREE_OPERAND (arg1, 0)))
13241 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13242 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13243 TREE_OPERAND (arg1, 1)),
13244 build_zero_cst (TREE_TYPE (arg0)));
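/* E.g. for unsigned x, "x < (1 << y)" folds to "(x >> y) == 0"
   and "x >= (1 << y)" to "(x >> y) != 0".  */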
13246 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13247 otherwise Y might be >= # of bits in X's type and thus e.g.
13248 (unsigned char) (1 << Y) for Y 15 might be 0.
13249 If the cast is widening, then 1 << Y should have unsigned type,
13250 otherwise if Y is number of bits in the signed shift type minus 1,
13251 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13252 31 might be 0xffffffff80000000. */
13253 if ((code == LT_EXPR || code == GE_EXPR)
13254 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13255 && CONVERT_EXPR_P (arg1)
13256 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13257 && (TYPE_PRECISION (TREE_TYPE (arg1))
13258 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13259 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13260 || (TYPE_PRECISION (TREE_TYPE (arg1))
13261 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13262 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13264 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13265 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13266 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13267 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13268 build_zero_cst (TREE_TYPE (arg0)));
13271 return NULL_TREE;
13273 case UNORDERED_EXPR:
13274 case ORDERED_EXPR:
13275 case UNLT_EXPR:
13276 case UNLE_EXPR:
13277 case UNGT_EXPR:
13278 case UNGE_EXPR:
13279 case UNEQ_EXPR:
13280 case LTGT_EXPR:
13281 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13283 t1 = fold_relational_const (code, type, arg0, arg1);
13284 if (t1 != NULL_TREE)
13285 return t1;
13288 /* If the first operand is NaN, the result is constant. */
13289 if (TREE_CODE (arg0) == REAL_CST
13290 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13291 && (code != LTGT_EXPR || ! flag_trapping_math))
13293 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13294 ? integer_zero_node
13295 : integer_one_node;
13296 return omit_one_operand_loc (loc, type, t1, arg1);
13299 /* If the second operand is NaN, the result is constant. */
13300 if (TREE_CODE (arg1) == REAL_CST
13301 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13302 && (code != LTGT_EXPR || ! flag_trapping_math))
13304 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13305 ? integer_zero_node
13306 : integer_one_node;
13307 return omit_one_operand_loc (loc, type, t1, arg0);
13310 /* Simplify unordered comparison of something with itself. */
13311 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13312 && operand_equal_p (arg0, arg1, 0))
13313 return constant_boolean_node (1, type);
13315 if (code == LTGT_EXPR
13316 && !flag_trapping_math
13317 && operand_equal_p (arg0, arg1, 0))
13318 return constant_boolean_node (0, type);
13320 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13322 tree targ0 = strip_float_extensions (arg0);
13323 tree targ1 = strip_float_extensions (arg1);
13324 tree newtype = TREE_TYPE (targ0);
13326 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13327 newtype = TREE_TYPE (targ1);
13329 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13330 return fold_build2_loc (loc, code, type,
13331 fold_convert_loc (loc, newtype, targ0),
13332 fold_convert_loc (loc, newtype, targ1));
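/* E.g. with float f1 and f2, "UNLT_EXPR <(double) f1, (double) f2>"
   folds to "UNLT_EXPR <f1, f2>", comparing at the narrower
   precision.  */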
13335 return NULL_TREE;
13337 case COMPOUND_EXPR:
13338 /* When pedantic, a compound expression can be neither an lvalue
13339 nor an integer constant expression. */
13340 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13341 return NULL_TREE;
13342 /* Don't let (0, 0) be a null pointer constant. */
13343 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13344 : fold_convert_loc (loc, type, arg1);
13345 return pedantic_non_lvalue_loc (loc, tem);
13347 case COMPLEX_EXPR:
13348 if ((TREE_CODE (arg0) == REAL_CST
13349 && TREE_CODE (arg1) == REAL_CST)
13350 || (TREE_CODE (arg0) == INTEGER_CST
13351 && TREE_CODE (arg1) == INTEGER_CST))
13352 return build_complex (type, arg0, arg1);
13353 if (TREE_CODE (arg0) == REALPART_EXPR
13354 && TREE_CODE (arg1) == IMAGPART_EXPR
13355 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13356 && operand_equal_p (TREE_OPERAND (arg0, 0),
13357 TREE_OPERAND (arg1, 0), 0))
13358 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13359 TREE_OPERAND (arg1, 0));
13360 return NULL_TREE;
13362 case ASSERT_EXPR:
13363 /* An ASSERT_EXPR should never be passed to fold_binary. */
13364 gcc_unreachable ();
13366 case VEC_PACK_TRUNC_EXPR:
13367 case VEC_PACK_FIX_TRUNC_EXPR:
13369 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13370 tree *elts;
13372 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13373 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13374 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13375 return NULL_TREE;
13377 elts = XALLOCAVEC (tree, nelts);
13378 if (!vec_cst_ctor_to_array (arg0, elts)
13379 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13380 return NULL_TREE;
13382 for (i = 0; i < nelts; i++)
13384 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13385 ? NOP_EXPR : FIX_TRUNC_EXPR,
13386 TREE_TYPE (type), elts[i]);
13387 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13388 return NULL_TREE;
13391 return build_vector (type, elts);
13394 case VEC_WIDEN_MULT_LO_EXPR:
13395 case VEC_WIDEN_MULT_HI_EXPR:
13396 case VEC_WIDEN_MULT_EVEN_EXPR:
13397 case VEC_WIDEN_MULT_ODD_EXPR:
13399 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13400 unsigned int out, ofs, scale;
13401 tree *elts;
13403 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13404 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13405 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13406 return NULL_TREE;
13408 elts = XALLOCAVEC (tree, nelts * 4);
13409 if (!vec_cst_ctor_to_array (arg0, elts)
13410 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13411 return NULL_TREE;
13413 if (code == VEC_WIDEN_MULT_LO_EXPR)
13414 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13415 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13416 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13417 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13418 scale = 1, ofs = 0;
13419 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13420 scale = 1, ofs = 1;
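/* E.g. for VEC_WIDEN_MULT_EVEN_EXPR the loop below computes
   elts[out] = arg0[2 * out] * arg1[2 * out], widening each element
   to the result's element type first.  */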
13422 for (out = 0; out < nelts; out++)
13424 unsigned int in1 = (out << scale) + ofs;
13425 unsigned int in2 = in1 + nelts * 2;
13426 tree t1, t2;
13428 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13429 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13431 if (t1 == NULL_TREE || t2 == NULL_TREE)
13432 return NULL_TREE;
13433 elts[out] = const_binop (MULT_EXPR, t1, t2);
13434 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13435 return NULL_TREE;
13438 return build_vector (type, elts);
13441 default:
13442 return NULL_TREE;
13443 } /* switch (code) */
13446 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13447 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13448 of GOTO_EXPR. */
13450 static tree
13451 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13453 switch (TREE_CODE (*tp))
13455 case LABEL_EXPR:
13456 return *tp;
13458 case GOTO_EXPR:
13459 *walk_subtrees = 0;
13461 /* ... fall through ... */
13463 default:
13464 return NULL_TREE;
13468 /* Return whether the sub-tree ST contains a label which is accessible from
13469 outside the sub-tree. */
13471 static bool
13472 contains_label_p (tree st)
13474 return
13475 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13478 /* Fold a ternary expression of code CODE and type TYPE with operands
13479 OP0, OP1, and OP2. Return the folded expression if folding is
13480 successful. Otherwise, return NULL_TREE. */
13482 tree
13483 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13484 tree op0, tree op1, tree op2)
13486 tree tem;
13487 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13488 enum tree_code_class kind = TREE_CODE_CLASS (code);
13490 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13491 && TREE_CODE_LENGTH (code) == 3);
13493 /* If this is a commutative operation, and OP0 is a constant, move it
13494 to OP1 to reduce the number of tests below. */
13495 if (commutative_ternary_tree_code (code)
13496 && tree_swap_operands_p (op0, op1, true))
13497 return fold_build3_loc (loc, code, type, op1, op0, op2);
13499 tem = generic_simplify (loc, code, type, op0, op1, op2);
13500 if (tem)
13501 return tem;
13503 /* Strip any conversions that don't change the mode. This is safe
13504 for every expression, except for a comparison expression because
13505 its signedness is derived from its operands. So, in the latter
13506 case, only strip conversions that don't change the signedness.
13508 Note that this is done as an internal manipulation within the
13509 constant folder, in order to find the simplest representation of
13510 the arguments so that their form can be studied. In any cases,
13511 the appropriate type conversions should be put back in the tree
13512 that will get out of the constant folder. */
13513 if (op0)
13515 arg0 = op0;
13516 STRIP_NOPS (arg0);
13519 if (op1)
13521 arg1 = op1;
13522 STRIP_NOPS (arg1);
13525 if (op2)
13527 arg2 = op2;
13528 STRIP_NOPS (arg2);
13531 switch (code)
13533 case COMPONENT_REF:
13534 if (TREE_CODE (arg0) == CONSTRUCTOR
13535 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13537 unsigned HOST_WIDE_INT idx;
13538 tree field, value;
13539 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13540 if (field == arg1)
13541 return value;
13543 return NULL_TREE;
13545 case COND_EXPR:
13546 case VEC_COND_EXPR:
13547 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13548 so all simple results must be passed through pedantic_non_lvalue. */
13549 if (TREE_CODE (arg0) == INTEGER_CST)
13551 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13552 tem = integer_zerop (arg0) ? op2 : op1;
13553 /* Only optimize constant conditions when the selected branch
13554 has the same type as the COND_EXPR. This avoids optimizing
13555 away "c ? x : throw", where the throw has a void type.
13556 Avoid throwing away an operand that contains a label. */
13557 if ((!TREE_SIDE_EFFECTS (unused_op)
13558 || !contains_label_p (unused_op))
13559 && (! VOID_TYPE_P (TREE_TYPE (tem))
13560 || VOID_TYPE_P (type)))
13561 return pedantic_non_lvalue_loc (loc, tem);
13562 return NULL_TREE;
13564 else if (TREE_CODE (arg0) == VECTOR_CST)
13566 if (integer_all_onesp (arg0))
13567 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
13568 if (integer_zerop (arg0))
13569 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
13571 if ((TREE_CODE (arg1) == VECTOR_CST
13572 || TREE_CODE (arg1) == CONSTRUCTOR)
13573 && (TREE_CODE (arg2) == VECTOR_CST
13574 || TREE_CODE (arg2) == CONSTRUCTOR))
13576 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13577 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13578 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13579 for (i = 0; i < nelts; i++)
13581 tree val = VECTOR_CST_ELT (arg0, i);
13582 if (integer_all_onesp (val))
13583 sel[i] = i;
13584 else if (integer_zerop (val))
13585 sel[i] = nelts + i;
13586 else /* Currently unreachable. */
13587 return NULL_TREE;
13589 tree t = fold_vec_perm (type, arg1, arg2, sel);
13590 if (t != NULL_TREE)
13591 return t;
13595 if (operand_equal_p (arg1, op2, 0))
13596 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13598 /* If we have A op B ? A : C, we may be able to convert this to a
13599 simpler expression, depending on the operation and the values
13600 of B and C. Signed zeros prevent all of these transformations,
13601 for reasons given above each one.
13603 Also try swapping the arguments and inverting the conditional. */
13604 if (COMPARISON_CLASS_P (arg0)
13605 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13606 arg1, TREE_OPERAND (arg0, 1))
13607 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13609 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13610 if (tem)
13611 return tem;
13614 if (COMPARISON_CLASS_P (arg0)
13615 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13616 op2,
13617 TREE_OPERAND (arg0, 1))
13618 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13620 location_t loc0 = expr_location_or (arg0, loc);
13621 tem = fold_invert_truthvalue (loc0, arg0);
13622 if (tem && COMPARISON_CLASS_P (tem))
13624 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13625 if (tem)
13626 return tem;
13630 /* If the second operand is simpler than the third, swap them
13631 since that produces better jump optimization results. */
13632 if (truth_value_p (TREE_CODE (arg0))
13633 && tree_swap_operands_p (op1, op2, false))
13635 location_t loc0 = expr_location_or (arg0, loc);
13636 /* See if this can be inverted. If it can't, possibly because
13637 it was a floating-point inequality comparison, don't do
13638 anything. */
13639 tem = fold_invert_truthvalue (loc0, arg0);
13640 if (tem)
13641 return fold_build3_loc (loc, code, type, tem, op2, op1);
13644 /* Convert A ? 1 : 0 to simply A. */
13645 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13646 : (integer_onep (op1)
13647 && !VECTOR_TYPE_P (type)))
13648 && integer_zerop (op2)
13649 /* If we try to convert OP0 to our type, the
13650 call to fold will try to move the conversion inside
13651 a COND, which will recurse. In that case, the COND_EXPR
13652 is probably the best choice, so leave it alone. */
13653 && type == TREE_TYPE (arg0))
13654 return pedantic_non_lvalue_loc (loc, arg0);
13656 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13657 over COND_EXPR in cases such as floating point comparisons. */
13658 if (integer_zerop (op1)
13659 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13660 : (integer_onep (op2)
13661 && !VECTOR_TYPE_P (type)))
13662 && truth_value_p (TREE_CODE (arg0)))
13663 return pedantic_non_lvalue_loc (loc,
13664 fold_convert_loc (loc, type,
13665 invert_truthvalue_loc (loc,
13666 arg0)));
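/* E.g. "a > b ? 0 : 1" folds to "!(a > b)", which stays correct for
   unordered floating-point operands where "a <= b" would not.  */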
13668 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13669 if (TREE_CODE (arg0) == LT_EXPR
13670 && integer_zerop (TREE_OPERAND (arg0, 1))
13671 && integer_zerop (op2)
13672 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13674 /* sign_bit_p looks through both zero and sign extensions,
13675 but for this optimization only sign extensions are
13676 usable. */
13677 tree tem2 = TREE_OPERAND (arg0, 0);
13678 while (tem != tem2)
13680 if (TREE_CODE (tem2) != NOP_EXPR
13681 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13683 tem = NULL_TREE;
13684 break;
13686 tem2 = TREE_OPERAND (tem2, 0);
13688 /* sign_bit_p only checks ARG1 bits within A's precision.
13689 If <sign bit of A> has wider type than A, bits outside
13690 of A's precision in <sign bit of A> need to be checked.
13691 If they are all 0, this optimization needs to be done
13692 in unsigned A's type; if they are all 1, in signed A's type;
13693 otherwise this can't be done. */
13694 if (tem
13695 && TYPE_PRECISION (TREE_TYPE (tem))
13696 < TYPE_PRECISION (TREE_TYPE (arg1))
13697 && TYPE_PRECISION (TREE_TYPE (tem))
13698 < TYPE_PRECISION (type))
13700 int inner_width, outer_width;
13701 tree tem_type;
13703 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13704 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13705 if (outer_width > TYPE_PRECISION (type))
13706 outer_width = TYPE_PRECISION (type);
13708 wide_int mask = wi::shifted_mask
13709 (inner_width, outer_width - inner_width, false,
13710 TYPE_PRECISION (TREE_TYPE (arg1)));
13712 wide_int common = mask & arg1;
13713 if (common == mask)
13715 tem_type = signed_type_for (TREE_TYPE (tem));
13716 tem = fold_convert_loc (loc, tem_type, tem);
13718 else if (common == 0)
13720 tem_type = unsigned_type_for (TREE_TYPE (tem));
13721 tem = fold_convert_loc (loc, tem_type, tem);
13723 else
13724 tem = NULL;
13727 if (tem)
13728 return
13729 fold_convert_loc (loc, type,
13730 fold_build2_loc (loc, BIT_AND_EXPR,
13731 TREE_TYPE (tem), tem,
13732 fold_convert_loc (loc,
13733 TREE_TYPE (tem),
13734 arg1)));
13737 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13738 already handled above. */
13739 if (TREE_CODE (arg0) == BIT_AND_EXPR
13740 && integer_onep (TREE_OPERAND (arg0, 1))
13741 && integer_zerop (op2)
13742 && integer_pow2p (arg1))
13744 tree tem = TREE_OPERAND (arg0, 0);
13745 STRIP_NOPS (tem);
13746 if (TREE_CODE (tem) == RSHIFT_EXPR
13747 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13748 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13749 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13750 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13751 TREE_OPERAND (tem, 0), arg1);
13754 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13755 is probably obsolete because the first operand should be a
13756 truth value (that's why we have the two cases above), but let's
13757 leave it in until we can confirm this for all front-ends. */
13758 if (integer_zerop (op2)
13759 && TREE_CODE (arg0) == NE_EXPR
13760 && integer_zerop (TREE_OPERAND (arg0, 1))
13761 && integer_pow2p (arg1)
13762 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13763 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13764 arg1, OEP_ONLY_CONST))
13765 return pedantic_non_lvalue_loc (loc,
13766 fold_convert_loc (loc, type,
13767 TREE_OPERAND (arg0, 0)));
13769 /* Disable the transformations below for vectors, since
13770 fold_binary_op_with_conditional_arg may undo them immediately,
13771 yielding an infinite loop. */
13772 if (code == VEC_COND_EXPR)
13773 return NULL_TREE;
13775 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13776 if (integer_zerop (op2)
13777 && truth_value_p (TREE_CODE (arg0))
13778 && truth_value_p (TREE_CODE (arg1))
13779 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13780 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13781 : TRUTH_ANDIF_EXPR,
13782 type, fold_convert_loc (loc, type, arg0), arg1);
13784 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13785 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13786 && truth_value_p (TREE_CODE (arg0))
13787 && truth_value_p (TREE_CODE (arg1))
13788 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13790 location_t loc0 = expr_location_or (arg0, loc);
13791 /* Only perform transformation if ARG0 is easily inverted. */
13792 tem = fold_invert_truthvalue (loc0, arg0);
13793 if (tem)
13794 return fold_build2_loc (loc, code == VEC_COND_EXPR
13795 ? BIT_IOR_EXPR
13796 : TRUTH_ORIF_EXPR,
13797 type, fold_convert_loc (loc, type, tem),
13798 arg1);
13801 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13802 if (integer_zerop (arg1)
13803 && truth_value_p (TREE_CODE (arg0))
13804 && truth_value_p (TREE_CODE (op2))
13805 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13807 location_t loc0 = expr_location_or (arg0, loc);
13808 /* Only perform transformation if ARG0 is easily inverted. */
13809 tem = fold_invert_truthvalue (loc0, arg0);
13810 if (tem)
13811 return fold_build2_loc (loc, code == VEC_COND_EXPR
13812 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13813 type, fold_convert_loc (loc, type, tem),
13814 op2);
13817 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13818 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13819 && truth_value_p (TREE_CODE (arg0))
13820 && truth_value_p (TREE_CODE (op2))
13821 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13822 return fold_build2_loc (loc, code == VEC_COND_EXPR
13823 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13824 type, fold_convert_loc (loc, type, arg0), op2);
13826 return NULL_TREE;
13828 case CALL_EXPR:
13829 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13830 of fold_ternary on them. */
13831 gcc_unreachable ();
13833 case BIT_FIELD_REF:
13834 if ((TREE_CODE (arg0) == VECTOR_CST
13835 || (TREE_CODE (arg0) == CONSTRUCTOR
13836 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13837 && (type == TREE_TYPE (TREE_TYPE (arg0))
13838 || (TREE_CODE (type) == VECTOR_TYPE
13839 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13841 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13842 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13843 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13844 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13846 if (n != 0
13847 && (idx % width) == 0
13848 && (n % width) == 0
13849 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13851 idx = idx / width;
13852 n = n / width;
13854 if (TREE_CODE (arg0) == VECTOR_CST)
13856 if (n == 1)
13857 return VECTOR_CST_ELT (arg0, idx);
13859 tree *vals = XALLOCAVEC (tree, n);
13860 for (unsigned i = 0; i < n; ++i)
13861 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13862 return build_vector (type, vals);
13865 /* Constructor elements can be subvectors. */
13866 unsigned HOST_WIDE_INT k = 1;
13867 if (CONSTRUCTOR_NELTS (arg0) != 0)
13869 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13870 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13871 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13874 /* We keep an exact subset of the constructor elements. */
13875 if ((idx % k) == 0 && (n % k) == 0)
13877 if (CONSTRUCTOR_NELTS (arg0) == 0)
13878 return build_constructor (type, NULL);
13879 idx /= k;
13880 n /= k;
13881 if (n == 1)
13883 if (idx < CONSTRUCTOR_NELTS (arg0))
13884 return CONSTRUCTOR_ELT (arg0, idx)->value;
13885 return build_zero_cst (type);
13888 vec<constructor_elt, va_gc> *vals;
13889 vec_alloc (vals, n);
13890 for (unsigned i = 0;
13891 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13892 ++i)
13893 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13894 CONSTRUCTOR_ELT
13895 (arg0, idx + i)->value);
13896 return build_constructor (type, vals);
13898 /* The bitfield references a single constructor element. */
13899 else if (idx + n <= (idx / k + 1) * k)
13901 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13902 return build_zero_cst (type);
13903 else if (n == k)
13904 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13905 else
13906 return fold_build3_loc (loc, code, type,
13907 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13908 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
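	  /* Worked example (hypothetical values): for a V4SI constant
	     v = {a, b, c, d}, BIT_FIELD_REF <v, 64, 64> has width = 32,
	     n = 64 / 32 = 2 and idx = 64 / 32 = 2, so the code above
	     folds it to the subvector {c, d}; with n == 1 it would
	     return the single element directly.  */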
13913 /* A BIT_FIELD_REF that references the full argument can be stripped.  */
13914 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13915 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13916 && integer_zerop (op2))
13917 return fold_convert_loc (loc, type, arg0);
13919 /* On constants we can use native encode/interpret to constant
13920 fold (nearly) all BIT_FIELD_REFs. */
13921 if (CONSTANT_CLASS_P (arg0)
13922 && can_native_interpret_type_p (type)
13923 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13924 /* This limitation should not be necessary; we just need to
13925 round this up to the mode size.  */
13926 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13927 /* Need bit-shifting of the buffer to relax the following. */
13928 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13930 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13931 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13932 unsigned HOST_WIDE_INT clen;
13933 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13934 /* ??? We cannot tell native_encode_expr to start at
13935 some random byte only. So limit us to a reasonable amount
13936 of work. */
13937 if (clen <= 4096)
13939 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13940 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13941 if (len > 0
13942 && len * BITS_PER_UNIT >= bitpos + bitsize)
13944 tree v = native_interpret_expr (type,
13945 b + bitpos / BITS_PER_UNIT,
13946 bitsize / BITS_PER_UNIT);
13947 if (v)
13948 return v;
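	  /* For illustration: given, say, a 64-bit INTEGER_CST, the
	     constant is first encoded into an 8-byte buffer in target
	     memory order, and an aligned 32-bit BIT_FIELD_REF is then
	     folded by reinterpreting the addressed 4 bytes; which half
	     of the value that selects depends on target endianness.  */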
13953 return NULL_TREE;
13955 case FMA_EXPR:
13956 /* For integers we can decompose the FMA if possible. */
13957 if (TREE_CODE (arg0) == INTEGER_CST
13958 && TREE_CODE (arg1) == INTEGER_CST)
13959 return fold_build2_loc (loc, PLUS_EXPR, type,
13960 const_binop (MULT_EXPR, arg0, arg1), arg2);
13961 if (integer_zerop (arg2))
13962 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13964 return fold_fma (loc, type, arg0, arg1, arg2);
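    /* Worked example (hypothetical constants): FMA_EXPR <3, 4, 5> on an
       integer type folds via the first case above to 3 * 4 + 5 = 17,
       and FMA_EXPR <x, y, 0> simplifies to x * y via the second.  */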
13966 case VEC_PERM_EXPR:
13967 if (TREE_CODE (arg2) == VECTOR_CST)
13969 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13970 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13971 unsigned char *sel2 = sel + nelts;
13972 bool need_mask_canon = false;
13973 bool need_mask_canon2 = false;
13974 bool all_in_vec0 = true;
13975 bool all_in_vec1 = true;
13976 bool maybe_identity = true;
13977 bool single_arg = (op0 == op1);
13978 bool changed = false;
13980 mask2 = 2 * nelts - 1;
13981 mask = single_arg ? (nelts - 1) : mask2;
13982 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13983 for (i = 0; i < nelts; i++)
13985 tree val = VECTOR_CST_ELT (arg2, i);
13986 if (TREE_CODE (val) != INTEGER_CST)
13987 return NULL_TREE;
13989 /* Make sure that the perm value is in an acceptable
13990 range. */
13991 wide_int t = val;
13992 need_mask_canon |= wi::gtu_p (t, mask);
13993 need_mask_canon2 |= wi::gtu_p (t, mask2);
13994 sel[i] = t.to_uhwi () & mask;
13995 sel2[i] = t.to_uhwi () & mask2;
13997 if (sel[i] < nelts)
13998 all_in_vec1 = false;
13999 else
14000 all_in_vec0 = false;
14002 if ((sel[i] & (nelts-1)) != i)
14003 maybe_identity = false;
14006 if (maybe_identity)
14008 if (all_in_vec0)
14009 return op0;
14010 if (all_in_vec1)
14011 return op1;
14014 if (all_in_vec0)
14015 op1 = op0;
14016 else if (all_in_vec1)
14018 op0 = op1;
14019 for (i = 0; i < nelts; i++)
14020 sel[i] -= nelts;
14021 need_mask_canon = true;
14024 if ((TREE_CODE (op0) == VECTOR_CST
14025 || TREE_CODE (op0) == CONSTRUCTOR)
14026 && (TREE_CODE (op1) == VECTOR_CST
14027 || TREE_CODE (op1) == CONSTRUCTOR))
14029 tree t = fold_vec_perm (type, op0, op1, sel);
14030 if (t != NULL_TREE)
14031 return t;
14034 if (op0 == op1 && !single_arg)
14035 changed = true;
14037 /* Some targets are deficient and fail to expand a single
14038 argument permutation while still allowing an equivalent
14039 2-argument version. */
14040 if (need_mask_canon && arg2 == op2
14041 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
14042 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
14044 need_mask_canon = need_mask_canon2;
14045 sel = sel2;
14048 if (need_mask_canon && arg2 == op2)
14050 tree *tsel = XALLOCAVEC (tree, nelts);
14051 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14052 for (i = 0; i < nelts; i++)
14053 tsel[i] = build_int_cst (eltype, sel[i]);
14054 op2 = build_vector (TREE_TYPE (arg2), tsel);
14055 changed = true;
14058 if (changed)
14059 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14061 return NULL_TREE;
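	/* Illustrative example (hypothetical selector): with nelts == 4
	   and op0 == op1, mask is 3, so a selector element of 5 is
	   canonicalized to 5 & 3 == 1; and when every element selects
	   from the second vector, op0 is replaced by op1 and nelts is
	   subtracted from each selector element.  */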
14063 default:
14064 return NULL_TREE;
14065 } /* switch (code) */
14068 /* Perform constant folding and related simplification of EXPR.
14069 The related simplifications include x*1 => x, x*0 => 0, etc.,
14070 and application of the associative law.
14071 NOP_EXPR conversions may be removed freely (as long as we
14072 are careful not to change the type of the overall expression).
14073 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14074 but we can constant-fold them if they have constant operands. */
14076 #ifdef ENABLE_FOLD_CHECKING
14077 # define fold(x) fold_1 (x)
14078 static tree fold_1 (tree);
14079 static
14080 #endif
14081 tree
14082 fold (tree expr)
14084 const tree t = expr;
14085 enum tree_code code = TREE_CODE (t);
14086 enum tree_code_class kind = TREE_CODE_CLASS (code);
14087 tree tem;
14088 location_t loc = EXPR_LOCATION (expr);
14090 /* Return right away if a constant. */
14091 if (kind == tcc_constant)
14092 return t;
14094 /* CALL_EXPR-like objects with variable numbers of operands are
14095 treated specially. */
14096 if (kind == tcc_vl_exp)
14098 if (code == CALL_EXPR)
14100 tem = fold_call_expr (loc, expr, false);
14101 return tem ? tem : expr;
14103 return expr;
14106 if (IS_EXPR_CODE_CLASS (kind))
14108 tree type = TREE_TYPE (t);
14109 tree op0, op1, op2;
14111 switch (TREE_CODE_LENGTH (code))
14113 case 1:
14114 op0 = TREE_OPERAND (t, 0);
14115 tem = fold_unary_loc (loc, code, type, op0);
14116 return tem ? tem : expr;
14117 case 2:
14118 op0 = TREE_OPERAND (t, 0);
14119 op1 = TREE_OPERAND (t, 1);
14120 tem = fold_binary_loc (loc, code, type, op0, op1);
14121 return tem ? tem : expr;
14122 case 3:
14123 op0 = TREE_OPERAND (t, 0);
14124 op1 = TREE_OPERAND (t, 1);
14125 op2 = TREE_OPERAND (t, 2);
14126 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14127 return tem ? tem : expr;
14128 default:
14129 break;
14133 switch (code)
14135 case ARRAY_REF:
14137 tree op0 = TREE_OPERAND (t, 0);
14138 tree op1 = TREE_OPERAND (t, 1);
14140 if (TREE_CODE (op1) == INTEGER_CST
14141 && TREE_CODE (op0) == CONSTRUCTOR
14142 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14144 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14145 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14146 unsigned HOST_WIDE_INT begin = 0;
14148 /* Find a matching index by means of a binary search. */
14149 while (begin != end)
14151 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14152 tree index = (*elts)[middle].index;
14154 if (TREE_CODE (index) == INTEGER_CST
14155 && tree_int_cst_lt (index, op1))
14156 begin = middle + 1;
14157 else if (TREE_CODE (index) == INTEGER_CST
14158 && tree_int_cst_lt (op1, index))
14159 end = middle;
14160 else if (TREE_CODE (index) == RANGE_EXPR
14161 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14162 begin = middle + 1;
14163 else if (TREE_CODE (index) == RANGE_EXPR
14164 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14165 end = middle;
14166 else
14167 return (*elts)[middle].value;
14171 return t;
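      /* Illustrative example (hypothetical constructor): the index list
	 is sorted, so for {[0]=10, [3]=40, [7]=80} a lookup of index 3
	 halves [begin, end) until the INTEGER_CST index 3 matches;
	 RANGE_EXPR indices match when op1 falls within their bounds.  */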
14174 /* Return a VECTOR_CST if possible. */
14175 case CONSTRUCTOR:
14177 tree type = TREE_TYPE (t);
14178 if (TREE_CODE (type) != VECTOR_TYPE)
14179 return t;
14181 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14182 unsigned HOST_WIDE_INT idx, pos = 0;
14183 tree value;
14185 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14187 if (!CONSTANT_CLASS_P (value))
14188 return t;
14189 if (TREE_CODE (value) == VECTOR_CST)
14191 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14192 vec[pos++] = VECTOR_CST_ELT (value, i);
14194 else
14195 vec[pos++] = value;
14197 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14198 vec[pos] = build_zero_cst (TREE_TYPE (type));
14200 return build_vector (type, vec);
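	/* Illustrative example (hypothetical values): a V4SI CONSTRUCTOR
	   {1, 2} becomes the VECTOR_CST {1, 2, 0, 0} (trailing elements
	   are zero-filled), while any VECTOR_CST values inside the
	   constructor are first flattened element by element.  */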
14203 case CONST_DECL:
14204 return fold (DECL_INITIAL (t));
14206 default:
14207 return t;
14208 } /* switch (code) */
14211 #ifdef ENABLE_FOLD_CHECKING
14212 #undef fold
14214 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14215 hash_table<pointer_hash<const tree_node> > *);
14216 static void fold_check_failed (const_tree, const_tree);
14217 void print_fold_checksum (const_tree);
14219 /* When --enable-checking=fold, compute a digest of expr before
14220 and after actual fold call to see if fold did not accidentally
14221 change original expr. */
14223 tree
14224 fold (tree expr)
14226 tree ret;
14227 struct md5_ctx ctx;
14228 unsigned char checksum_before[16], checksum_after[16];
14229 hash_table<pointer_hash<const tree_node> > ht (32);
14231 md5_init_ctx (&ctx);
14232 fold_checksum_tree (expr, &ctx, &ht);
14233 md5_finish_ctx (&ctx, checksum_before);
14234 ht.empty ();
14236 ret = fold_1 (expr);
14238 md5_init_ctx (&ctx);
14239 fold_checksum_tree (expr, &ctx, &ht);
14240 md5_finish_ctx (&ctx, checksum_after);
14242 if (memcmp (checksum_before, checksum_after, 16))
14243 fold_check_failed (expr, ret);
14245 return ret;
14248 void
14249 print_fold_checksum (const_tree expr)
14251 struct md5_ctx ctx;
14252 unsigned char checksum[16], cnt;
14253 hash_table<pointer_hash<const tree_node> > ht (32);
14255 md5_init_ctx (&ctx);
14256 fold_checksum_tree (expr, &ctx, &ht);
14257 md5_finish_ctx (&ctx, checksum);
14258 for (cnt = 0; cnt < 16; ++cnt)
14259 fprintf (stderr, "%02x", checksum[cnt]);
14260 putc ('\n', stderr);
14263 static void
14264 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14266 internal_error ("fold check: original tree changed by fold");
14269 static void
14270 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14271 hash_table<pointer_hash <const tree_node> > *ht)
14273 const tree_node **slot;
14274 enum tree_code code;
14275 union tree_node buf;
14276 int i, len;
14278 recursive_label:
14279 if (expr == NULL)
14280 return;
14281 slot = ht->find_slot (expr, INSERT);
14282 if (*slot != NULL)
14283 return;
14284 *slot = expr;
14285 code = TREE_CODE (expr);
14286 if (TREE_CODE_CLASS (code) == tcc_declaration
14287 && DECL_ASSEMBLER_NAME_SET_P (expr))
14289 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14290 memcpy ((char *) &buf, expr, tree_size (expr));
14291 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14292 expr = (tree) &buf;
14294 else if (TREE_CODE_CLASS (code) == tcc_type
14295 && (TYPE_POINTER_TO (expr)
14296 || TYPE_REFERENCE_TO (expr)
14297 || TYPE_CACHED_VALUES_P (expr)
14298 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14299 || TYPE_NEXT_VARIANT (expr)))
14301 /* Allow these fields to be modified. */
14302 tree tmp;
14303 memcpy ((char *) &buf, expr, tree_size (expr));
14304 expr = tmp = (tree) &buf;
14305 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14306 TYPE_POINTER_TO (tmp) = NULL;
14307 TYPE_REFERENCE_TO (tmp) = NULL;
14308 TYPE_NEXT_VARIANT (tmp) = NULL;
14309 if (TYPE_CACHED_VALUES_P (tmp))
14311 TYPE_CACHED_VALUES_P (tmp) = 0;
14312 TYPE_CACHED_VALUES (tmp) = NULL;
14315 md5_process_bytes (expr, tree_size (expr), ctx);
14316 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14317 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14318 if (TREE_CODE_CLASS (code) != tcc_type
14319 && TREE_CODE_CLASS (code) != tcc_declaration
14320 && code != TREE_LIST
14321 && code != SSA_NAME
14322 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14323 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14324 switch (TREE_CODE_CLASS (code))
14326 case tcc_constant:
14327 switch (code)
14329 case STRING_CST:
14330 md5_process_bytes (TREE_STRING_POINTER (expr),
14331 TREE_STRING_LENGTH (expr), ctx);
14332 break;
14333 case COMPLEX_CST:
14334 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14335 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14336 break;
14337 case VECTOR_CST:
14338 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14339 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14340 break;
14341 default:
14342 break;
14344 break;
14345 case tcc_exceptional:
14346 switch (code)
14348 case TREE_LIST:
14349 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14350 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14351 expr = TREE_CHAIN (expr);
14352 goto recursive_label;
14353 break;
14354 case TREE_VEC:
14355 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14356 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14357 break;
14358 default:
14359 break;
14361 break;
14362 case tcc_expression:
14363 case tcc_reference:
14364 case tcc_comparison:
14365 case tcc_unary:
14366 case tcc_binary:
14367 case tcc_statement:
14368 case tcc_vl_exp:
14369 len = TREE_OPERAND_LENGTH (expr);
14370 for (i = 0; i < len; ++i)
14371 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14372 break;
14373 case tcc_declaration:
14374 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14375 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14376 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14378 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14379 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14380 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14381 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14382 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14385 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14387 if (TREE_CODE (expr) == FUNCTION_DECL)
14389 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14390 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14392 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14394 break;
14395 case tcc_type:
14396 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14397 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14398 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14399 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14400 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14401 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14402 if (INTEGRAL_TYPE_P (expr)
14403 || SCALAR_FLOAT_TYPE_P (expr))
14405 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14406 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14408 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14409 if (TREE_CODE (expr) == RECORD_TYPE
14410 || TREE_CODE (expr) == UNION_TYPE
14411 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14412 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14413 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14414 break;
14415 default:
14416 break;
14420 /* Helper function for outputting the checksum of a tree T. When
14421 debugging with gdb, you can "define mynext" to be "next" followed
14422 by "call debug_fold_checksum (op0)", then just trace down till the
14423 outputs differ. */
14425 DEBUG_FUNCTION void
14426 debug_fold_checksum (const_tree t)
14428 int i;
14429 unsigned char checksum[16];
14430 struct md5_ctx ctx;
14431 hash_table<pointer_hash<const tree_node> > ht (32);
14433 md5_init_ctx (&ctx);
14434 fold_checksum_tree (t, &ctx, &ht);
14435 md5_finish_ctx (&ctx, checksum);
14436 ht.empty ();
14438 for (i = 0; i < 16; i++)
14439 fprintf (stderr, "%d ", checksum[i]);
14441 fprintf (stderr, "\n");
14444 #endif
14446 /* Fold a unary tree expression with code CODE of type TYPE with an
14447 operand OP0. LOC is the location of the resulting expression.
14448 Return a folded expression if successful. Otherwise, return a tree
14449 expression with code CODE of type TYPE with an operand OP0. */
14451 tree
14452 fold_build1_stat_loc (location_t loc,
14453 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14455 tree tem;
14456 #ifdef ENABLE_FOLD_CHECKING
14457 unsigned char checksum_before[16], checksum_after[16];
14458 struct md5_ctx ctx;
14459 hash_table<pointer_hash<const tree_node> > ht (32);
14461 md5_init_ctx (&ctx);
14462 fold_checksum_tree (op0, &ctx, &ht);
14463 md5_finish_ctx (&ctx, checksum_before);
14464 ht.empty ();
14465 #endif
14467 tem = fold_unary_loc (loc, code, type, op0);
14468 if (!tem)
14469 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14471 #ifdef ENABLE_FOLD_CHECKING
14472 md5_init_ctx (&ctx);
14473 fold_checksum_tree (op0, &ctx, &ht);
14474 md5_finish_ctx (&ctx, checksum_after);
14476 if (memcmp (checksum_before, checksum_after, 16))
14477 fold_check_failed (op0, tem);
14478 #endif
14479 return tem;
14482 /* Fold a binary tree expression with code CODE of type TYPE with
14483 operands OP0 and OP1. LOC is the location of the resulting
14484 expression. Return a folded expression if successful. Otherwise,
14485 return a tree expression with code CODE of type TYPE with operands
14486 OP0 and OP1. */
14488 tree
14489 fold_build2_stat_loc (location_t loc,
14490 enum tree_code code, tree type, tree op0, tree op1
14491 MEM_STAT_DECL)
14493 tree tem;
14494 #ifdef ENABLE_FOLD_CHECKING
14495 unsigned char checksum_before_op0[16],
14496 checksum_before_op1[16],
14497 checksum_after_op0[16],
14498 checksum_after_op1[16];
14499 struct md5_ctx ctx;
14500 hash_table<pointer_hash<const tree_node> > ht (32);
14502 md5_init_ctx (&ctx);
14503 fold_checksum_tree (op0, &ctx, &ht);
14504 md5_finish_ctx (&ctx, checksum_before_op0);
14505 ht.empty ();
14507 md5_init_ctx (&ctx);
14508 fold_checksum_tree (op1, &ctx, &ht);
14509 md5_finish_ctx (&ctx, checksum_before_op1);
14510 ht.empty ();
14511 #endif
14513 tem = fold_binary_loc (loc, code, type, op0, op1);
14514 if (!tem)
14515 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14517 #ifdef ENABLE_FOLD_CHECKING
14518 md5_init_ctx (&ctx);
14519 fold_checksum_tree (op0, &ctx, &ht);
14520 md5_finish_ctx (&ctx, checksum_after_op0);
14521 ht.empty ();
14523 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14524 fold_check_failed (op0, tem);
14526 md5_init_ctx (&ctx);
14527 fold_checksum_tree (op1, &ctx, &ht);
14528 md5_finish_ctx (&ctx, checksum_after_op1);
14530 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14531 fold_check_failed (op1, tem);
14532 #endif
14533 return tem;
14536 /* Fold a ternary tree expression with code CODE of type TYPE with
14537 operands OP0, OP1, and OP2. Return a folded expression if
14538 successful. Otherwise, return a tree expression with code CODE of
14539 type TYPE with operands OP0, OP1, and OP2. */
14541 tree
14542 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14543 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14545 tree tem;
14546 #ifdef ENABLE_FOLD_CHECKING
14547 unsigned char checksum_before_op0[16],
14548 checksum_before_op1[16],
14549 checksum_before_op2[16],
14550 checksum_after_op0[16],
14551 checksum_after_op1[16],
14552 checksum_after_op2[16];
14553 struct md5_ctx ctx;
14554 hash_table<pointer_hash<const tree_node> > ht (32);
14556 md5_init_ctx (&ctx);
14557 fold_checksum_tree (op0, &ctx, &ht);
14558 md5_finish_ctx (&ctx, checksum_before_op0);
14559 ht.empty ();
14561 md5_init_ctx (&ctx);
14562 fold_checksum_tree (op1, &ctx, &ht);
14563 md5_finish_ctx (&ctx, checksum_before_op1);
14564 ht.empty ();
14566 md5_init_ctx (&ctx);
14567 fold_checksum_tree (op2, &ctx, &ht);
14568 md5_finish_ctx (&ctx, checksum_before_op2);
14569 ht.empty ();
14570 #endif
14572 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14573 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14574 if (!tem)
14575 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14577 #ifdef ENABLE_FOLD_CHECKING
14578 md5_init_ctx (&ctx);
14579 fold_checksum_tree (op0, &ctx, &ht);
14580 md5_finish_ctx (&ctx, checksum_after_op0);
14581 ht.empty ();
14583 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14584 fold_check_failed (op0, tem);
14586 md5_init_ctx (&ctx);
14587 fold_checksum_tree (op1, &ctx, &ht);
14588 md5_finish_ctx (&ctx, checksum_after_op1);
14589 ht.empty ();
14591 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14592 fold_check_failed (op1, tem);
14594 md5_init_ctx (&ctx);
14595 fold_checksum_tree (op2, &ctx, &ht);
14596 md5_finish_ctx (&ctx, checksum_after_op2);
14598 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14599 fold_check_failed (op2, tem);
14600 #endif
14601 return tem;
14604 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14605 arguments in ARGARRAY, and a null static chain.
14606 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14607 of type TYPE from the given operands as constructed by build_call_array. */
14609 tree
14610 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14611 int nargs, tree *argarray)
14613 tree tem;
14614 #ifdef ENABLE_FOLD_CHECKING
14615 unsigned char checksum_before_fn[16],
14616 checksum_before_arglist[16],
14617 checksum_after_fn[16],
14618 checksum_after_arglist[16];
14619 struct md5_ctx ctx;
14620 hash_table<pointer_hash<const tree_node> > ht (32);
14621 int i;
14623 md5_init_ctx (&ctx);
14624 fold_checksum_tree (fn, &ctx, &ht);
14625 md5_finish_ctx (&ctx, checksum_before_fn);
14626 ht.empty ();
14628 md5_init_ctx (&ctx);
14629 for (i = 0; i < nargs; i++)
14630 fold_checksum_tree (argarray[i], &ctx, &ht);
14631 md5_finish_ctx (&ctx, checksum_before_arglist);
14632 ht.empty ();
14633 #endif
14635 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14637 #ifdef ENABLE_FOLD_CHECKING
14638 md5_init_ctx (&ctx);
14639 fold_checksum_tree (fn, &ctx, &ht);
14640 md5_finish_ctx (&ctx, checksum_after_fn);
14641 ht.empty ();
14643 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14644 fold_check_failed (fn, tem);
14646 md5_init_ctx (&ctx);
14647 for (i = 0; i < nargs; i++)
14648 fold_checksum_tree (argarray[i], &ctx, &ht);
14649 md5_finish_ctx (&ctx, checksum_after_arglist);
14651 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14652 fold_check_failed (NULL_TREE, tem);
14653 #endif
14654 return tem;
14657 /* Perform constant folding and related simplification of initializer
14658 expression EXPR. These behave identically to "fold_buildN" but ignore
14659 potential run-time traps and exceptions that fold must preserve. */
14661 #define START_FOLD_INIT \
14662 int saved_signaling_nans = flag_signaling_nans;\
14663 int saved_trapping_math = flag_trapping_math;\
14664 int saved_rounding_math = flag_rounding_math;\
14665 int saved_trapv = flag_trapv;\
14666 int saved_folding_initializer = folding_initializer;\
14667 flag_signaling_nans = 0;\
14668 flag_trapping_math = 0;\
14669 flag_rounding_math = 0;\
14670 flag_trapv = 0;\
14671 folding_initializer = 1;
14673 #define END_FOLD_INIT \
14674 flag_signaling_nans = saved_signaling_nans;\
14675 flag_trapping_math = saved_trapping_math;\
14676 flag_rounding_math = saved_rounding_math;\
14677 flag_trapv = saved_trapv;\
14678 folding_initializer = saved_folding_initializer;
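/* Usage sketch (hypothetical operands): with -ftrapping-math in effect,
   fold_build2_loc (loc, RDIV_EXPR, type, one, zero) must preserve the
   possibly-trapping division, while fold_build2_initializer_loc may fold
   it, since a static initializer is evaluated at translation time and
   cannot trap at run time.  */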
14680 tree
14681 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14682 tree type, tree op)
14684 tree result;
14685 START_FOLD_INIT;
14687 result = fold_build1_loc (loc, code, type, op);
14689 END_FOLD_INIT;
14690 return result;
14693 tree
14694 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14695 tree type, tree op0, tree op1)
14697 tree result;
14698 START_FOLD_INIT;
14700 result = fold_build2_loc (loc, code, type, op0, op1);
14702 END_FOLD_INIT;
14703 return result;
14706 tree
14707 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14708 int nargs, tree *argarray)
14710 tree result;
14711 START_FOLD_INIT;
14713 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14715 END_FOLD_INIT;
14716 return result;
14719 #undef START_FOLD_INIT
14720 #undef END_FOLD_INIT
14722 /* Determine whether the first argument is a multiple of the second argument.
14723 Return 0 if it is not, or if we cannot easily determine it to be.
14725 An example of the sort of thing we care about (at this point; this routine
14726 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14727 fold cases do now) is discovering that
14729 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14731 is a multiple of
14733 SAVE_EXPR (J * 8)
14735 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14737 This code also handles discovering that
14739 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14741 is a multiple of 8 so we don't have to worry about dealing with a
14742 possible remainder.
14744 Note that we *look* inside a SAVE_EXPR only to determine how it was
14745 calculated; it is not safe for fold to do much of anything else with the
14746 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14747 at run time. For example, the latter example above *cannot* be implemented
14748 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14749 evaluation time of the original SAVE_EXPR is not necessarily the same at
14750 the time the new expression is evaluated. The only optimization of this
14751 sort that would be valid is changing
14753 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14755 divided by 8 to
14757 SAVE_EXPR (I) * SAVE_EXPR (J)
14759 (where the same SAVE_EXPR (J) is used in the original and the
14760 transformed version). */
14762 int
14763 multiple_of_p (tree type, const_tree top, const_tree bottom)
14765 if (operand_equal_p (top, bottom, 0))
14766 return 1;
14768 if (TREE_CODE (type) != INTEGER_TYPE)
14769 return 0;
14771 switch (TREE_CODE (top))
14773 case BIT_AND_EXPR:
14774 /* Bitwise and provides a power of two multiple. If the mask is
14775 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14776 if (!integer_pow2p (bottom))
14777 return 0;
14778 /* FALLTHRU */
14780 case MULT_EXPR:
14781 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14782 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14784 case PLUS_EXPR:
14785 case MINUS_EXPR:
14786 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14787 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14789 case LSHIFT_EXPR:
14790 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14792 tree op1, t1;
14794 op1 = TREE_OPERAND (top, 1);
14795 /* const_binop may not detect overflow correctly,
14796 so check for it explicitly here. */
14797 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14798 && 0 != (t1 = fold_convert (type,
14799 const_binop (LSHIFT_EXPR,
14800 size_one_node,
14801 op1)))
14802 && !TREE_OVERFLOW (t1))
14803 return multiple_of_p (type, t1, bottom);
14805 return 0;
14807 case NOP_EXPR:
14808 /* Can't handle conversions from non-integral or wider integral type. */
14809 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14810 || (TYPE_PRECISION (type)
14811 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14812 return 0;
14814 /* .. fall through ... */
14816 case SAVE_EXPR:
14817 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14819 case COND_EXPR:
14820 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14821 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14823 case INTEGER_CST:
14824 if (TREE_CODE (bottom) != INTEGER_CST
14825 || integer_zerop (bottom)
14826 || (TYPE_UNSIGNED (type)
14827 && (tree_int_cst_sgn (top) < 0
14828 || tree_int_cst_sgn (bottom) < 0)))
14829 return 0;
14830 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14831 SIGNED);
14833 default:
14834 return 0;
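/* Worked example (hypothetical trees): multiple_of_p (sizetype,
   J * 8 + 16, 8) returns 1: the PLUS_EXPR case requires both operands
   to be multiples, J * 8 satisfies the MULT_EXPR case through its
   operand 8, and 16 satisfies the INTEGER_CST case since 16 % 8 == 0.  */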
14838 /* Return true if CODE or TYPE is known to be non-negative. */
14840 static bool
14841 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14843 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14844 && truth_value_p (code))
14845 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14846 have a signed:1 type (where the values are -1 and 0).  */
14847 return true;
14848 return false;
14851 /* Return true if (CODE OP0) is known to be non-negative. If the return
14852 value is based on the assumption that signed overflow is undefined,
14853 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14854 *STRICT_OVERFLOW_P. */
14856 bool
14857 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14858 bool *strict_overflow_p)
14860 if (TYPE_UNSIGNED (type))
14861 return true;
14863 switch (code)
14865 case ABS_EXPR:
14866 /* We can't return 1 if flag_wrapv is set because
14867 ABS_EXPR<INT_MIN> = INT_MIN. */
14868 if (!INTEGRAL_TYPE_P (type))
14869 return true;
14870 if (TYPE_OVERFLOW_UNDEFINED (type))
14872 *strict_overflow_p = true;
14873 return true;
14875 break;
14877 case NON_LVALUE_EXPR:
14878 case FLOAT_EXPR:
14879 case FIX_TRUNC_EXPR:
14880 return tree_expr_nonnegative_warnv_p (op0,
14881 strict_overflow_p);
14883 case NOP_EXPR:
14885 tree inner_type = TREE_TYPE (op0);
14886 tree outer_type = type;
14888 if (TREE_CODE (outer_type) == REAL_TYPE)
14890 if (TREE_CODE (inner_type) == REAL_TYPE)
14891 return tree_expr_nonnegative_warnv_p (op0,
14892 strict_overflow_p);
14893 if (INTEGRAL_TYPE_P (inner_type))
14895 if (TYPE_UNSIGNED (inner_type))
14896 return true;
14897 return tree_expr_nonnegative_warnv_p (op0,
14898 strict_overflow_p);
14901 else if (INTEGRAL_TYPE_P (outer_type))
14903 if (TREE_CODE (inner_type) == REAL_TYPE)
14904 return tree_expr_nonnegative_warnv_p (op0,
14905 strict_overflow_p);
14906 if (INTEGRAL_TYPE_P (inner_type))
14907 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14908 && TYPE_UNSIGNED (inner_type);
14911 break;
14913 default:
14914 return tree_simple_nonnegative_warnv_p (code, type);
14917 /* We don't know sign of `t', so be conservative and return false. */
14918 return false;
14921 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14922 value is based on the assumption that signed overflow is undefined,
14923 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14924 *STRICT_OVERFLOW_P. */
14926 bool
14927 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14928 tree op1, bool *strict_overflow_p)
14930 if (TYPE_UNSIGNED (type))
14931 return true;
14933 switch (code)
14935 case POINTER_PLUS_EXPR:
14936 case PLUS_EXPR:
14937 if (FLOAT_TYPE_P (type))
14938 return (tree_expr_nonnegative_warnv_p (op0,
14939 strict_overflow_p)
14940 && tree_expr_nonnegative_warnv_p (op1,
14941 strict_overflow_p));
14943 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14944 both unsigned and at least 2 bits shorter than the result. */
14945 if (TREE_CODE (type) == INTEGER_TYPE
14946 && TREE_CODE (op0) == NOP_EXPR
14947 && TREE_CODE (op1) == NOP_EXPR)
14949 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14950 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14951 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14952 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14954 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14955 TYPE_PRECISION (inner2)) + 1;
14956 return prec < TYPE_PRECISION (type);
14959 break;
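      /* Worked example: two unsigned 8-bit values zero-extended into a
	 32-bit sum give prec = MAX (8, 8) + 1 = 9 < 32; the sum is at
	 most 255 + 255 = 510, which cannot reach the sign bit.  */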
14961 case MULT_EXPR:
14962 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14964 /* x * x is always non-negative for floating point x
14965 or without overflow. */
14966 if (operand_equal_p (op0, op1, 0)
14967 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14968 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14970 if (TYPE_OVERFLOW_UNDEFINED (type))
14971 *strict_overflow_p = true;
14972 return true;
14976 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14977 both unsigned and their combined precision is less than that of the result.  */
14978 if (TREE_CODE (type) == INTEGER_TYPE
14979 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14980 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14982 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14983 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14984 : TREE_TYPE (op0);
14985 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14986 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14987 : TREE_TYPE (op1);
14989 bool unsigned0 = TYPE_UNSIGNED (inner0);
14990 bool unsigned1 = TYPE_UNSIGNED (inner1);
14992 if (TREE_CODE (op0) == INTEGER_CST)
14993 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14995 if (TREE_CODE (op1) == INTEGER_CST)
14996 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14998 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14999 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15001 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15002 ? tree_int_cst_min_precision (op0, UNSIGNED)
15003 : TYPE_PRECISION (inner0);
15005 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15006 ? tree_int_cst_min_precision (op1, UNSIGNED)
15007 : TYPE_PRECISION (inner1);
15009 return precision0 + precision1 < TYPE_PRECISION (type);
15012 return false;
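      /* Worked example: two unsigned 8-bit values multiplied in a 32-bit
	 type give 8 + 8 = 16 < 32; the product is at most
	 255 * 255 = 65025, well below the sign bit.  An INTEGER_CST
	 operand contributes only its minimum unsigned precision.  */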
15014 case BIT_AND_EXPR:
15015 case MAX_EXPR:
15016 return (tree_expr_nonnegative_warnv_p (op0,
15017 strict_overflow_p)
15018 || tree_expr_nonnegative_warnv_p (op1,
15019 strict_overflow_p));
15021 case BIT_IOR_EXPR:
15022 case BIT_XOR_EXPR:
15023 case MIN_EXPR:
15024 case RDIV_EXPR:
15025 case TRUNC_DIV_EXPR:
15026 case CEIL_DIV_EXPR:
15027 case FLOOR_DIV_EXPR:
15028 case ROUND_DIV_EXPR:
15029 return (tree_expr_nonnegative_warnv_p (op0,
15030 strict_overflow_p)
15031 && tree_expr_nonnegative_warnv_p (op1,
15032 strict_overflow_p));
15034 case TRUNC_MOD_EXPR:
15035 case CEIL_MOD_EXPR:
15036 case FLOOR_MOD_EXPR:
15037 case ROUND_MOD_EXPR:
15038 return tree_expr_nonnegative_warnv_p (op0,
15039 strict_overflow_p);
15040 default:
15041 return tree_simple_nonnegative_warnv_p (code, type);
15044 /* We don't know sign of `t', so be conservative and return false. */
15045 return false;
15048 /* Return true if T is known to be non-negative. If the return
15049 value is based on the assumption that signed overflow is undefined,
15050 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15051 *STRICT_OVERFLOW_P. */
15053 bool
15054 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15056 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15057 return true;
15059 switch (TREE_CODE (t))
15061 case INTEGER_CST:
15062 return tree_int_cst_sgn (t) >= 0;
15064 case REAL_CST:
15065 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15067 case FIXED_CST:
15068 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15070 case COND_EXPR:
15071 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15072 strict_overflow_p)
15073 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15074 strict_overflow_p));
15075 default:
15076 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15077 TREE_TYPE (t));
15079 /* We don't know sign of `t', so be conservative and return false. */
15080 return false;
15083 /* Return true if T is known to be non-negative. If the return
15084 value is based on the assumption that signed overflow is undefined,
15085 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15086 *STRICT_OVERFLOW_P. */
15088 bool
15089 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15090 tree arg0, tree arg1, bool *strict_overflow_p)
15092 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15093 switch (DECL_FUNCTION_CODE (fndecl))
15095 CASE_FLT_FN (BUILT_IN_ACOS):
15096 CASE_FLT_FN (BUILT_IN_ACOSH):
15097 CASE_FLT_FN (BUILT_IN_CABS):
15098 CASE_FLT_FN (BUILT_IN_COSH):
15099 CASE_FLT_FN (BUILT_IN_ERFC):
15100 CASE_FLT_FN (BUILT_IN_EXP):
15101 CASE_FLT_FN (BUILT_IN_EXP10):
15102 CASE_FLT_FN (BUILT_IN_EXP2):
15103 CASE_FLT_FN (BUILT_IN_FABS):
15104 CASE_FLT_FN (BUILT_IN_FDIM):
15105 CASE_FLT_FN (BUILT_IN_HYPOT):
15106 CASE_FLT_FN (BUILT_IN_POW10):
15107 CASE_INT_FN (BUILT_IN_FFS):
15108 CASE_INT_FN (BUILT_IN_PARITY):
15109 CASE_INT_FN (BUILT_IN_POPCOUNT):
15110 CASE_INT_FN (BUILT_IN_CLZ):
15111 CASE_INT_FN (BUILT_IN_CLRSB):
15112 case BUILT_IN_BSWAP32:
15113 case BUILT_IN_BSWAP64:
15114 /* Always true. */
15115 return true;
15117 CASE_FLT_FN (BUILT_IN_SQRT):
15118 /* sqrt(-0.0) is -0.0. */
15119 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15120 return true;
15121 return tree_expr_nonnegative_warnv_p (arg0,
15122 strict_overflow_p);
15124 CASE_FLT_FN (BUILT_IN_ASINH):
15125 CASE_FLT_FN (BUILT_IN_ATAN):
15126 CASE_FLT_FN (BUILT_IN_ATANH):
15127 CASE_FLT_FN (BUILT_IN_CBRT):
15128 CASE_FLT_FN (BUILT_IN_CEIL):
15129 CASE_FLT_FN (BUILT_IN_ERF):
15130 CASE_FLT_FN (BUILT_IN_EXPM1):
15131 CASE_FLT_FN (BUILT_IN_FLOOR):
15132 CASE_FLT_FN (BUILT_IN_FMOD):
15133 CASE_FLT_FN (BUILT_IN_FREXP):
15134 CASE_FLT_FN (BUILT_IN_ICEIL):
15135 CASE_FLT_FN (BUILT_IN_IFLOOR):
15136 CASE_FLT_FN (BUILT_IN_IRINT):
15137 CASE_FLT_FN (BUILT_IN_IROUND):
15138 CASE_FLT_FN (BUILT_IN_LCEIL):
15139 CASE_FLT_FN (BUILT_IN_LDEXP):
15140 CASE_FLT_FN (BUILT_IN_LFLOOR):
15141 CASE_FLT_FN (BUILT_IN_LLCEIL):
15142 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15143 CASE_FLT_FN (BUILT_IN_LLRINT):
15144 CASE_FLT_FN (BUILT_IN_LLROUND):
15145 CASE_FLT_FN (BUILT_IN_LRINT):
15146 CASE_FLT_FN (BUILT_IN_LROUND):
15147 CASE_FLT_FN (BUILT_IN_MODF):
15148 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15149 CASE_FLT_FN (BUILT_IN_RINT):
15150 CASE_FLT_FN (BUILT_IN_ROUND):
15151 CASE_FLT_FN (BUILT_IN_SCALB):
15152 CASE_FLT_FN (BUILT_IN_SCALBLN):
15153 CASE_FLT_FN (BUILT_IN_SCALBN):
15154 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15155 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15156 CASE_FLT_FN (BUILT_IN_SINH):
15157 CASE_FLT_FN (BUILT_IN_TANH):
15158 CASE_FLT_FN (BUILT_IN_TRUNC):
15159 /* True if the 1st argument is nonnegative. */
15160 return tree_expr_nonnegative_warnv_p (arg0,
15161 strict_overflow_p);
15163 CASE_FLT_FN (BUILT_IN_FMAX):
15164 /* True if the 1st OR 2nd arguments are nonnegative. */
15165 return (tree_expr_nonnegative_warnv_p (arg0,
15166 strict_overflow_p)
15167 || (tree_expr_nonnegative_warnv_p (arg1,
15168 strict_overflow_p)));
15170 CASE_FLT_FN (BUILT_IN_FMIN):
15171 /* True if the 1st AND 2nd arguments are nonnegative. */
15172 return (tree_expr_nonnegative_warnv_p (arg0,
15173 strict_overflow_p)
15174 && (tree_expr_nonnegative_warnv_p (arg1,
15175 strict_overflow_p)));
15177 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15178 /* True if the 2nd argument is nonnegative. */
15179 return tree_expr_nonnegative_warnv_p (arg1,
15180 strict_overflow_p);
15182 CASE_FLT_FN (BUILT_IN_POWI):
15183 /* True if the 1st argument is nonnegative or the second
15184 argument is an even integer. */
15185 if (TREE_CODE (arg1) == INTEGER_CST
15186 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15187 return true;
15188 return tree_expr_nonnegative_warnv_p (arg0,
15189 strict_overflow_p);
15191 CASE_FLT_FN (BUILT_IN_POW):
15192 /* True if the 1st argument is nonnegative or the second
15193 argument is an even integer valued real. */
15194 if (TREE_CODE (arg1) == REAL_CST)
15196 REAL_VALUE_TYPE c;
15197 HOST_WIDE_INT n;
15199 c = TREE_REAL_CST (arg1);
15200 n = real_to_integer (&c);
15201 if ((n & 1) == 0)
15203 REAL_VALUE_TYPE cint;
15204 real_from_integer (&cint, VOIDmode, n, SIGNED);
15205 if (real_identical (&c, &cint))
15206 return true;
15209 return tree_expr_nonnegative_warnv_p (arg0,
15210 strict_overflow_p);
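      /* Illustrative example: for pow (x, 4.0) the REAL_CST converts to
	 n = 4; (n & 1) == 0 and 4.0 is exactly integer-valued, so the
	 call is treated as non-negative for any x.  For pow (x, 3.0)
	 the result instead depends on x being non-negative.  */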
15212 default:
15213 break;
15215 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15216 type);
15219 /* Return true if T is known to be non-negative. If the return
15220 value is based on the assumption that signed overflow is undefined,
15221 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15222 *STRICT_OVERFLOW_P. */
15224 static bool
15225 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15227 enum tree_code code = TREE_CODE (t);
15228 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15229 return true;
15231 switch (code)
15233 case TARGET_EXPR:
15235 tree temp = TARGET_EXPR_SLOT (t);
15236 t = TARGET_EXPR_INITIAL (t);
15238 /* If the initializer is non-void, then it's a normal expression
15239 that will be assigned to the slot. */
15240 if (!VOID_TYPE_P (t))
15241 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15243 /* Otherwise, the initializer sets the slot in some way. One common
15244 way is an assignment statement at the end of the initializer. */
15245 while (1)
15247 if (TREE_CODE (t) == BIND_EXPR)
15248 t = expr_last (BIND_EXPR_BODY (t));
15249 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15250 || TREE_CODE (t) == TRY_CATCH_EXPR)
15251 t = expr_last (TREE_OPERAND (t, 0));
15252 else if (TREE_CODE (t) == STATEMENT_LIST)
15253 t = expr_last (t);
15254 else
15255 break;
15257 if (TREE_CODE (t) == MODIFY_EXPR
15258 && TREE_OPERAND (t, 0) == temp)
15259 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15260 strict_overflow_p);
15262 return false;
15265 case CALL_EXPR:
15267 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15268 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15270 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15271 get_callee_fndecl (t),
15272 arg0,
15273 arg1,
15274 strict_overflow_p);
15276 case COMPOUND_EXPR:
15277 case MODIFY_EXPR:
15278 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15279 strict_overflow_p);
15280 case BIND_EXPR:
15281 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15282 strict_overflow_p);
15283 case SAVE_EXPR:
15284 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15285 strict_overflow_p);
15287 default:
15288 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15289 TREE_TYPE (t));
15292 /* We don't know sign of `t', so be conservative and return false. */
15293 return false;
15296 /* Return true if T is known to be non-negative. If the return
15297 value is based on the assumption that signed overflow is undefined,
15298 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15299 *STRICT_OVERFLOW_P. */
15301 bool
15302 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15304 enum tree_code code;
15305 if (t == error_mark_node)
15306 return false;
15308 code = TREE_CODE (t);
15309 switch (TREE_CODE_CLASS (code))
15311 case tcc_binary:
15312 case tcc_comparison:
15313 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15314 TREE_TYPE (t),
15315 TREE_OPERAND (t, 0),
15316 TREE_OPERAND (t, 1),
15317 strict_overflow_p);
15319 case tcc_unary:
15320 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15321 TREE_TYPE (t),
15322 TREE_OPERAND (t, 0),
15323 strict_overflow_p);
15325 case tcc_constant:
15326 case tcc_declaration:
15327 case tcc_reference:
15328 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15330 default:
15331 break;
15334 switch (code)
15336 case TRUTH_AND_EXPR:
15337 case TRUTH_OR_EXPR:
15338 case TRUTH_XOR_EXPR:
15339 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15340 TREE_TYPE (t),
15341 TREE_OPERAND (t, 0),
15342 TREE_OPERAND (t, 1),
15343 strict_overflow_p);
15344 case TRUTH_NOT_EXPR:
15345 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15346 TREE_TYPE (t),
15347 TREE_OPERAND (t, 0),
15348 strict_overflow_p);
15350 case COND_EXPR:
15351 case CONSTRUCTOR:
15352 case OBJ_TYPE_REF:
15353 case ASSERT_EXPR:
15354 case ADDR_EXPR:
15355 case WITH_SIZE_EXPR:
15356 case SSA_NAME:
15357 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15359 default:
15360 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15364 /* Return true if `t' is known to be non-negative. Handle warnings
15365 about undefined signed overflow. */
15367 bool
15368 tree_expr_nonnegative_p (tree t)
15370 bool ret, strict_overflow_p;
15372 strict_overflow_p = false;
15373 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15374 if (strict_overflow_p)
15375 fold_overflow_warning (("assuming signed overflow does not occur when "
15376 "determining that expression is always "
15377 "non-negative"),
15378 WARN_STRICT_OVERFLOW_MISC);
15379 return ret;
15383 /* Return true when (CODE OP0) is known to be nonzero.
15384 For floating point we further ensure that it is not denormal.
15385 Similar logic is present in nonzero_address_p in rtlanal.c.
15387 If the return value is based on the assumption that signed overflow
15388 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15389 change *STRICT_OVERFLOW_P. */
15391 bool
15392 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15393 bool *strict_overflow_p)
15395 switch (code)
15397 case ABS_EXPR:
15398 return tree_expr_nonzero_warnv_p (op0,
15399 strict_overflow_p);
15401 case NOP_EXPR:
15403 tree inner_type = TREE_TYPE (op0);
15404 tree outer_type = type;
15406 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15407 && tree_expr_nonzero_warnv_p (op0,
15408 strict_overflow_p));
15410 break;
15412 case NON_LVALUE_EXPR:
15413 return tree_expr_nonzero_warnv_p (op0,
15414 strict_overflow_p);
15416 default:
15417 break;
15420 return false;
15423 /* Return true when (CODE OP0 OP1) is known to be nonzero.
15424 For floating point we further ensure that it is not denormal.
15425 Similar logic is present in nonzero_address_p in rtlanal.c.
15427 If the return value is based on the assumption that signed overflow
15428 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15429 change *STRICT_OVERFLOW_P. */
15431 bool
15432 tree_binary_nonzero_warnv_p (enum tree_code code,
15433 tree type,
15434 tree op0,
15435 tree op1, bool *strict_overflow_p)
15437 bool sub_strict_overflow_p;
15438 switch (code)
15440 case POINTER_PLUS_EXPR:
15441 case PLUS_EXPR:
15442 if (TYPE_OVERFLOW_UNDEFINED (type))
15444 /* With the presence of negative values it is hard
15445 to say something. */
15446 sub_strict_overflow_p = false;
15447 if (!tree_expr_nonnegative_warnv_p (op0,
15448 &sub_strict_overflow_p)
15449 || !tree_expr_nonnegative_warnv_p (op1,
15450 &sub_strict_overflow_p))
15451 return false;
15452 /* One of the operands must be positive and the other non-negative.  */
15453 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15454 overflows, on a twos-complement machine the sum of two
15455 nonnegative numbers can never be zero. */
15456 return (tree_expr_nonzero_warnv_p (op0,
15457 strict_overflow_p)
15458 || tree_expr_nonzero_warnv_p (op1,
15459 strict_overflow_p));
15461 break;
15463 case MULT_EXPR:
15464 if (TYPE_OVERFLOW_UNDEFINED (type))
15466 if (tree_expr_nonzero_warnv_p (op0,
15467 strict_overflow_p)
15468 && tree_expr_nonzero_warnv_p (op1,
15469 strict_overflow_p))
15471 *strict_overflow_p = true;
15472 return true;
15475 break;
15477 case MIN_EXPR:
15478 sub_strict_overflow_p = false;
15479 if (tree_expr_nonzero_warnv_p (op0,
15480 &sub_strict_overflow_p)
15481 && tree_expr_nonzero_warnv_p (op1,
15482 &sub_strict_overflow_p))
15484 if (sub_strict_overflow_p)
15485 *strict_overflow_p = true;
15487 break;
15489 case MAX_EXPR:
15490 sub_strict_overflow_p = false;
15491 if (tree_expr_nonzero_warnv_p (op0,
15492 &sub_strict_overflow_p))
15494 if (sub_strict_overflow_p)
15495 *strict_overflow_p = true;
15497 /* When both operands are nonzero, then MAX must be too. */
15498 if (tree_expr_nonzero_warnv_p (op1,
15499 strict_overflow_p))
15500 return true;
15502 /* MAX where operand 0 is positive is positive. */
15503 return tree_expr_nonnegative_warnv_p (op0,
15504 strict_overflow_p);
15506 /* MAX where operand 1 is positive is positive. */
15507 else if (tree_expr_nonzero_warnv_p (op1,
15508 &sub_strict_overflow_p)
15509 && tree_expr_nonnegative_warnv_p (op1,
15510 &sub_strict_overflow_p))
15512 if (sub_strict_overflow_p)
15513 *strict_overflow_p = true;
15514 return true;
15516 break;
15518 case BIT_IOR_EXPR:
15519 return (tree_expr_nonzero_warnv_p (op1,
15520 strict_overflow_p)
15521 || tree_expr_nonzero_warnv_p (op0,
15522 strict_overflow_p));
15524 default:
15525 break;
15528 return false;
15531 /* Return true when T is an address and is known to be nonzero.
15532 For floating point we further ensure that T is not denormal.
15533 Similar logic is present in nonzero_address_p in rtlanal.c.
15535 If the return value is based on the assumption that signed overflow
15536 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15537 change *STRICT_OVERFLOW_P. */
15539 bool
15540 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15542 bool sub_strict_overflow_p;
15543 switch (TREE_CODE (t))
15545 case INTEGER_CST:
15546 return !integer_zerop (t);
15548 case ADDR_EXPR:
15550 tree base = TREE_OPERAND (t, 0);
15552 if (!DECL_P (base))
15553 base = get_base_address (base);
15555 if (!base)
15556 return false;
15558 /* For objects in the symbol table, check whether we know they are non-zero.
15559 Don't do anything for variables and functions before the symtab is built;
15560 it is quite possible that they will be declared weak later.  */
15561 if (DECL_P (base) && decl_in_symtab_p (base))
15563 struct symtab_node *symbol;
15565 symbol = symtab_node::get_create (base);
15566 if (symbol)
15567 return symbol->nonzero_address ();
15568 else
15569 return false;
15572 /* Function local objects are never NULL. */
15573 if (DECL_P (base)
15574 && (DECL_CONTEXT (base)
15575 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15576 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15577 return true;
15579 /* Constants are never weak. */
15580 if (CONSTANT_CLASS_P (base))
15581 return true;
15583 return false;
15586 case COND_EXPR:
15587 sub_strict_overflow_p = false;
15588 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15589 &sub_strict_overflow_p)
15590 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15591 &sub_strict_overflow_p))
15593 if (sub_strict_overflow_p)
15594 *strict_overflow_p = true;
15595 return true;
15597 break;
15599 default:
15600 break;
15602 return false;
15605 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15606 attempt to fold the expression to a constant without modifying TYPE,
15607 OP0 or OP1.
15609 If the expression could be simplified to a constant, then return
15610 the constant. If the expression would not be simplified to a
15611 constant, then return NULL_TREE. */
15613 tree
15614 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15616 tree tem = fold_binary (code, type, op0, op1);
15617 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15620 /* Given the components of a unary expression CODE, TYPE and OP0,
15621 attempt to fold the expression to a constant without modifying
15622 TYPE or OP0.
15624 If the expression could be simplified to a constant, then return
15625 the constant. If the expression would not be simplified to a
15626 constant, then return NULL_TREE. */
15628 tree
15629 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15631 tree tem = fold_unary (code, type, op0);
15632 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15635 /* If EXP represents referencing an element in a constant string
15636 (either via pointer arithmetic or array indexing), return the
15637 tree representing the value accessed, otherwise return NULL. */
15639 tree
15640 fold_read_from_constant_string (tree exp)
15642 if ((TREE_CODE (exp) == INDIRECT_REF
15643 || TREE_CODE (exp) == ARRAY_REF)
15644 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15646 tree exp1 = TREE_OPERAND (exp, 0);
15647 tree index;
15648 tree string;
15649 location_t loc = EXPR_LOCATION (exp);
15651 if (TREE_CODE (exp) == INDIRECT_REF)
15652 string = string_constant (exp1, &index);
15653 else
15655 tree low_bound = array_ref_low_bound (exp);
15656 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15658 /* Optimize the special-case of a zero lower bound.
15660 We convert the low_bound to sizetype to avoid some problems
15661 with constant folding. (E.g. suppose the lower bound is 1,
15662 and its mode is QI.  Without the conversion, (ARRAY
15663 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15664 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15665 if (! integer_zerop (low_bound))
15666 index = size_diffop_loc (loc, index,
15667 fold_convert_loc (loc, sizetype, low_bound));
15669 string = exp1;
15672 if (string
15673 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15674 && TREE_CODE (string) == STRING_CST
15675 && TREE_CODE (index) == INTEGER_CST
15676 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15677 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15678 == MODE_INT)
15679 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15680 return build_int_cst_type (TREE_TYPE (exp),
15681 (TREE_STRING_POINTER (string)
15682 [TREE_INT_CST_LOW (index)]));
15684 return NULL;
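/* Illustrative example: given the conditions above, "abc"[1] (an
   ARRAY_REF of a STRING_CST with single-byte integer-mode elements and
   an in-range constant index) folds to the character constant 'b'.  */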
15687 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15688 an integer constant, real, or fixed-point constant.
15690 TYPE is the type of the result. */
15692 static tree
15693 fold_negate_const (tree arg0, tree type)
15695 tree t = NULL_TREE;
15697 switch (TREE_CODE (arg0))
15699 case INTEGER_CST:
15701 bool overflow;
15702 wide_int val = wi::neg (arg0, &overflow);
15703 t = force_fit_type (type, val, 1,
15704 (overflow | TREE_OVERFLOW (arg0))
15705 && !TYPE_UNSIGNED (type));
15706 break;
15709 case REAL_CST:
15710 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15711 break;
15713 case FIXED_CST:
15715 FIXED_VALUE_TYPE f;
15716 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15717 &(TREE_FIXED_CST (arg0)), NULL,
15718 TYPE_SATURATING (type));
15719 t = build_fixed (type, f);
15720 /* Propagate overflow flags. */
15721 if (overflow_p | TREE_OVERFLOW (arg0))
15722 TREE_OVERFLOW (t) = 1;
15723 break;
15726 default:
15727 gcc_unreachable ();
15730 return t;
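/* Illustrative example: negating the 32-bit INTEGER_CST INT_MIN
   overflows in wi::neg, so force_fit_type marks the resulting signed
   constant with TREE_OVERFLOW; for unsigned types the flag is not set.  */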
15733 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15734 an integer constant or real constant.
15736 TYPE is the type of the result. */
15738 tree
15739 fold_abs_const (tree arg0, tree type)
15741 tree t = NULL_TREE;
15743 switch (TREE_CODE (arg0))
15745 case INTEGER_CST:
15747 /* If the value is unsigned or non-negative, then the absolute value
15748 is the same as the ordinary value. */
15749 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15750 t = arg0;
15752 /* If the value is negative, then the absolute value is
15753 its negation. */
15754 else
15756 bool overflow;
15757 wide_int val = wi::neg (arg0, &overflow);
15758 t = force_fit_type (type, val, -1,
15759 overflow | TREE_OVERFLOW (arg0));
15762 break;
15764 case REAL_CST:
15765 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15766 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15767 else
15768 t = arg0;
15769 break;
15771 default:
15772 gcc_unreachable ();
15775 return t;
15778 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15779 constant. TYPE is the type of the result. */
15781 static tree
15782 fold_not_const (const_tree arg0, tree type)
15784 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15786 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
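
/* Illustrative sketch (not part of the original sources): the reduction
   used above -- GT by swapping the arguments and doing LT, GE/NE by doing
   LT/EQ and inverting -- restated over plain ints.  compare_via_lt_eq is a
   hypothetical name used only in this sketch.  */
#if 0
#include <assert.h>

enum cmp { CMP_LT, CMP_LE, CMP_GT, CMP_GE, CMP_EQ, CMP_NE };

static int
compare_via_lt_eq (enum cmp code, int a, int b)
{
  int tmp, invert = 0;

  /* To compute GT, swap the arguments and do LT; to compute LE, swap
     and fall into the GE-inversion case below.  */
  if (code == CMP_LE || code == CMP_GT)
    {
      tmp = a, a = b, b = tmp;
      code = (code == CMP_LE ? CMP_GE : CMP_LT);
    }

  /* To compute GE, do LT and invert; to compute NE, do EQ and invert.  */
  if (code == CMP_NE || code == CMP_GE)
    {
      invert = 1;
      code = (code == CMP_NE ? CMP_EQ : CMP_LT);
    }

  /* Only LT and EQ remain.  */
  return (code == CMP_EQ ? a == b : a < b) ^ invert;
}

int
main (void)
{
  assert (compare_via_lt_eq (CMP_GT, 2, 1) == (2 > 1));
  assert (compare_via_lt_eq (CMP_GE, 1, 1) == (1 >= 1));
  assert (compare_via_lt_eq (CMP_LE, 3, 1) == (3 <= 1));
  assert (compare_via_lt_eq (CMP_NE, 1, 2) == (1 != 2));
  return 0;
}
#endif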

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a RETURN_EXPR, check whether the expression inside
     the return, or the right-hand side of the MODIFY_EXPR inside the return,
     is free of side effects.  If either has no side effects, we do not need
     to wrap the expression in a cleanup point.  Note that we do not check
     the left-hand side of the MODIFY_EXPR, because it should always be the
     return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
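
/* Illustrative sketch (not part of the original sources): the source-level
   equivalences folded above, checked directly in GNU C (the __real__
   keyword and the punning of a complex into its first component follow
   GCC's own layout assumptions).  */
#if 0
#include <assert.h>
#include <complex.h>

int
main (void)
{
  int fooarray[4] = { 10, 20, 30, 40 };
  double _Complex complexfoo = 1.0 + 2.0 * I;

  /* *(foo *)&fooarray => fooarray[0] */
  assert (*(int *) &fooarray == fooarray[0]);
  /* ((foo *)&fooarray)[1] => fooarray[1] */
  assert (((int *) &fooarray)[1] == fooarray[1]);
  /* *(foo *)&complexfoo => __real__ complexfoo */
  assert (*(double *) &complexfoo == __real__ complexfoo);
  return 0;
}
#endif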

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
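
/* For example (illustrative, not from the original sources): with the
   result ignored, COMPOUND_EXPR <f (), x + 1> keeps only f (), since the
   second operand has no side effects; and f () + y reduces to f () when y
   has no side effects, because the addition itself need not be performed.  */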

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this test when VALUE is not a
     constant, because for a constant the test is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val &= ~(divisor - 1);
	  val += divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
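
/* Illustrative sketch (not part of the original sources): the power-of-two
   path above computes (value + divisor - 1) & -divisor, i.e. add
   divisor - 1 and clear the low bits.  round_up_pow2 is a hypothetical
   helper used only in this sketch.  */
#if 0
#include <assert.h>

static unsigned
round_up_pow2 (unsigned value, unsigned divisor)
{
  /* DIVISOR must be a power of two, i.e. divisor == (divisor & -divisor);
     -divisor is then the mask with the low log2 (divisor) bits cleared.  */
  return (value + divisor - 1) & -divisor;
}

int
main (void)
{
  assert (round_up_pow2 (13, 8) == 16);
  assert (round_up_pow2 (16, 8) == 16);	/* Already a multiple.  */
  assert (round_up_pow2 (1, 4) == 4);
  return 0;
}
#endif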

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this test when VALUE is not a
     constant, because for a constant the test is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
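
/* Illustrative sketch (not part of the original sources): rounding down to
   a power of two is a single mask, value & -divisor.  round_down_pow2 is a
   hypothetical helper used only in this sketch.  */
#if 0
#include <assert.h>

static unsigned
round_down_pow2 (unsigned value, unsigned divisor)
{
  /* DIVISOR must be a power of two; the mask clears the low bits.  */
  return value & -divisor;
}

int
main (void)
{
  assert (round_down_pow2 (13, 8) == 8);
  assert (round_down_pow2 (16, 8) == 16);	/* Already a multiple.  */
  return 0;
}
#endif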

/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
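
/* Illustrative sketch (not part of the original sources): the decomposition
   performed above, restated at the C level -- an address like &p[2].b splits
   into the core &p plus a constant byte offset.  */
#if 0
#include <assert.h>
#include <stddef.h>

struct pair { int a; int b; };

int
main (void)
{
  struct pair p[4];
  /* &p[2].b == (char *) &p + 2 * sizeof (struct pair) + offsetof (b).  */
  size_t off = (char *) &p[2].b - (char *) &p;
  assert (off == 2 * sizeof (struct pair) + offsetof (struct pair, b));
  return 0;
}
#endif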

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
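
/* Illustrative sketch (not part of the original sources): two addresses
   differ by a compile-time constant exactly when they share the same core
   and their offsets fold to constants, as with two fields of one object.  */
#if 0
#include <assert.h>
#include <stddef.h>

struct s { char pad[3]; int x; };

int
main (void)
{
  struct s obj;
  /* Same core (&obj), constant offsets: the difference is the offsetof.  */
  ptrdiff_t diff = (char *) &obj.x - (char *) &obj.pad[0];
  assert (diff == (ptrdiff_t) offsetof (struct s, x));
  return 0;
}
#endif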

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
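
/* Illustrative sketch (not part of the original sources): stripping sign
   ops is safe when only the magnitude of the result matters, because
   |(-x) * y| == |x * y| and |fabs (x) * y| == |x * y|; a caller such as
   pow (x, 2.0) consumes only the magnitude, so negations and fabs calls
   in its argument can be dropped.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -3.0, y = 2.0;
  assert (fabs (-x * y) == fabs (x * y));
  assert (fabs (fabs (x) * y) == fabs (x * y));
  return 0;
}
#endif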