/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
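
/* Illustrative note (a sketch, not from the original sources): the
   encoding is chosen so that bitwise operations on comparison codes
   mirror logical operations on the comparisons themselves, e.g.

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE    (1 | 2 == 3)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ    (3 & 6 == 2)

   so folding (a < b) || (a == b) into a <= b reduces to OR-ing the
   two codes and converting back with compcode_to_comparison.  */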

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
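
/* Usage sketch (illustrative; TYPE is any integer type in scope):

     tree twelve = build_int_cst (type, 12);
     div_if_zero_remainder (twelve, build_int_cst (type, 4));
       => INTEGER_CST 3
     div_if_zero_remainder (twelve, build_int_cst (type, 5));
       => NULL_TREE, since 12 is not a multiple of 5.  */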

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
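
/* Usage sketch (an assumed caller pattern, not code from this file):
   speculative folding brackets its work with the defer/undefer pair so
   that a -Wstrict-overflow warning is only emitted if the result is kept:

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (folded != NULL_TREE, stmt, 0);  */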

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}
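
/* Illustrative examples: sin is odd, so -sin(x) may fold to sin(-x).
   rint is only treated as odd when -frounding-math is off: under a
   directed rounding mode such as round-toward-positive-infinity,
   rint(0.5) == 1.0 while rint(-0.5) == -0.0, so -rint(x) != rint(-x).  */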

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
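
/* Illustrative example: for a 32-bit signed int, the only constant
   rejected here is INT_MIN (just the sign bit set), because -INT_MIN
   is not representable; every other signed value negates safely.  */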

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
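
/* Usage sketch (illustrative): for signed int operands,

     negate_expr_p (x + 1)   => true, -(x + 1) can become (-1) - x
     negate_expr_p (x - y)   => true, -(x - y) can become y - x
     negate_expr_p (x)       => false, a bare variable gains a NEGATE_EXPR

   so callers use it to decide whether negation is free before
   committing to a transformation.  */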

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
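
/* Usage sketch (illustrative): negate_expr never returns NULL_TREE for
   a non-null argument; when nothing folds, it wraps the operand:

     negate_expr (a - b)   => b - a     (folded)
     negate_expr (a * 5)   => a * -5    (negation absorbed by the constant)
     negate_expr (a)       => -a        (plain NEGATE_EXPR)  */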

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
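
/* Usage sketch (illustrative): splitting IN = x + 4 with CODE ==
   PLUS_EXPR and NEGATE_P == 0 gives

     return value => x    *litp => 4    *conp, *minus_litp => NULL

   while IN = x - 4 stores the 4 in *minus_litp instead.  The caller
   can then recombine the pieces with associate_trees below.  */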

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
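
/* Usage sketch (illustrative), for 32-bit signed int constants:

     int_const_binop (PLUS_EXPR, <2>, <3>)        => <5>
     int_const_binop (TRUNC_DIV_EXPR, <7>, <2>)   => <3>
     int_const_binop (TRUNC_DIV_EXPR, <7>, <0>)   => NULL_TREE

   and <INT_MAX> + <1> still yields a tree (the wrapped value), but
   with TREE_OVERFLOW set on it by force_fit_type.  */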

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;

	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, the compiler always emits VEC_RSHIFT_EXPR;
	     for !BYTES_BIG_ENDIAN this picks the first vector element,
	     but for BYTES_BIG_ENDIAN the last element of the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;

	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and returns NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
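
/* Usage sketch (illustrative): const_binop dispatches on the kind of
   constant, e.g. for REAL_CSTs of type double

     const_binop (PLUS_EXPR, build_real (dtype, dconst1),
		  build_real (dtype, dconst1))    => 2.0

   while 1.0 / 0.0 with flag_trapping_math set returns NULL_TREE
   rather than folding to +Inf, per the checks above.  */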

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
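
/* Usage sketch (illustrative): size_binop (the location-less macro
   around size_binop_loc) is the usual way to do sizetype arithmetic,
   e.g. the byte size of a two-element array of ELT_TYPE:

     size_binop (MULT_EXPR, TYPE_SIZE_UNIT (elt_type), size_int (2))

   which folds to an INTEGER_CST whenever the element size is constant.  */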

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in a signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
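
/* Illustrative examples of the saturating semantics above, converting
   to a 32-bit signed type:

     (int) 3.9     => 3          (truncated toward zero)
     (int) 1.0e10  => INT_MAX    (TREE_OVERFLOW set)
     (int) NaN     => 0          (TREE_OVERFLOW set)  */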

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
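
/* Usage sketch (illustrative):

     fold_convert_const (NOP_EXPR, integer_type_node, <long 42>)
       => <int 42>
     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, <double 2.5>)
       => <int 2>
     fold_convert_const (NOP_EXPR, integer_type_node, <a VECTOR_CST>)
       => NULL_TREE, since no rule above applies.  */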

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2073 gcc_unreachable ();
2075 fold_convert_exit:
2076 protected_set_expr_location_unshare (tem, loc);
2077 return tem;
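/* Illustrative examples (added commentary, not part of the original
   file): converting an INTEGER_CST to a REAL_TYPE goes through
   fold_convert_const with FLOAT_EXPR, so (double) 3 folds to 3.0
   immediately, while converting a COMPLEX_TYPE value z to a scalar
   keeps only its real part, i.e. (double) z becomes
   (double) __real__ z via REALPART_EXPR.  */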
2080 /* Return false if expr can be assumed not to be an lvalue, true
2081 otherwise. */
2083 static bool
2084 maybe_lvalue_p (const_tree x)
2086 /* We only need to wrap lvalue tree codes. */
2087 switch (TREE_CODE (x))
2089 case VAR_DECL:
2090 case PARM_DECL:
2091 case RESULT_DECL:
2092 case LABEL_DECL:
2093 case FUNCTION_DECL:
2094 case SSA_NAME:
2096 case COMPONENT_REF:
2097 case MEM_REF:
2098 case INDIRECT_REF:
2099 case ARRAY_REF:
2100 case ARRAY_RANGE_REF:
2101 case BIT_FIELD_REF:
2102 case OBJ_TYPE_REF:
2104 case REALPART_EXPR:
2105 case IMAGPART_EXPR:
2106 case PREINCREMENT_EXPR:
2107 case PREDECREMENT_EXPR:
2108 case SAVE_EXPR:
2109 case TRY_CATCH_EXPR:
2110 case WITH_CLEANUP_EXPR:
2111 case COMPOUND_EXPR:
2112 case MODIFY_EXPR:
2113 case TARGET_EXPR:
2114 case COND_EXPR:
2115 case BIND_EXPR:
2116 break;
2118 default:
2119 /* Assume the worst for front-end tree codes. */
2120 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2121 break;
2122 return false;
2125 return true;
2128 /* Return an expr equal to X but certainly not valid as an lvalue. */
2130 tree
2131 non_lvalue_loc (location_t loc, tree x)
2133 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2134 us. */
2135 if (in_gimple_form)
2136 return x;
2138 if (! maybe_lvalue_p (x))
2139 return x;
2140 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2143 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2144 Zero means allow extended lvalues. */
2146 int pedantic_lvalues;
2148 /* When pedantic, return an expr equal to X but certainly not valid as a
2149 pedantic lvalue. Otherwise, return X. */
2151 static tree
2152 pedantic_non_lvalue_loc (location_t loc, tree x)
2154 if (pedantic_lvalues)
2155 return non_lvalue_loc (loc, x);
2157 return protected_set_expr_location_unshare (x, loc);
2160 /* Given a tree comparison code, return the code that is the logical inverse.
2161 It is generally not safe to do this for floating-point comparisons, except
2162 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2163 ERROR_MARK in this case. */
2165 enum tree_code
2166 invert_tree_comparison (enum tree_code code, bool honor_nans)
2168 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2169 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2170 return ERROR_MARK;
2172 switch (code)
2174 case EQ_EXPR:
2175 return NE_EXPR;
2176 case NE_EXPR:
2177 return EQ_EXPR;
2178 case GT_EXPR:
2179 return honor_nans ? UNLE_EXPR : LE_EXPR;
2180 case GE_EXPR:
2181 return honor_nans ? UNLT_EXPR : LT_EXPR;
2182 case LT_EXPR:
2183 return honor_nans ? UNGE_EXPR : GE_EXPR;
2184 case LE_EXPR:
2185 return honor_nans ? UNGT_EXPR : GT_EXPR;
2186 case LTGT_EXPR:
2187 return UNEQ_EXPR;
2188 case UNEQ_EXPR:
2189 return LTGT_EXPR;
2190 case UNGT_EXPR:
2191 return LE_EXPR;
2192 case UNGE_EXPR:
2193 return LT_EXPR;
2194 case UNLT_EXPR:
2195 return GE_EXPR;
2196 case UNLE_EXPR:
2197 return GT_EXPR;
2198 case ORDERED_EXPR:
2199 return UNORDERED_EXPR;
2200 case UNORDERED_EXPR:
2201 return ORDERED_EXPR;
2202 default:
2203 gcc_unreachable ();
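/* Worked example (illustrative, not from the original source): with
   NaNs honored and -ftrapping-math off, the inverse of a floating
   "<" must stay true for unordered operands, so

     !(a <  b)  ==>  a UNGE b   (true when a or b is NaN)
     !(a <= b)  ==>  a UNGT b
     !(a == b)  ==>  a != b     (safe even with -ftrapping-math)

   whereas when NaNs cannot occur the plain GE_EXPR/GT_EXPR inverses
   apply.  */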
2207 /* Similar, but return the comparison that results if the operands are
2208 swapped. This is safe for floating-point. */
2210 enum tree_code
2211 swap_tree_comparison (enum tree_code code)
2213 switch (code)
2215 case EQ_EXPR:
2216 case NE_EXPR:
2217 case ORDERED_EXPR:
2218 case UNORDERED_EXPR:
2219 case LTGT_EXPR:
2220 case UNEQ_EXPR:
2221 return code;
2222 case GT_EXPR:
2223 return LT_EXPR;
2224 case GE_EXPR:
2225 return LE_EXPR;
2226 case LT_EXPR:
2227 return GT_EXPR;
2228 case LE_EXPR:
2229 return GE_EXPR;
2230 case UNGT_EXPR:
2231 return UNLT_EXPR;
2232 case UNGE_EXPR:
2233 return UNLE_EXPR;
2234 case UNLT_EXPR:
2235 return UNGT_EXPR;
2236 case UNLE_EXPR:
2237 return UNGE_EXPR;
2238 default:
2239 gcc_unreachable ();
2244 /* Convert a comparison tree code from an enum tree_code representation
2245 into a compcode bit-based encoding. This function is the inverse of
2246 compcode_to_comparison. */
2248 static enum comparison_code
2249 comparison_to_compcode (enum tree_code code)
2251 switch (code)
2253 case LT_EXPR:
2254 return COMPCODE_LT;
2255 case EQ_EXPR:
2256 return COMPCODE_EQ;
2257 case LE_EXPR:
2258 return COMPCODE_LE;
2259 case GT_EXPR:
2260 return COMPCODE_GT;
2261 case NE_EXPR:
2262 return COMPCODE_NE;
2263 case GE_EXPR:
2264 return COMPCODE_GE;
2265 case ORDERED_EXPR:
2266 return COMPCODE_ORD;
2267 case UNORDERED_EXPR:
2268 return COMPCODE_UNORD;
2269 case UNLT_EXPR:
2270 return COMPCODE_UNLT;
2271 case UNEQ_EXPR:
2272 return COMPCODE_UNEQ;
2273 case UNLE_EXPR:
2274 return COMPCODE_UNLE;
2275 case UNGT_EXPR:
2276 return COMPCODE_UNGT;
2277 case LTGT_EXPR:
2278 return COMPCODE_LTGT;
2279 case UNGE_EXPR:
2280 return COMPCODE_UNGE;
2281 default:
2282 gcc_unreachable ();
2286 /* Convert a compcode bit-based encoding of a comparison operator back
2287 to GCC's enum tree_code representation. This function is the
2288 inverse of comparison_to_compcode. */
2290 static enum tree_code
2291 compcode_to_comparison (enum comparison_code code)
2293 switch (code)
2295 case COMPCODE_LT:
2296 return LT_EXPR;
2297 case COMPCODE_EQ:
2298 return EQ_EXPR;
2299 case COMPCODE_LE:
2300 return LE_EXPR;
2301 case COMPCODE_GT:
2302 return GT_EXPR;
2303 case COMPCODE_NE:
2304 return NE_EXPR;
2305 case COMPCODE_GE:
2306 return GE_EXPR;
2307 case COMPCODE_ORD:
2308 return ORDERED_EXPR;
2309 case COMPCODE_UNORD:
2310 return UNORDERED_EXPR;
2311 case COMPCODE_UNLT:
2312 return UNLT_EXPR;
2313 case COMPCODE_UNEQ:
2314 return UNEQ_EXPR;
2315 case COMPCODE_UNLE:
2316 return UNLE_EXPR;
2317 case COMPCODE_UNGT:
2318 return UNGT_EXPR;
2319 case COMPCODE_LTGT:
2320 return LTGT_EXPR;
2321 case COMPCODE_UNGE:
2322 return UNGE_EXPR;
2323 default:
2324 gcc_unreachable ();
2328 /* Return a tree for the comparison which is the combination of
2329 doing the AND or OR (depending on CODE) of the two operations LCODE
2330 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2331 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2332 if this makes the transformation invalid. */
2334 tree
2335 combine_comparisons (location_t loc,
2336 enum tree_code code, enum tree_code lcode,
2337 enum tree_code rcode, tree truth_type,
2338 tree ll_arg, tree lr_arg)
2340 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2341 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2342 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2343 int compcode;
2345 switch (code)
2347 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2348 compcode = lcompcode & rcompcode;
2349 break;
2351 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2352 compcode = lcompcode | rcompcode;
2353 break;
2355 default:
2356 return NULL_TREE;
2359 if (!honor_nans)
2361 /* Eliminate unordered comparisons, as well as LTGT and ORD
2362 which are not used unless the mode has NaNs. */
2363 compcode &= ~COMPCODE_UNORD;
2364 if (compcode == COMPCODE_LTGT)
2365 compcode = COMPCODE_NE;
2366 else if (compcode == COMPCODE_ORD)
2367 compcode = COMPCODE_TRUE;
2369 else if (flag_trapping_math)
2371 /* Check that the original operation and the optimized ones will trap
2372 under the same condition. */
2373 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2374 && (lcompcode != COMPCODE_EQ)
2375 && (lcompcode != COMPCODE_ORD);
2376 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2377 && (rcompcode != COMPCODE_EQ)
2378 && (rcompcode != COMPCODE_ORD);
2379 bool trap = (compcode & COMPCODE_UNORD) == 0
2380 && (compcode != COMPCODE_EQ)
2381 && (compcode != COMPCODE_ORD);
2383 /* In a short-circuited boolean expression the LHS might be
2384 such that the RHS, if evaluated, will never trap. For
2385 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2386 if neither x nor y is NaN. (This is a mixed blessing: for
2387 example, the expression above will never trap, hence
2388 optimizing it to x < y would be invalid). */
2389 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2390 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2391 rtrap = false;
2393 /* If the comparison was short-circuited, and only the RHS
2394 trapped, we may now generate a spurious trap. */
2395 if (rtrap && !ltrap
2396 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2397 return NULL_TREE;
2399 /* If we changed the conditions that cause a trap, we lose. */
2400 if ((ltrap || rtrap) != trap)
2401 return NULL_TREE;
2404 if (compcode == COMPCODE_TRUE)
2405 return constant_boolean_node (true, truth_type);
2406 else if (compcode == COMPCODE_FALSE)
2407 return constant_boolean_node (false, truth_type);
2408 else
2410 enum tree_code tcode;
2412 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2413 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
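/* Worked example (illustrative): with the bit-based encoding, LT is 1,
   EQ is 2 and GT is 4, so combining comparisons on identical operands
   reduces to bit arithmetic:

     (x < y) || (x == y):  1 | 2 == 3 == COMPCODE_LE    ==>  x <= y
     (x <= y) && (x >= y): 3 & 6 == 2 == COMPCODE_EQ    ==>  x == y
     (x < y) || (x > y):   1 | 4 == 5 == COMPCODE_LTGT

   and when NaNs cannot occur, LTGT is further canonicalized to NE
   as shown above.  */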
2417 /* Return nonzero if two operands (typically of the same tree node)
2418 are necessarily equal. If either argument has side-effects this
2419 function returns zero. FLAGS modifies behavior as follows:
2421 If OEP_ONLY_CONST is set, only return nonzero for constants.
2422 This function tests whether the operands are indistinguishable;
2423 it does not test whether they are equal using C's == operation.
2424 The distinction is important for IEEE floating point, because
2425 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2426 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2428 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2429 even though it may hold multiple values during a function.
2430 This is because a GCC tree node guarantees that nothing else is
2431 executed between the evaluation of its "operands" (which may often
2432 be evaluated in arbitrary order). Hence if the operands themselves
2433 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2434 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2435 unset means assuming isochronic (or instantaneous) tree equivalence.
2436 Unless comparing arbitrary expression trees, such as from different
2437 statements, this flag can usually be left unset.
2439 If OEP_PURE_SAME is set, then pure functions with identical arguments
2440 are considered the same. It is used when the caller has other ways
2441 to ensure that global memory is unchanged in between. */
2443 int
2444 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2446 /* If either is ERROR_MARK, they aren't equal. */
2447 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2448 || TREE_TYPE (arg0) == error_mark_node
2449 || TREE_TYPE (arg1) == error_mark_node)
2450 return 0;
2452 /* Similarly, if either does not have a type (like a released SSA name),
2453 they aren't equal. */
2454 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2455 return 0;
2457 /* Check equality of integer constants before bailing out due to
2458 precision differences. */
2459 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2460 return tree_int_cst_equal (arg0, arg1);
2462 /* If both types don't have the same signedness, then we can't consider
2463 them equal. We must check this before the STRIP_NOPS calls
2464 because they may change the signedness of the arguments. As pointers
2465 strictly don't have a signedness, require either two pointers or
2466 two non-pointers as well. */
2467 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2468 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2469 return 0;
2472 /* We cannot consider pointers to different address spaces equal. */
2472 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2473 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2474 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2475 return 0;
2477 /* If both types don't have the same precision, then it is not safe
2478 to strip NOPs. */
2479 if (element_precision (TREE_TYPE (arg0))
2480 != element_precision (TREE_TYPE (arg1)))
2481 return 0;
2483 STRIP_NOPS (arg0);
2484 STRIP_NOPS (arg1);
2486 /* In case both args are comparisons but with different comparison
2487 code, try to swap the comparison operands of one arg to produce
2488 a match and compare that variant. */
2489 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2490 && COMPARISON_CLASS_P (arg0)
2491 && COMPARISON_CLASS_P (arg1))
2493 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2495 if (TREE_CODE (arg0) == swap_code)
2496 return operand_equal_p (TREE_OPERAND (arg0, 0),
2497 TREE_OPERAND (arg1, 1), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 1),
2499 TREE_OPERAND (arg1, 0), flags);
2502 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2503 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2504 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2505 return 0;
2507 /* This is needed for conversions and for COMPONENT_REF.
2508 Might as well play it safe and always test this. */
2509 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2510 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2511 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2512 return 0;
2514 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2515 We don't care about side effects in that case because the SAVE_EXPR
2516 takes care of that for us. In all other cases, two expressions are
2517 equal if they have no side effects. If we have two identical
2518 expressions with side effects that should be treated the same due
2519 to the only side effects being identical SAVE_EXPR's, that will
2520 be detected in the recursive calls below.
2521 If we are taking an invariant address of two identical objects
2522 they are necessarily equal as well. */
2523 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2524 && (TREE_CODE (arg0) == SAVE_EXPR
2525 || (flags & OEP_CONSTANT_ADDRESS_OF)
2526 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2527 return 1;
2529 /* Next handle constant cases, those for which we can return 1 even
2530 if ONLY_CONST is set. */
2531 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2532 switch (TREE_CODE (arg0))
2534 case INTEGER_CST:
2535 return tree_int_cst_equal (arg0, arg1);
2537 case FIXED_CST:
2538 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2539 TREE_FIXED_CST (arg1));
2541 case REAL_CST:
2542 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2543 TREE_REAL_CST (arg1)))
2544 return 1;
2547 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2549 /* If we do not distinguish between signed and unsigned zero,
2550 consider them equal. */
2551 if (real_zerop (arg0) && real_zerop (arg1))
2552 return 1;
2554 return 0;
2556 case VECTOR_CST:
2558 unsigned i;
2560 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2561 return 0;
2563 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2565 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2566 VECTOR_CST_ELT (arg1, i), flags))
2567 return 0;
2569 return 1;
2572 case COMPLEX_CST:
2573 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2574 flags)
2575 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2576 flags));
2578 case STRING_CST:
2579 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2580 && ! memcmp (TREE_STRING_POINTER (arg0),
2581 TREE_STRING_POINTER (arg1),
2582 TREE_STRING_LENGTH (arg0)));
2584 case ADDR_EXPR:
2585 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2586 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2587 ? OEP_CONSTANT_ADDRESS_OF : 0);
2588 default:
2589 break;
2592 if (flags & OEP_ONLY_CONST)
2593 return 0;
2595 /* Define macros to test an operand from arg0 and arg1 for equality and a
2596 variant that allows null and views null as being different from any
2597 non-null value. In the latter case, if either is null, they both
2598 must be; otherwise, do the normal comparison. */
2599 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2600 TREE_OPERAND (arg1, N), flags)
2602 #define OP_SAME_WITH_NULL(N) \
2603 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2604 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2606 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2608 case tcc_unary:
2609 /* Two conversions are equal only if signedness and modes match. */
2610 switch (TREE_CODE (arg0))
2612 CASE_CONVERT:
2613 case FIX_TRUNC_EXPR:
2614 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2615 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2616 return 0;
2617 break;
2618 default:
2619 break;
2622 return OP_SAME (0);
2625 case tcc_comparison:
2626 case tcc_binary:
2627 if (OP_SAME (0) && OP_SAME (1))
2628 return 1;
2630 /* For commutative ops, allow the other order. */
2631 return (commutative_tree_code (TREE_CODE (arg0))
2632 && operand_equal_p (TREE_OPERAND (arg0, 0),
2633 TREE_OPERAND (arg1, 1), flags)
2634 && operand_equal_p (TREE_OPERAND (arg0, 1),
2635 TREE_OPERAND (arg1, 0), flags));
2637 case tcc_reference:
2638 /* If either of the pointer (or reference) expressions we are
2639 dereferencing contain a side effect, these cannot be equal,
2640 but their addresses can be. */
2641 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2642 && (TREE_SIDE_EFFECTS (arg0)
2643 || TREE_SIDE_EFFECTS (arg1)))
2644 return 0;
2646 switch (TREE_CODE (arg0))
2648 case INDIRECT_REF:
2649 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2650 return OP_SAME (0);
2652 case REALPART_EXPR:
2653 case IMAGPART_EXPR:
2654 return OP_SAME (0);
2656 case TARGET_MEM_REF:
2657 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2658 /* Require equal extra operands and then fall through to MEM_REF
2659 handling of the two common operands. */
2660 if (!OP_SAME_WITH_NULL (2)
2661 || !OP_SAME_WITH_NULL (3)
2662 || !OP_SAME_WITH_NULL (4))
2663 return 0;
2664 /* Fallthru. */
2665 case MEM_REF:
2666 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2667 /* Require equal access sizes, and similar pointer types.
2668 We can have incomplete types for array references of
2669 variable-sized arrays from the Fortran frontend
2670 though. Also verify the types are compatible. */
2671 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2672 || (TYPE_SIZE (TREE_TYPE (arg0))
2673 && TYPE_SIZE (TREE_TYPE (arg1))
2674 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2675 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2676 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2677 && alias_ptr_types_compatible_p
2678 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2679 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2680 && OP_SAME (0) && OP_SAME (1));
2682 case ARRAY_REF:
2683 case ARRAY_RANGE_REF:
2684 /* Operands 2 and 3 may be null.
2685 Compare the array index by value if it is constant first as we
2686 may have different types but same value here. */
2687 if (!OP_SAME (0))
2688 return 0;
2689 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2690 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2691 TREE_OPERAND (arg1, 1))
2692 || OP_SAME (1))
2693 && OP_SAME_WITH_NULL (2)
2694 && OP_SAME_WITH_NULL (3));
2696 case COMPONENT_REF:
2697 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2698 may be NULL when we're called to compare MEM_EXPRs. */
2699 if (!OP_SAME_WITH_NULL (0)
2700 || !OP_SAME (1))
2701 return 0;
2702 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2703 return OP_SAME_WITH_NULL (2);
2705 case BIT_FIELD_REF:
2706 if (!OP_SAME (0))
2707 return 0;
2708 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2709 return OP_SAME (1) && OP_SAME (2);
2711 default:
2712 return 0;
2715 case tcc_expression:
2716 switch (TREE_CODE (arg0))
2718 case ADDR_EXPR:
2719 case TRUTH_NOT_EXPR:
2720 return OP_SAME (0);
2722 case TRUTH_ANDIF_EXPR:
2723 case TRUTH_ORIF_EXPR:
2724 return OP_SAME (0) && OP_SAME (1);
2726 case FMA_EXPR:
2727 case WIDEN_MULT_PLUS_EXPR:
2728 case WIDEN_MULT_MINUS_EXPR:
2729 if (!OP_SAME (2))
2730 return 0;
2731 /* The multiplication operands are commutative. */
2732 /* FALLTHRU */
2734 case TRUTH_AND_EXPR:
2735 case TRUTH_OR_EXPR:
2736 case TRUTH_XOR_EXPR:
2737 if (OP_SAME (0) && OP_SAME (1))
2738 return 1;
2740 /* Otherwise take into account this is a commutative operation. */
2741 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2742 TREE_OPERAND (arg1, 1), flags)
2743 && operand_equal_p (TREE_OPERAND (arg0, 1),
2744 TREE_OPERAND (arg1, 0), flags));
2746 case COND_EXPR:
2747 case VEC_COND_EXPR:
2748 case DOT_PROD_EXPR:
2749 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2751 default:
2752 return 0;
2755 case tcc_vl_exp:
2756 switch (TREE_CODE (arg0))
2758 case CALL_EXPR:
2759 /* If the CALL_EXPRs call different functions, then they
2760 clearly cannot be equal. */
2761 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2762 flags))
2763 return 0;
2766 unsigned int cef = call_expr_flags (arg0);
2767 if (flags & OEP_PURE_SAME)
2768 cef &= ECF_CONST | ECF_PURE;
2769 else
2770 cef &= ECF_CONST;
2771 if (!cef)
2772 return 0;
2775 /* Now see if all the arguments are the same. */
2777 const_call_expr_arg_iterator iter0, iter1;
2778 const_tree a0, a1;
2779 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2780 a1 = first_const_call_expr_arg (arg1, &iter1);
2781 a0 && a1;
2782 a0 = next_const_call_expr_arg (&iter0),
2783 a1 = next_const_call_expr_arg (&iter1))
2784 if (! operand_equal_p (a0, a1, flags))
2785 return 0;
2787 /* If we get here and both argument lists are exhausted
2788 then the CALL_EXPRs are equal. */
2789 return ! (a0 || a1);
2791 default:
2792 return 0;
2795 case tcc_declaration:
2796 /* Consider __builtin_sqrt equal to sqrt. */
2797 return (TREE_CODE (arg0) == FUNCTION_DECL
2798 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2799 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2800 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2802 default:
2803 return 0;
2806 #undef OP_SAME
2807 #undef OP_SAME_WITH_NULL
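/* Usage sketch (illustrative): OEP_ONLY_CONST asks whether two trees
   are indistinguishable, not merely "==".  For REAL_CSTs of -0.0 and
   0.0 the answer is 0 when signed zeros are honored, because
   REAL_VALUES_IDENTICAL distinguishes them even though -0.0 == 0.0
   numerically; conversely, two identical NaN constants compare equal
   here even though NaN != NaN at run time.  */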
2810 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2811 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2813 When in doubt, return 0. */
2815 static int
2816 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2818 int unsignedp1, unsignedpo;
2819 tree primarg0, primarg1, primother;
2820 unsigned int correct_width;
2822 if (operand_equal_p (arg0, arg1, 0))
2823 return 1;
2825 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2826 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2827 return 0;
2829 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2830 and see if the inner values are the same. This removes any
2831 signedness comparison, which doesn't matter here. */
2832 primarg0 = arg0, primarg1 = arg1;
2833 STRIP_NOPS (primarg0);
2834 STRIP_NOPS (primarg1);
2835 if (operand_equal_p (primarg0, primarg1, 0))
2836 return 1;
2838 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2839 actual comparison operand, ARG0.
2841 First throw away any conversions to wider types
2842 already present in the operands. */
2844 primarg1 = get_narrower (arg1, &unsignedp1);
2845 primother = get_narrower (other, &unsignedpo);
2847 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2848 if (unsignedp1 == unsignedpo
2849 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2850 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2852 tree type = TREE_TYPE (arg0);
2854 /* Make sure shorter operand is extended the right way
2855 to match the longer operand. */
2856 primarg1 = fold_convert (signed_or_unsigned_type_for
2857 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2859 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2860 return 1;
2863 return 0;
2866 /* See if ARG is an expression that is either a comparison or is performing
2867 arithmetic on comparisons. The comparisons must only be comparing
2868 two different values, which will be stored in *CVAL1 and *CVAL2; if
2869 they are nonzero it means that some operands have already been found.
2870 No variables may be used anywhere else in the expression except in the
2871 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2872 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2874 If this is true, return 1. Otherwise, return zero. */
2876 static int
2877 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2879 enum tree_code code = TREE_CODE (arg);
2880 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2882 /* We can handle some of the tcc_expression cases here. */
2883 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2884 tclass = tcc_unary;
2885 else if (tclass == tcc_expression
2886 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2887 || code == COMPOUND_EXPR))
2888 tclass = tcc_binary;
2890 else if (tclass == tcc_expression && code == SAVE_EXPR
2891 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2893 /* If we've already found a CVAL1 or CVAL2, this expression is
2894 too complex to handle. */
2895 if (*cval1 || *cval2)
2896 return 0;
2898 tclass = tcc_unary;
2899 *save_p = 1;
2902 switch (tclass)
2904 case tcc_unary:
2905 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2907 case tcc_binary:
2908 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2909 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2910 cval1, cval2, save_p));
2912 case tcc_constant:
2913 return 1;
2915 case tcc_expression:
2916 if (code == COND_EXPR)
2917 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2918 cval1, cval2, save_p)
2919 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2920 cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2922 cval1, cval2, save_p));
2923 return 0;
2925 case tcc_comparison:
2926 /* First see if we can handle the first operand, then the second. For
2927 the second operand, we know *CVAL1 can't be zero. It must be that
2928 one side of the comparison is each of the values; test for the
2929 case where this isn't true by failing if the two operands
2930 are the same. */
2932 if (operand_equal_p (TREE_OPERAND (arg, 0),
2933 TREE_OPERAND (arg, 1), 0))
2934 return 0;
2936 if (*cval1 == 0)
2937 *cval1 = TREE_OPERAND (arg, 0);
2938 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2939 ;
2940 else if (*cval2 == 0)
2941 *cval2 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2943 ;
2944 else
2945 return 0;
2947 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2948 ;
2949 else if (*cval2 == 0)
2950 *cval2 = TREE_OPERAND (arg, 1);
2951 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2952 ;
2953 else
2954 return 0;
2956 return 1;
2958 default:
2959 return 0;
2963 /* ARG is a tree that is known to contain just arithmetic operations and
2964 comparisons. Evaluate the operations in the tree substituting NEW0 for
2965 any occurrence of OLD0 as an operand of a comparison and likewise for
2966 NEW1 and OLD1. */
2968 static tree
2969 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2970 tree old1, tree new1)
2972 tree type = TREE_TYPE (arg);
2973 enum tree_code code = TREE_CODE (arg);
2974 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2976 /* We can handle some of the tcc_expression cases here. */
2977 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2978 tclass = tcc_unary;
2979 else if (tclass == tcc_expression
2980 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2981 tclass = tcc_binary;
2983 switch (tclass)
2985 case tcc_unary:
2986 return fold_build1_loc (loc, code, type,
2987 eval_subst (loc, TREE_OPERAND (arg, 0),
2988 old0, new0, old1, new1));
2990 case tcc_binary:
2991 return fold_build2_loc (loc, code, type,
2992 eval_subst (loc, TREE_OPERAND (arg, 0),
2993 old0, new0, old1, new1),
2994 eval_subst (loc, TREE_OPERAND (arg, 1),
2995 old0, new0, old1, new1));
2997 case tcc_expression:
2998 switch (code)
3000 case SAVE_EXPR:
3001 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3002 old1, new1);
3004 case COMPOUND_EXPR:
3005 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3006 old1, new1);
3008 case COND_EXPR:
3009 return fold_build3_loc (loc, code, type,
3010 eval_subst (loc, TREE_OPERAND (arg, 0),
3011 old0, new0, old1, new1),
3012 eval_subst (loc, TREE_OPERAND (arg, 1),
3013 old0, new0, old1, new1),
3014 eval_subst (loc, TREE_OPERAND (arg, 2),
3015 old0, new0, old1, new1));
3016 default:
3017 break;
3019 /* Fall through - ??? */
3021 case tcc_comparison:
3023 tree arg0 = TREE_OPERAND (arg, 0);
3024 tree arg1 = TREE_OPERAND (arg, 1);
3026 /* We need to check both for exact equality and tree equality. The
3027 former will be true if the operand has a side-effect. In that
3028 case, we know the operand occurred exactly once. */
3030 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3031 arg0 = new0;
3032 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3033 arg0 = new1;
3035 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3036 arg1 = new0;
3037 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3038 arg1 = new1;
3040 return fold_build2_loc (loc, code, type, arg0, arg1);
3043 default:
3044 return arg;
3048 /* Return a tree for the case when the result of an expression is RESULT
3049 converted to TYPE and OMITTED was previously an operand of the expression
3050 but is now not needed (e.g., we folded OMITTED * 0).
3052 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3053 the conversion of RESULT to TYPE. */
3055 tree
3056 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3058 tree t = fold_convert_loc (loc, type, result);
3060 /* If the resulting operand is an empty statement, just return the omitted
3061 statement cast to void. */
3062 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3063 return build1_loc (loc, NOP_EXPR, void_type_node,
3064 fold_ignored_result (omitted));
3066 if (TREE_SIDE_EFFECTS (omitted))
3067 return build2_loc (loc, COMPOUND_EXPR, type,
3068 fold_ignored_result (omitted), t);
3070 return non_lvalue_loc (loc, t);
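/* Usage sketch (illustrative, with a hypothetical call CALL_TO_F):
   when folding f () * 0, the product is 0 but the call must still be
   evaluated, so

     omit_one_operand_loc (loc, type, integer_zero_node, CALL_TO_F)

   builds the equivalent of the C expression (f (), 0) through a
   COMPOUND_EXPR; if the omitted operand has no side effects, the
   result is simply the converted constant wrapped as a non-lvalue.  */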
3073 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3075 static tree
3076 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3077 tree omitted)
3079 tree t = fold_convert_loc (loc, type, result);
3081 /* If the resulting operand is an empty statement, just return the omitted
3082 statement cast to void. */
3083 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3084 return build1_loc (loc, NOP_EXPR, void_type_node,
3085 fold_ignored_result (omitted));
3087 if (TREE_SIDE_EFFECTS (omitted))
3088 return build2_loc (loc, COMPOUND_EXPR, type,
3089 fold_ignored_result (omitted), t);
3091 return pedantic_non_lvalue_loc (loc, t);
3094 /* Return a tree for the case when the result of an expression is RESULT
3095 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3096 of the expression but are now not needed.
3098 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3099 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3100 evaluated before OMITTED2. Otherwise, if neither has side effects,
3101 just do the conversion of RESULT to TYPE. */
3103 tree
3104 omit_two_operands_loc (location_t loc, tree type, tree result,
3105 tree omitted1, tree omitted2)
3107 tree t = fold_convert_loc (loc, type, result);
3109 if (TREE_SIDE_EFFECTS (omitted2))
3110 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3111 if (TREE_SIDE_EFFECTS (omitted1))
3112 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3114 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3118 /* Return a simplified tree node for the truth-negation of ARG. This
3119 never alters ARG itself. We assume that ARG is an operation that
3120 returns a truth value (0 or 1).
3122 FIXME: one would think we would fold the result, but it causes
3123 problems with the dominator optimizer. */
3125 static tree
3126 fold_truth_not_expr (location_t loc, tree arg)
3128 tree type = TREE_TYPE (arg);
3129 enum tree_code code = TREE_CODE (arg);
3130 location_t loc1, loc2;
3132 /* If this is a comparison, we can simply invert it, except for
3133 floating-point non-equality comparisons, in which case we just
3134 enclose a TRUTH_NOT_EXPR around what we have. */
3136 if (TREE_CODE_CLASS (code) == tcc_comparison)
3138 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3139 if (FLOAT_TYPE_P (op_type)
3140 && flag_trapping_math
3141 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3142 && code != NE_EXPR && code != EQ_EXPR)
3143 return NULL_TREE;
3145 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3146 if (code == ERROR_MARK)
3147 return NULL_TREE;
3149 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3150 TREE_OPERAND (arg, 1));
3153 switch (code)
3155 case INTEGER_CST:
3156 return constant_boolean_node (integer_zerop (arg), type);
3158 case TRUTH_AND_EXPR:
3159 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3160 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3161 return build2_loc (loc, TRUTH_OR_EXPR, type,
3162 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3163 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3165 case TRUTH_OR_EXPR:
3166 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3167 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3168 return build2_loc (loc, TRUTH_AND_EXPR, type,
3169 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3170 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3172 case TRUTH_XOR_EXPR:
3173 /* Here we can invert either operand. We invert the first operand
3174 unless the second operand is a TRUTH_NOT_EXPR in which case our
3175 result is the XOR of the first operand with the inside of the
3176 negation of the second operand. */
3178 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3179 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3180 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3181 else
3182 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3183 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3184 TREE_OPERAND (arg, 1));
3186 case TRUTH_ANDIF_EXPR:
3187 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3188 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3189 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3190 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3191 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3193 case TRUTH_ORIF_EXPR:
3194 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3195 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3196 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3197 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3198 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3200 case TRUTH_NOT_EXPR:
3201 return TREE_OPERAND (arg, 0);
3203 case COND_EXPR:
3205 tree arg1 = TREE_OPERAND (arg, 1);
3206 tree arg2 = TREE_OPERAND (arg, 2);
3208 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3209 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3211 /* A COND_EXPR may have a throw as one operand, which
3212 then has void type. Just leave void operands
3213 as they are. */
3214 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3215 VOID_TYPE_P (TREE_TYPE (arg1))
3216 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3217 VOID_TYPE_P (TREE_TYPE (arg2))
3218 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3221 case COMPOUND_EXPR:
3222 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3223 return build2_loc (loc, COMPOUND_EXPR, type,
3224 TREE_OPERAND (arg, 0),
3225 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3227 case NON_LVALUE_EXPR:
3228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3229 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3231 CASE_CONVERT:
3232 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3233 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3235 /* ... fall through ... */
3237 case FLOAT_EXPR:
3238 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3239 return build1_loc (loc, TREE_CODE (arg), type,
3240 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3242 case BIT_AND_EXPR:
3243 if (!integer_onep (TREE_OPERAND (arg, 1)))
3244 return NULL_TREE;
3245 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3247 case SAVE_EXPR:
3248 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3250 case CLEANUP_POINT_EXPR:
3251 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3252 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3253 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3255 default:
3256 return NULL_TREE;
3260 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3261 assume that ARG is an operation that returns a truth value (0 or 1
3262 for scalars, 0 or -1 for vectors). Return the folded expression if
3263 folding is successful. Otherwise, return NULL_TREE. */
3265 static tree
3266 fold_invert_truthvalue (location_t loc, tree arg)
3268 tree type = TREE_TYPE (arg);
3269 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3270 ? BIT_NOT_EXPR
3271 : TRUTH_NOT_EXPR,
3272 type, arg);
3275 /* Return a simplified tree node for the truth-negation of ARG. This
3276 never alters ARG itself. We assume that ARG is an operation that
3277 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3279 tree
3280 invert_truthvalue_loc (location_t loc, tree arg)
3282 if (TREE_CODE (arg) == ERROR_MARK)
3283 return arg;
3285 tree type = TREE_TYPE (arg);
3286 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3287 ? BIT_NOT_EXPR
3288 : TRUTH_NOT_EXPR,
3289 type, arg);
3292 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3293 operands are another bit-wise operation with a common input. If so,
3294 distribute the bit operations to save an operation and possibly two if
3295 constants are involved. For example, convert
3296 (A | B) & (A | C) into A | (B & C)
3297 Further simplification will occur if B and C are constants.
3299 If this optimization cannot be done, 0 will be returned. */
3301 static tree
3302 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3303 tree arg0, tree arg1)
3305 tree common;
3306 tree left, right;
3308 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3309 || TREE_CODE (arg0) == code
3310 || (TREE_CODE (arg0) != BIT_AND_EXPR
3311 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3312 return 0;
3314 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3316 common = TREE_OPERAND (arg0, 0);
3317 left = TREE_OPERAND (arg0, 1);
3318 right = TREE_OPERAND (arg1, 1);
3320 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3322 common = TREE_OPERAND (arg0, 0);
3323 left = TREE_OPERAND (arg0, 1);
3324 right = TREE_OPERAND (arg1, 0);
3326 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3328 common = TREE_OPERAND (arg0, 1);
3329 left = TREE_OPERAND (arg0, 0);
3330 right = TREE_OPERAND (arg1, 1);
3332 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3334 common = TREE_OPERAND (arg0, 1);
3335 left = TREE_OPERAND (arg0, 0);
3336 right = TREE_OPERAND (arg1, 0);
3338 else
3339 return 0;
3341 common = fold_convert_loc (loc, type, common);
3342 left = fold_convert_loc (loc, type, left);
3343 right = fold_convert_loc (loc, type, right);
3344 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3345 fold_build2_loc (loc, code, type, left, right));
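/* Worked example (illustrative): for (x | 4) & (x | 6) the common
   input is x, so the result is x | (4 & 6); constant folding of
   4 & 6 then yields x | 4 -- one bit operation instead of three.  */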
3348 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3349 with code CODE. This optimization is unsafe. */
3350 static tree
3351 distribute_real_division (location_t loc, enum tree_code code, tree type,
3352 tree arg0, tree arg1)
3354 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3355 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3357 /* (A / C) +- (B / C) -> (A +- B) / C. */
3358 if (mul0 == mul1
3359 && operand_equal_p (TREE_OPERAND (arg0, 1),
3360 TREE_OPERAND (arg1, 1), 0))
3361 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3362 fold_build2_loc (loc, code, type,
3363 TREE_OPERAND (arg0, 0),
3364 TREE_OPERAND (arg1, 0)),
3365 TREE_OPERAND (arg0, 1));
3367 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3368 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3369 TREE_OPERAND (arg1, 0), 0)
3370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3371 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3373 REAL_VALUE_TYPE r0, r1;
3374 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3375 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3376 if (!mul0)
3377 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3378 if (!mul1)
3379 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3380 real_arithmetic (&r0, code, &r0, &r1);
3381 return fold_build2_loc (loc, MULT_EXPR, type,
3382 TREE_OPERAND (arg0, 0),
3383 build_real (type, r0));
3386 return NULL_TREE;
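/* Worked example (illustrative, valid only under unsafe math as noted
   above): a/4.0 + b/4.0 becomes (a + b) / 4.0 by the first rule, and
   a/4.0 + a/2.0 becomes a * (0.25 + 0.5), i.e. a * 0.75, by the
   second.  Both can change rounding, which is why the transform is
   flagged unsafe.  */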
3389 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3390 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3392 static tree
3393 make_bit_field_ref (location_t loc, tree inner, tree type,
3394 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3396 tree result, bftype;
3398 if (bitpos == 0)
3400 tree size = TYPE_SIZE (TREE_TYPE (inner));
3401 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3402 || POINTER_TYPE_P (TREE_TYPE (inner)))
3403 && tree_fits_shwi_p (size)
3404 && tree_to_shwi (size) == bitsize)
3405 return fold_convert_loc (loc, type, inner);
3408 bftype = type;
3409 if (TYPE_PRECISION (bftype) != bitsize
3410 || TYPE_UNSIGNED (bftype) == !unsignedp)
3411 bftype = build_nonstandard_integer_type (bitsize, 0);
3413 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3414 size_int (bitsize), bitsize_int (bitpos));
3416 if (bftype != type)
3417 result = fold_convert_loc (loc, type, result);
3419 return result;
3422 /* Optimize a bit-field compare.
3424 There are two cases: First is a compare against a constant and the
3425 second is a comparison of two items where the fields are at the same
3426 bit position relative to the start of a chunk (byte, halfword, word)
3427 large enough to contain it. In these cases we can avoid the shift
3428 implicit in bitfield extractions.
3430 For constants, we emit a compare of the shifted constant with the
3431 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3432 compared. For two fields at the same position, we do the ANDs with the
3433 similar mask and compare the result of the ANDs.
3435 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3436 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3437 are the left and right operands of the comparison, respectively.
3439 If the optimization described above can be done, we return the resulting
3440 tree. Otherwise we return zero. */
3442 static tree
3443 optimize_bit_field_compare (location_t loc, enum tree_code code,
3444 tree compare_type, tree lhs, tree rhs)
3446 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3447 tree type = TREE_TYPE (lhs);
3448 tree unsigned_type;
3449 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3450 enum machine_mode lmode, rmode, nmode;
3451 int lunsignedp, runsignedp;
3452 int lvolatilep = 0, rvolatilep = 0;
3453 tree linner, rinner = NULL_TREE;
3454 tree mask;
3455 tree offset;
3457 /* Get all the information about the extractions being done. If the bit size
3458 is the same as the size of the underlying object, we aren't doing an
3459 extraction at all and so can do nothing. We also don't want to
3460 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3461 then will no longer be able to replace it. */
3462 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3463 &lunsignedp, &lvolatilep, false);
3464 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3465 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3466 return 0;
3468 if (!const_p)
3470 /* If this is not a constant, we can only do something if bit positions,
3471 sizes, and signedness are the same. */
3472 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3473 &runsignedp, &rvolatilep, false);
3475 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3476 || lunsignedp != runsignedp || offset != 0
3477 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3478 return 0;
3481 /* See if we can find a mode to refer to this field. We should be able to,
3482 but fail if we can't. */
3483 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3484 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3485 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3486 TYPE_ALIGN (TREE_TYPE (rinner))),
3487 word_mode, false);
3488 if (nmode == VOIDmode)
3489 return 0;
3491 /* Set signed and unsigned types of the precision of this mode for the
3492 shifts below. */
3493 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3495 /* Compute the bit position and size for the new reference and our offset
3496 within it. If the new reference is the same size as the original, we
3497 won't optimize anything, so return zero. */
3498 nbitsize = GET_MODE_BITSIZE (nmode);
3499 nbitpos = lbitpos & ~ (nbitsize - 1);
3500 lbitpos -= nbitpos;
3501 if (nbitsize == lbitsize)
3502 return 0;
3504 if (BYTES_BIG_ENDIAN)
3505 lbitpos = nbitsize - lbitsize - lbitpos;
3507 /* Make the mask to be used against the extracted field. */
3508 mask = build_int_cst_type (unsigned_type, -1);
3509 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3510 mask = const_binop (RSHIFT_EXPR, mask,
3511 size_int (nbitsize - lbitsize - lbitpos));
3513 if (! const_p)
3514 /* If not comparing with constant, just rework the comparison
3515 and return. */
3516 return fold_build2_loc (loc, code, compare_type,
3517 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3518 make_bit_field_ref (loc, linner,
3519 unsigned_type,
3520 nbitsize, nbitpos,
3521 1),
3522 mask),
3523 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3524 make_bit_field_ref (loc, rinner,
3525 unsigned_type,
3526 nbitsize, nbitpos,
3527 1),
3528 mask));
3530 /* Otherwise, we are handling the constant case. See if the constant is too
3531 big for the field. Warn and return a tree for 0 (false) if so. We do
3532 this not only for its own sake, but to avoid having to test for this
3533 error case below. If we didn't, we might generate wrong code.
3535 For unsigned fields, the constant shifted right by the field length should
3536 be all zero. For signed fields, the high-order bits should agree with
3537 the sign bit. */
3539 if (lunsignedp)
3541 if (wi::lrshift (rhs, lbitsize) != 0)
3543 warning (0, "comparison is always %d due to width of bit-field",
3544 code == NE_EXPR);
3545 return constant_boolean_node (code == NE_EXPR, compare_type);
3548 else
3550 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3551 if (tem != 0 && tem != -1)
3553 warning (0, "comparison is always %d due to width of bit-field",
3554 code == NE_EXPR);
3555 return constant_boolean_node (code == NE_EXPR, compare_type);
3559 /* Single-bit compares should always be against zero. */
3560 if (lbitsize == 1 && ! integer_zerop (rhs))
3562 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3563 rhs = build_int_cst (type, 0);
3566 /* Make a new bitfield reference, shift the constant over the
3567 appropriate number of bits and mask it with the computed mask
3568 (in case this was a signed field). If we changed it, make a new one. */
3569 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3571 rhs = const_binop (BIT_AND_EXPR,
3572 const_binop (LSHIFT_EXPR,
3573 fold_convert_loc (loc, unsigned_type, rhs),
3574 size_int (lbitpos)),
3575 mask);
3577 lhs = build2_loc (loc, code, compare_type,
3578 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3579 return lhs;
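/* Worked example (illustrative, with a hypothetical declaration
   struct s { unsigned f : 3; } *p): the test p->f == 5 need not
   extract the field with shifts.  The word containing the field is
   loaded once, masked to the field's bits, and compared against the
   constant shifted into place:

     (word & mask) == ((5 << lbitpos) & mask)

   which is exactly what the const_binop calls above construct.  */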
3582 /* Subroutine for fold_truth_andor_1: decode a field reference.
3584 If EXP is a comparison reference, we return the innermost reference.
3586 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3587 set to the starting bit number.
3589 If the innermost field can be completely contained in a mode-sized
3590 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3592 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3593 otherwise it is not changed.
3595 *PUNSIGNEDP is set to the signedness of the field.
3597 *PMASK is set to the mask used. This is either contained in a
3598 BIT_AND_EXPR or derived from the width of the field.
3600 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3602 Return 0 if this is not a component reference or is one that we can't
3603 do anything with. */
3605 static tree
3606 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3607 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3608 int *punsignedp, int *pvolatilep,
3609 tree *pmask, tree *pand_mask)
3611 tree outer_type = 0;
3612 tree and_mask = 0;
3613 tree mask, inner, offset;
3614 tree unsigned_type;
3615 unsigned int precision;
3617 /* All the optimizations using this function assume integer fields.
3618 There are problems with FP fields since the type_for_size call
3619 below can fail for, e.g., XFmode. */
3620 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3621 return 0;
3623 /* We are interested in the bare arrangement of bits, so strip everything
3624 that doesn't affect the machine mode. However, record the type of the
3625 outermost expression if it may matter below. */
3626 if (CONVERT_EXPR_P (exp)
3627 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3628 outer_type = TREE_TYPE (exp);
3629 STRIP_NOPS (exp);
3631 if (TREE_CODE (exp) == BIT_AND_EXPR)
3633 and_mask = TREE_OPERAND (exp, 1);
3634 exp = TREE_OPERAND (exp, 0);
3635 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3636 if (TREE_CODE (and_mask) != INTEGER_CST)
3637 return 0;
3640 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3641 punsignedp, pvolatilep, false);
3642 if ((inner == exp && and_mask == 0)
3643 || *pbitsize < 0 || offset != 0
3644 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3645 return 0;
3647 /* If the number of bits in the reference is the same as the bitsize of
3648 the outer type, then the outer type gives the signedness. Otherwise
3649 (in case of a small bitfield) the signedness is unchanged. */
3650 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3651 *punsignedp = TYPE_UNSIGNED (outer_type);
3653 /* Compute the mask to access the bitfield. */
3654 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3655 precision = TYPE_PRECISION (unsigned_type);
3657 mask = build_int_cst_type (unsigned_type, -1);
3659 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3660 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3662 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3663 if (and_mask != 0)
3664 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3665 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3667 *pmask = mask;
3668 *pand_mask = and_mask;
3669 return inner;
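/* Worked example (illustrative): the two const_binop shifts build a
   mask of *PBITSIZE low-order ones in PRECISION bits.  For an 8-bit
   field and a 32-bit unsigned type:

     mask = -1          0xffffffff
     mask <<= 24        0xff000000
     mask >>= 24        0x000000ff

   and any mask found in an enclosing BIT_AND_EXPR is ANDed into it.  */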
3672 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3673 bit positions and MASK has a signed type. */
3675 static int
3676 all_ones_mask_p (const_tree mask, unsigned int size)
3678 tree type = TREE_TYPE (mask);
3679 unsigned int precision = TYPE_PRECISION (type);
3681 /* If this function returns true when the type of the mask is
3682 UNSIGNED, then there will be errors. In particular see
3683 gcc.c-torture/execute/990326-1.c. There does not appear to be
3684 any documentation paper trail as to why this is so. But the
3685 pre-wide-int code worked with that restriction and it has been preserved
3686 here. */
3687 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3688 return false;
3690 return wi::mask (size, false, precision) == mask;
3693 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3694 represents the sign bit of EXP's type. If EXP represents a sign
3695 or zero extension, also test VAL against the unextended type.
3696 The return value is the (sub)expression whose sign bit is VAL,
3697 or NULL_TREE otherwise. */
3699 static tree
3700 sign_bit_p (tree exp, const_tree val)
3702 int width;
3703 tree t;
3705 /* Tree EXP must have an integral type. */
3706 t = TREE_TYPE (exp);
3707 if (! INTEGRAL_TYPE_P (t))
3708 return NULL_TREE;
3710 /* Tree VAL must be an integer constant. */
3711 if (TREE_CODE (val) != INTEGER_CST
3712 || TREE_OVERFLOW (val))
3713 return NULL_TREE;
3715 width = TYPE_PRECISION (t);
3716 if (wi::only_sign_bit_p (val, width))
3717 return exp;
3719 /* Handle extension from a narrower type. */
3720 if (TREE_CODE (exp) == NOP_EXPR
3721 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3722 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3724 return NULL_TREE;
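/* Worked example (illustrative): for a 16-bit signed EXP, VAL must be
   exactly 0x8000 -- only the sign bit set -- for EXP to be returned.
   If EXP is a widening NOP_EXPR of an 8-bit value, the recursion also
   accepts 0x80, the sign bit of the unextended type.  */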
3727 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3728 to be evaluated unconditionally. */
3730 static int
3731 simple_operand_p (const_tree exp)
3733 /* Strip any conversions that don't change the machine mode. */
3734 STRIP_NOPS (exp);
3736 return (CONSTANT_CLASS_P (exp)
3737 || TREE_CODE (exp) == SSA_NAME
3738 || (DECL_P (exp)
3739 && ! TREE_ADDRESSABLE (exp)
3740 && ! TREE_THIS_VOLATILE (exp)
3741 && ! DECL_NONLOCAL (exp)
3742 /* Don't regard global variables as simple. They may be
3743 allocated in ways unknown to the compiler (shared memory,
3744 #pragma weak, etc). */
3745 && ! TREE_PUBLIC (exp)
3746 && ! DECL_EXTERNAL (exp)
3747 /* Weakrefs are not safe to read, since they can be NULL.
3748 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3749 have DECL_WEAK flag set. */
3750 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3751 /* Loading a static variable is unduly expensive, but global
3752 registers aren't expensive. */
3753 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3756 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3757 to be evaluated unconditionally.
3758 In addition to simple_operand_p, we assume that comparisons, conversions,
3759 and logic-not operations are simple, if their operands are simple, too. */
3761 static bool
3762 simple_operand_p_2 (tree exp)
3764 enum tree_code code;
3766 if (TREE_SIDE_EFFECTS (exp)
3767 || tree_could_trap_p (exp))
3768 return false;
3770 while (CONVERT_EXPR_P (exp))
3771 exp = TREE_OPERAND (exp, 0);
3773 code = TREE_CODE (exp);
3775 if (TREE_CODE_CLASS (code) == tcc_comparison)
3776 return (simple_operand_p (TREE_OPERAND (exp, 0))
3777 && simple_operand_p (TREE_OPERAND (exp, 1)));
3779 if (code == TRUTH_NOT_EXPR)
3780 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3782 return simple_operand_p (exp);
3786 /* The following functions are subroutines to fold_range_test and allow it to
3787 try to change a logical combination of comparisons into a range test.
3789 For example, both
3790 X == 2 || X == 3 || X == 4 || X == 5
3791 and
3792 X >= 2 && X <= 5
3793 are converted to
3794 (unsigned) (X - 2) <= 3
3796 We describe each set of comparisons as being either inside or outside
3797 a range, using a variable named like IN_P, and then describe the
3798 range with a lower and upper bound. If one of the bounds is omitted,
3799 it represents either the highest or lowest value of the type.
3801 In the comments below, we represent a range by two numbers in brackets
3802 preceded by a "+" to designate being inside that range, or a "-" to
3803 designate being outside that range, so the condition can be inverted by
3804 flipping the prefix. An omitted bound is represented by a "-". For
3805 example, "- [-, 10]" means being outside the range starting at the lowest
3806 possible value and ending at 10, in other words, being greater than 10.
3807 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3808 always false.
3810 We set up things so that the missing bounds are handled in a consistent
3811 manner so neither a missing bound nor "true" and "false" need to be
3812 handled using a special case. */
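/* A minimal standalone sketch (illustrative only, and ignoring the
   signed-overflow corner case at INT_MIN that the real code guards with
   TYPE_OVERFLOW_UNDEFINED checks): both functions below compute the same
   truth value for all other X, which is the equivalence this machinery
   exploits.  */
#if 0
static int
in_range_long_form (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
in_range_folded (int x)
{
  return (unsigned) (x - 2) <= 3u;
}
#endif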
3814 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3815 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3816 and UPPER1_P are nonzero if the respective argument is an upper bound
3817 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3818 must be specified for a comparison. ARG1 will be converted to ARG0's
3819 type if both are specified. */
3821 static tree
3822 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3823 tree arg1, int upper1_p)
3825 tree tem;
3826 int result;
3827 int sgn0, sgn1;
3829 /* If neither arg represents infinity, do the normal operation.
3830 Else, if not a comparison, return infinity. Else handle the special
3831 comparison rules. Note that most of the cases below won't occur, but
3832 are handled for consistency. */
3834 if (arg0 != 0 && arg1 != 0)
3836 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3837 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3838 STRIP_NOPS (tem);
3839 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3842 if (TREE_CODE_CLASS (code) != tcc_comparison)
3843 return 0;
3845 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3846 for neither. In real maths, we cannot assume open-ended ranges are
3847 the same. But this is computer arithmetic, where numbers are finite.
3848 We can therefore make the transformation of any unbounded range with
3849 the value Z, Z being greater than any representable number. This permits
3850 us to treat unbounded ranges as equal. */
3851 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3852 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3853 switch (code)
3855 case EQ_EXPR:
3856 result = sgn0 == sgn1;
3857 break;
3858 case NE_EXPR:
3859 result = sgn0 != sgn1;
3860 break;
3861 case LT_EXPR:
3862 result = sgn0 < sgn1;
3863 break;
3864 case LE_EXPR:
3865 result = sgn0 <= sgn1;
3866 break;
3867 case GT_EXPR:
3868 result = sgn0 > sgn1;
3869 break;
3870 case GE_EXPR:
3871 result = sgn0 >= sgn1;
3872 break;
3873 default:
3874 gcc_unreachable ();
3877 return constant_boolean_node (result, type);
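/* Illustrative example of the convention above: with ARG0 omitted as a
   lower bound (SGN0 == -1, acting as -infinity) and ARG1 omitted as an
   upper bound (SGN1 == +1, acting as +infinity), LT_EXPR yields true,
   just as comparing the extreme values of the type would.  */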
3880 /* Helper routine for make_range. Perform one step for it, return
3881 new expression if the loop should continue or NULL_TREE if it should
3882 stop. */
3884 tree
3885 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3886 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3887 bool *strict_overflow_p)
3889 tree arg0_type = TREE_TYPE (arg0);
3890 tree n_low, n_high, low = *p_low, high = *p_high;
3891 int in_p = *p_in_p, n_in_p;
3893 switch (code)
3895 case TRUTH_NOT_EXPR:
3896 /* We can only do something if the range is testing for zero. */
3897 if (low == NULL_TREE || high == NULL_TREE
3898 || ! integer_zerop (low) || ! integer_zerop (high))
3899 return NULL_TREE;
3900 *p_in_p = ! in_p;
3901 return arg0;
3903 case EQ_EXPR: case NE_EXPR:
3904 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3905 /* We can only do something if the range is testing for zero
3906 and if the second operand is an integer constant. Note that
3907 saying something is "in" the range we make is done by
3908 complementing IN_P, since it will be set in the initial case of
3909 being not equal to zero; "out" means leaving it alone. */
3910 if (low == NULL_TREE || high == NULL_TREE
3911 || ! integer_zerop (low) || ! integer_zerop (high)
3912 || TREE_CODE (arg1) != INTEGER_CST)
3913 return NULL_TREE;
3915 switch (code)
3917 case NE_EXPR: /* - [c, c] */
3918 low = high = arg1;
3919 break;
3920 case EQ_EXPR: /* + [c, c] */
3921 in_p = ! in_p, low = high = arg1;
3922 break;
3923 case GT_EXPR: /* - [-, c] */
3924 low = 0, high = arg1;
3925 break;
3926 case GE_EXPR: /* + [c, -] */
3927 in_p = ! in_p, low = arg1, high = 0;
3928 break;
3929 case LT_EXPR: /* - [c, -] */
3930 low = arg1, high = 0;
3931 break;
3932 case LE_EXPR: /* + [-, c] */
3933 in_p = ! in_p, low = 0, high = arg1;
3934 break;
3935 default:
3936 gcc_unreachable ();
3939 /* If this is an unsigned comparison, we also know that EXP is
3940 greater than or equal to zero. We base the range tests we make
3941 on that fact, so we record it here so we can parse existing
3942 range tests. We test arg0_type since often the return type
3943 of, e.g. EQ_EXPR, is boolean. */
3944 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3946 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3947 in_p, low, high, 1,
3948 build_int_cst (arg0_type, 0),
3949 NULL_TREE))
3950 return NULL_TREE;
3952 in_p = n_in_p, low = n_low, high = n_high;
3954 /* If the high bound is missing, but we have a nonzero low
3955 bound, reverse the range so it goes from zero to the low bound
3956 minus 1. */
3957 if (high == 0 && low && ! integer_zerop (low))
3959 in_p = ! in_p;
3960 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3961 build_int_cst (TREE_TYPE (low), 1), 0);
3962 low = build_int_cst (arg0_type, 0);
3966 *p_low = low;
3967 *p_high = high;
3968 *p_in_p = in_p;
3969 return arg0;
3971 case NEGATE_EXPR:
3972 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3973 low and high are non-NULL, then normalize will do the right thing. */
3974 if (!TYPE_UNSIGNED (arg0_type)
3975 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3977 if (low == NULL_TREE)
3978 low = TYPE_MIN_VALUE (arg0_type);
3979 if (high == NULL_TREE)
3980 high = TYPE_MAX_VALUE (arg0_type);
3983 /* (-x) IN [a,b] -> x in [-b, -a] */
3984 n_low = range_binop (MINUS_EXPR, exp_type,
3985 build_int_cst (exp_type, 0),
3986 0, high, 1);
3987 n_high = range_binop (MINUS_EXPR, exp_type,
3988 build_int_cst (exp_type, 0),
3989 0, low, 0);
3990 if (n_high != 0 && TREE_OVERFLOW (n_high))
3991 return NULL_TREE;
3992 goto normalize;
3994 case BIT_NOT_EXPR:
3995 /* ~ X -> -X - 1 */
3996 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3997 build_int_cst (exp_type, 1));
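    /* Illustrative example (two's complement): for X == 5, ~X is -6,
       which equals -X - 1, so a range test on ~X becomes a test on the
       MINUS_EXPR built above.  */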
3999 case PLUS_EXPR:
4000 case MINUS_EXPR:
4001 if (TREE_CODE (arg1) != INTEGER_CST)
4002 return NULL_TREE;
4004 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4005 move a constant to the other side. */
4006 if (!TYPE_UNSIGNED (arg0_type)
4007 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4008 return NULL_TREE;
4010 /* If EXP is signed, any overflow in the computation is undefined,
4011 so we don't worry about it so long as our computations on
4012 the bounds don't overflow. For unsigned, overflow is defined
4013 and this is exactly the right thing. */
4014 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4015 arg0_type, low, 0, arg1, 0);
4016 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4017 arg0_type, high, 1, arg1, 0);
4018 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4019 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4020 return NULL_TREE;
4022 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4023 *strict_overflow_p = true;
4025 normalize:
4026 /* Check for an unsigned range which has wrapped around the maximum
4027 value thus making n_high < n_low, and normalize it. */
4028 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4030 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4031 build_int_cst (TREE_TYPE (n_high), 1), 0);
4032 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4033 build_int_cst (TREE_TYPE (n_low), 1), 0);
4035 /* If the range is of the form +/- [ x+1, x ], we won't
4036 be able to normalize it. But then, it represents the
4037 whole range or the empty set, so make it
4038 +/- [ -, - ]. */
4039 if (tree_int_cst_equal (n_low, low)
4040 && tree_int_cst_equal (n_high, high))
4041 low = high = 0;
4042 else
4043 in_p = ! in_p;
4045 else
4046 low = n_low, high = n_high;
4048 *p_low = low;
4049 *p_high = high;
4050 *p_in_p = in_p;
4051 return arg0;
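    /* Illustrative example of the normalization above: over unsigned
       char, the wrapped range X in [250, 4] (i.e. X >= 250 || X <= 4)
       has n_high < n_low, so it is rewritten as its complement,
       X not in [5, 249], with IN_P flipped.  */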
4053 CASE_CONVERT:
4054 case NON_LVALUE_EXPR:
4055 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4056 return NULL_TREE;
4058 if (! INTEGRAL_TYPE_P (arg0_type)
4059 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4060 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4061 return NULL_TREE;
4063 n_low = low, n_high = high;
4065 if (n_low != 0)
4066 n_low = fold_convert_loc (loc, arg0_type, n_low);
4068 if (n_high != 0)
4069 n_high = fold_convert_loc (loc, arg0_type, n_high);
4071 /* If we're converting arg0 from an unsigned type to exp,
4072 a signed type, we will be doing the comparison as unsigned.
4073 The tests above have already verified that LOW and HIGH
4074 are both positive.
4076 So we have to ensure that we will handle large unsigned
4077 values the same way that the current signed bounds treat
4078 negative values. */
4080 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4082 tree high_positive;
4083 tree equiv_type;
4084 /* For fixed-point modes, we need to pass the saturating flag
4085 as the 2nd parameter. */
4086 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4087 equiv_type
4088 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4089 TYPE_SATURATING (arg0_type));
4090 else
4091 equiv_type
4092 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4094 /* A range without an upper bound is, naturally, unbounded.
4095 Since convert would have cropped a very large value, use
4096 the max value for the destination type. */
4097 high_positive
4098 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4099 : TYPE_MAX_VALUE (arg0_type);
4101 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4102 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4103 fold_convert_loc (loc, arg0_type,
4104 high_positive),
4105 build_int_cst (arg0_type, 1));
4107 /* If the low bound is specified, "and" the range with the
4108 range for which the original unsigned value will be
4109 positive. */
4110 if (low != 0)
4112 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4113 1, fold_convert_loc (loc, arg0_type,
4114 integer_zero_node),
4115 high_positive))
4116 return NULL_TREE;
4118 in_p = (n_in_p == in_p);
4120 else
4122 /* Otherwise, "or" the range with the range of the input
4123 that will be interpreted as negative. */
4124 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4125 1, fold_convert_loc (loc, arg0_type,
4126 integer_zero_node),
4127 high_positive))
4128 return NULL_TREE;
4130 in_p = (in_p != n_in_p);
4134 *p_low = n_low;
4135 *p_high = n_high;
4136 *p_in_p = in_p;
4137 return arg0;
4139 default:
4140 return NULL_TREE;
4144 /* Given EXP, a logical expression, set the range it is testing into
4145 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4146 actually being tested. *PLOW and *PHIGH will be made of the same
4147 type as the returned expression. If EXP is not a comparison, we
4148 will most likely not be returning a useful value and range. Set
4149 *STRICT_OVERFLOW_P to true if the return value is only valid
4150 because signed overflow is undefined; otherwise, do not change
4151 *STRICT_OVERFLOW_P. */
4153 tree
4154 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4155 bool *strict_overflow_p)
4157 enum tree_code code;
4158 tree arg0, arg1 = NULL_TREE;
4159 tree exp_type, nexp;
4160 int in_p;
4161 tree low, high;
4162 location_t loc = EXPR_LOCATION (exp);
4164 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4165 and see if we can refine the range. Some of the cases below may not
4166 happen, but it doesn't seem worth worrying about this. We "continue"
4167 the outer loop when we've changed something; otherwise we "break"
4168 the switch, which will "break" the while. */
4170 in_p = 0;
4171 low = high = build_int_cst (TREE_TYPE (exp), 0);
4173 while (1)
4175 code = TREE_CODE (exp);
4176 exp_type = TREE_TYPE (exp);
4177 arg0 = NULL_TREE;
4179 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4181 if (TREE_OPERAND_LENGTH (exp) > 0)
4182 arg0 = TREE_OPERAND (exp, 0);
4183 if (TREE_CODE_CLASS (code) == tcc_binary
4184 || TREE_CODE_CLASS (code) == tcc_comparison
4185 || (TREE_CODE_CLASS (code) == tcc_expression
4186 && TREE_OPERAND_LENGTH (exp) > 1))
4187 arg1 = TREE_OPERAND (exp, 1);
4189 if (arg0 == NULL_TREE)
4190 break;
4192 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4193 &high, &in_p, strict_overflow_p);
4194 if (nexp == NULL_TREE)
4195 break;
4196 exp = nexp;
4199 /* If EXP is a constant, we can evaluate whether this is true or false. */
4200 if (TREE_CODE (exp) == INTEGER_CST)
4202 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4203 exp, 0, low, 0))
4204 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4205 exp, 1, high, 1)));
4206 low = high = 0;
4207 exp = 0;
4210 *pin_p = in_p, *plow = low, *phigh = high;
4211 return exp;
4214 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4215 type, TYPE, return an expression to test if EXP is in (or out of, depending
4216 on IN_P) the range. Return 0 if the test couldn't be created. */
4218 tree
4219 build_range_check (location_t loc, tree type, tree exp, int in_p,
4220 tree low, tree high)
4222 tree etype = TREE_TYPE (exp), value;
4224 #ifdef HAVE_canonicalize_funcptr_for_compare
4225 /* Disable this optimization for function pointer expressions
4226 on targets that require function pointer canonicalization. */
4227 if (HAVE_canonicalize_funcptr_for_compare
4228 && TREE_CODE (etype) == POINTER_TYPE
4229 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4230 return NULL_TREE;
4231 #endif
4233 if (! in_p)
4235 value = build_range_check (loc, type, exp, 1, low, high);
4236 if (value != 0)
4237 return invert_truthvalue_loc (loc, value);
4239 return 0;
4242 if (low == 0 && high == 0)
4243 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4245 if (low == 0)
4246 return fold_build2_loc (loc, LE_EXPR, type, exp,
4247 fold_convert_loc (loc, etype, high));
4249 if (high == 0)
4250 return fold_build2_loc (loc, GE_EXPR, type, exp,
4251 fold_convert_loc (loc, etype, low));
4253 if (operand_equal_p (low, high, 0))
4254 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4255 fold_convert_loc (loc, etype, low));
4257 if (integer_zerop (low))
4259 if (! TYPE_UNSIGNED (etype))
4261 etype = unsigned_type_for (etype);
4262 high = fold_convert_loc (loc, etype, high);
4263 exp = fold_convert_loc (loc, etype, exp);
4265 return build_range_check (loc, type, exp, 1, 0, high);
4268 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4269 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4271 int prec = TYPE_PRECISION (etype);
4273 if (wi::mask (prec - 1, false, prec) == high)
4275 if (TYPE_UNSIGNED (etype))
4277 tree signed_etype = signed_type_for (etype);
4278 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4279 etype
4280 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4281 else
4282 etype = signed_etype;
4283 exp = fold_convert_loc (loc, etype, exp);
4285 return fold_build2_loc (loc, GT_EXPR, type, exp,
4286 build_int_cst (etype, 0));
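      /* Illustrative example: for an 8-bit unsigned C, the range
	 [1, 127] matches the mask test above (0x7f is a mask of
	 prec - 1 ones), so "c >= 1 && c <= 127" becomes
	 "(signed char) c > 0" -- the signed char values greater than
	 zero are exactly 1 .. 127.  */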
4290 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4291 This requires wrap-around arithmetic for the type of the expression.
4292 First make sure that arithmetic in this type is valid, then make sure
4293 that it wraps around. */
4294 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4295 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4296 TYPE_UNSIGNED (etype));
4298 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4300 tree utype, minv, maxv;
4302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4303 for the type in question, as we rely on this here. */
4304 utype = unsigned_type_for (etype);
4305 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4306 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4307 build_int_cst (TREE_TYPE (maxv), 1), 1);
4308 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4310 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4311 minv, 1, maxv, 1)))
4312 etype = utype;
4313 else
4314 return 0;
4317 high = fold_convert_loc (loc, etype, high);
4318 low = fold_convert_loc (loc, etype, low);
4319 exp = fold_convert_loc (loc, etype, exp);
4321 value = const_binop (MINUS_EXPR, high, low);
4324 if (POINTER_TYPE_P (etype))
4326 if (value != 0 && !TREE_OVERFLOW (value))
4328 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4329 return build_range_check (loc, type,
4330 fold_build_pointer_plus_loc (loc, exp, low),
4331 1, build_int_cst (etype, 0), value);
4333 return 0;
4336 if (value != 0 && !TREE_OVERFLOW (value))
4337 return build_range_check (loc, type,
4338 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4339 1, build_int_cst (etype, 0), value);
4341 return 0;
4344 /* Return the predecessor of VAL in its type, handling the infinite case. */
4346 static tree
4347 range_predecessor (tree val)
4349 tree type = TREE_TYPE (val);
4351 if (INTEGRAL_TYPE_P (type)
4352 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4353 return 0;
4354 else
4355 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4356 build_int_cst (TREE_TYPE (val), 1), 0);
4359 /* Return the successor of VAL in its type, handling the infinite case. */
4361 static tree
4362 range_successor (tree val)
4364 tree type = TREE_TYPE (val);
4366 if (INTEGRAL_TYPE_P (type)
4367 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4368 return 0;
4369 else
4370 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4371 build_int_cst (TREE_TYPE (val), 1), 0);
4374 /* Given two ranges, see if we can merge them into one. Return 1 if we
4375 can, 0 if we can't. Set the output range into the specified parameters. */
4377 bool
4378 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4379 tree high0, int in1_p, tree low1, tree high1)
4381 int no_overlap;
4382 int subset;
4383 int temp;
4384 tree tem;
4385 int in_p;
4386 tree low, high;
4387 int lowequal = ((low0 == 0 && low1 == 0)
4388 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4389 low0, 0, low1, 0)));
4390 int highequal = ((high0 == 0 && high1 == 0)
4391 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4392 high0, 1, high1, 1)));
4394 /* Make range 0 be the range that starts first, or ends last if they
4395 start at the same value. Swap them if it isn't. */
4396 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4397 low0, 0, low1, 0))
4398 || (lowequal
4399 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4400 high1, 1, high0, 1))))
4402 temp = in0_p, in0_p = in1_p, in1_p = temp;
4403 tem = low0, low0 = low1, low1 = tem;
4404 tem = high0, high0 = high1, high1 = tem;
4407 /* Now flag two cases, whether the ranges are disjoint or whether the
4408 second range is totally subsumed in the first. Note that the tests
4409 below are simplified by the ones above. */
4410 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4411 high0, 1, low1, 0));
4412 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4413 high1, 1, high0, 1));
4415 /* We now have four cases, depending on whether we are including or
4416 excluding the two ranges. */
4417 if (in0_p && in1_p)
4419 /* If they don't overlap, the result is false. If the second range
4420 is a subset it is the result. Otherwise, the range is from the start
4421 of the second to the end of the first. */
4422 if (no_overlap)
4423 in_p = 0, low = high = 0;
4424 else if (subset)
4425 in_p = 1, low = low1, high = high1;
4426 else
4427 in_p = 1, low = low1, high = high0;
4430 else if (in0_p && ! in1_p)
4432 /* If they don't overlap, the result is the first range. If they are
4433 equal, the result is false. If the second range is a subset of the
4434 first, and the ranges begin at the same place, we go from just after
4435 the end of the second range to the end of the first. If the second
4436 range is not a subset of the first, or if it is a subset and both
4437 ranges end at the same place, the range starts at the start of the
4438 first range and ends just before the second range.
4439 Otherwise, we can't describe this as a single range. */
4440 if (no_overlap)
4441 in_p = 1, low = low0, high = high0;
4442 else if (lowequal && highequal)
4443 in_p = 0, low = high = 0;
4444 else if (subset && lowequal)
4446 low = range_successor (high1);
4447 high = high0;
4448 in_p = 1;
4449 if (low == 0)
4451 /* We are in the weird situation where high0 > high1 but
4452 high1 has no successor. Punt. */
4453 return 0;
4456 else if (! subset || highequal)
4458 low = low0;
4459 high = range_predecessor (low1);
4460 in_p = 1;
4461 if (high == 0)
4463 /* low0 < low1 but low1 has no predecessor. Punt. */
4464 return 0;
4467 else
4468 return 0;
4471 else if (! in0_p && in1_p)
4473 /* If they don't overlap, the result is the second range. If the second
4474 is a subset of the first, the result is false. Otherwise,
4475 the range starts just after the first range and ends at the
4476 end of the second. */
4477 if (no_overlap)
4478 in_p = 1, low = low1, high = high1;
4479 else if (subset || highequal)
4480 in_p = 0, low = high = 0;
4481 else
4483 low = range_successor (high0);
4484 high = high1;
4485 in_p = 1;
4486 if (low == 0)
4488 /* high1 > high0 but high0 has no successor. Punt. */
4489 return 0;
4494 else
4496 /* The case where we are excluding both ranges. Here the complex case
4497 is if they don't overlap. In that case, the only time we have a
4498 range is if they are adjacent. If the second is a subset of the
4499 first, the result is the first. Otherwise, the range to exclude
4500 starts at the beginning of the first range and ends at the end of the
4501 second. */
4502 if (no_overlap)
4504 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4505 range_successor (high0),
4506 1, low1, 0)))
4507 in_p = 0, low = low0, high = high1;
4508 else
4510 /* Canonicalize - [min, x] into - [-, x]. */
4511 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4512 switch (TREE_CODE (TREE_TYPE (low0)))
4514 case ENUMERAL_TYPE:
4515 if (TYPE_PRECISION (TREE_TYPE (low0))
4516 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4517 break;
4518 /* FALLTHROUGH */
4519 case INTEGER_TYPE:
4520 if (tree_int_cst_equal (low0,
4521 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4522 low0 = 0;
4523 break;
4524 case POINTER_TYPE:
4525 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4526 && integer_zerop (low0))
4527 low0 = 0;
4528 break;
4529 default:
4530 break;
4533 /* Canonicalize - [x, max] into - [x, -]. */
4534 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4535 switch (TREE_CODE (TREE_TYPE (high1)))
4537 case ENUMERAL_TYPE:
4538 if (TYPE_PRECISION (TREE_TYPE (high1))
4539 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4540 break;
4541 /* FALLTHROUGH */
4542 case INTEGER_TYPE:
4543 if (tree_int_cst_equal (high1,
4544 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4545 high1 = 0;
4546 break;
4547 case POINTER_TYPE:
4548 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4549 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4550 high1, 1,
4551 build_int_cst (TREE_TYPE (high1), 1),
4552 1)))
4553 high1 = 0;
4554 break;
4555 default:
4556 break;
4559 /* The ranges might also be adjacent between the maximum and
4560 minimum values of the given type. For
4561 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4562 return + [x + 1, y - 1]. */
4563 if (low0 == 0 && high1 == 0)
4565 low = range_successor (high0);
4566 high = range_predecessor (low1);
4567 if (low == 0 || high == 0)
4568 return 0;
4570 in_p = 1;
4572 else
4573 return 0;
4576 else if (subset)
4577 in_p = 0, low = low0, high = high0;
4578 else
4579 in_p = 0, low = low0, high = high1;
4582 *pin_p = in_p, *plow = low, *phigh = high;
4583 return 1;
4587 /* Subroutine of fold, looking inside expressions of the form
4588 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4589 of the COND_EXPR. This function is being used also to optimize
4590 A op B ? C : A, by reversing the comparison first.
4592 Return a folded expression whose code is not a COND_EXPR
4593 anymore, or NULL_TREE if no folding opportunity is found. */
4595 static tree
4596 fold_cond_expr_with_comparison (location_t loc, tree type,
4597 tree arg0, tree arg1, tree arg2)
4599 enum tree_code comp_code = TREE_CODE (arg0);
4600 tree arg00 = TREE_OPERAND (arg0, 0);
4601 tree arg01 = TREE_OPERAND (arg0, 1);
4602 tree arg1_type = TREE_TYPE (arg1);
4603 tree tem;
4605 STRIP_NOPS (arg1);
4606 STRIP_NOPS (arg2);
4608 /* If we have A op 0 ? A : -A, consider applying the following
4609 transformations:
4611 A == 0? A : -A same as -A
4612 A != 0? A : -A same as A
4613 A >= 0? A : -A same as abs (A)
4614 A > 0? A : -A same as abs (A)
4615 A <= 0? A : -A same as -abs (A)
4616 A < 0? A : -A same as -abs (A)
4618 None of these transformations work for modes with signed
4619 zeros. If A is +/-0, the first two transformations will
4620 change the sign of the result (from +0 to -0, or vice
4621 versa). The last four will fix the sign of the result,
4622 even though the original expressions could be positive or
4623 negative, depending on the sign of A.
4625 Note that all these transformations are correct if A is
4626 NaN, since the two alternatives (A and -A) are also NaNs. */
4627 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4628 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4629 ? real_zerop (arg01)
4630 : integer_zerop (arg01))
4631 && ((TREE_CODE (arg2) == NEGATE_EXPR
4632 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4633 /* In the case that A is of the form X-Y, '-A' (arg2) may
4634 have already been folded to Y-X, check for that. */
4635 || (TREE_CODE (arg1) == MINUS_EXPR
4636 && TREE_CODE (arg2) == MINUS_EXPR
4637 && operand_equal_p (TREE_OPERAND (arg1, 0),
4638 TREE_OPERAND (arg2, 1), 0)
4639 && operand_equal_p (TREE_OPERAND (arg1, 1),
4640 TREE_OPERAND (arg2, 0), 0))))
4641 switch (comp_code)
4643 case EQ_EXPR:
4644 case UNEQ_EXPR:
4645 tem = fold_convert_loc (loc, arg1_type, arg1);
4646 return pedantic_non_lvalue_loc (loc,
4647 fold_convert_loc (loc, type,
4648 negate_expr (tem)));
4649 case NE_EXPR:
4650 case LTGT_EXPR:
4651 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4652 case UNGE_EXPR:
4653 case UNGT_EXPR:
4654 if (flag_trapping_math)
4655 break;
4656 /* Fall through. */
4657 case GE_EXPR:
4658 case GT_EXPR:
4659 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4660 arg1 = fold_convert_loc (loc, signed_type_for
4661 (TREE_TYPE (arg1)), arg1);
4662 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4663 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4664 case UNLE_EXPR:
4665 case UNLT_EXPR:
4666 if (flag_trapping_math)
4667 break;
4668 case LE_EXPR:
4669 case LT_EXPR:
4670 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 arg1 = fold_convert_loc (loc, signed_type_for
4672 (TREE_TYPE (arg1)), arg1);
4673 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 return negate_expr (fold_convert_loc (loc, type, tem));
4675 default:
4676 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4677 break;
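    /* Concrete instance of the folds above, for A == -3:
       "A >= 0 ? A : -A" yields 3, i.e. ABS_EXPR <A>, and
       "A <= 0 ? A : -A" yields -3, i.e. -ABS_EXPR <A>.  The conversion
       to a signed type above keeps ABS_EXPR meaningful when A's
       original type is unsigned.  */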
4680 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4681 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4682 both transformations are correct when A is NaN: A != 0
4683 is then true, and A == 0 is false. */
4685 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4686 && integer_zerop (arg01) && integer_zerop (arg2))
4688 if (comp_code == NE_EXPR)
4689 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4690 else if (comp_code == EQ_EXPR)
4691 return build_zero_cst (type);
4694 /* Try some transformations of A op B ? A : B.
4696 A == B? A : B same as B
4697 A != B? A : B same as A
4698 A >= B? A : B same as max (A, B)
4699 A > B? A : B same as max (B, A)
4700 A <= B? A : B same as min (A, B)
4701 A < B? A : B same as min (B, A)
4703 As above, these transformations don't work in the presence
4704 of signed zeros. For example, if A and B are zeros of
4705 opposite sign, the first two transformations will change
4706 the sign of the result. In the last four, the original
4707 expressions give different results for (A=+0, B=-0) and
4708 (A=-0, B=+0), but the transformed expressions do not.
4710 The first two transformations are correct if either A or B
4711 is a NaN. In the first transformation, the condition will
4712 be false, and B will indeed be chosen. In the case of the
4713 second transformation, the condition A != B will be true,
4714 and A will be chosen.
4716 The conversions to max() and min() are not correct if B is
4717 a number and A is not. The conditions in the original
4718 expressions will be false, so all four give B. The min()
4719 and max() versions would give a NaN instead. */
4720 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4721 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4722 /* Avoid these transformations if the COND_EXPR may be used
4723 as an lvalue in the C++ front-end. PR c++/19199. */
4724 && (in_gimple_form
4725 || VECTOR_TYPE_P (type)
4726 || (strcmp (lang_hooks.name, "GNU C++") != 0
4727 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4728 || ! maybe_lvalue_p (arg1)
4729 || ! maybe_lvalue_p (arg2)))
4731 tree comp_op0 = arg00;
4732 tree comp_op1 = arg01;
4733 tree comp_type = TREE_TYPE (comp_op0);
4735 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4736 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4738 comp_type = type;
4739 comp_op0 = arg1;
4740 comp_op1 = arg2;
4743 switch (comp_code)
4745 case EQ_EXPR:
4746 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4747 case NE_EXPR:
4748 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4749 case LE_EXPR:
4750 case LT_EXPR:
4751 case UNLE_EXPR:
4752 case UNLT_EXPR:
4753 /* In C++ a ?: expression can be an lvalue, so put the
4754 operand which will be used if they are equal first
4755 so that we can convert this back to the
4756 corresponding COND_EXPR. */
4757 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4759 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4760 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4761 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4762 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4763 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4764 comp_op1, comp_op0);
4765 return pedantic_non_lvalue_loc (loc,
4766 fold_convert_loc (loc, type, tem));
4768 break;
4769 case GE_EXPR:
4770 case GT_EXPR:
4771 case UNGE_EXPR:
4772 case UNGT_EXPR:
4773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4775 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4776 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4777 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4778 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4779 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4780 comp_op1, comp_op0);
4781 return pedantic_non_lvalue_loc (loc,
4782 fold_convert_loc (loc, type, tem));
4784 break;
4785 case UNEQ_EXPR:
4786 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4787 return pedantic_non_lvalue_loc (loc,
4788 fold_convert_loc (loc, type, arg2));
4789 break;
4790 case LTGT_EXPR:
4791 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4792 return pedantic_non_lvalue_loc (loc,
4793 fold_convert_loc (loc, type, arg1));
4794 break;
4795 default:
4796 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4797 break;
4801 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4802 we might still be able to simplify this. For example,
4803 if C1 is one less or one more than C2, this might have started
4804 out as a MIN or MAX and been transformed by this function.
4805 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4807 if (INTEGRAL_TYPE_P (type)
4808 && TREE_CODE (arg01) == INTEGER_CST
4809 && TREE_CODE (arg2) == INTEGER_CST)
4810 switch (comp_code)
4812 case EQ_EXPR:
4813 if (TREE_CODE (arg1) == INTEGER_CST)
4814 break;
4815 /* We can replace A with C1 in this case. */
4816 arg1 = fold_convert_loc (loc, type, arg01);
4817 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4819 case LT_EXPR:
4820 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4821 MIN_EXPR, to preserve the signedness of the comparison. */
4822 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4823 OEP_ONLY_CONST)
4824 && operand_equal_p (arg01,
4825 const_binop (PLUS_EXPR, arg2,
4826 build_int_cst (type, 1)),
4827 OEP_ONLY_CONST))
4829 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4830 fold_convert_loc (loc, TREE_TYPE (arg00),
4831 arg2));
4832 return pedantic_non_lvalue_loc (loc,
4833 fold_convert_loc (loc, type, tem));
4835 break;
4837 case LE_EXPR:
4838 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4839 as above. */
4840 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4841 OEP_ONLY_CONST)
4842 && operand_equal_p (arg01,
4843 const_binop (MINUS_EXPR, arg2,
4844 build_int_cst (type, 1)),
4845 OEP_ONLY_CONST))
4847 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4848 fold_convert_loc (loc, TREE_TYPE (arg00),
4849 arg2));
4850 return pedantic_non_lvalue_loc (loc,
4851 fold_convert_loc (loc, type, tem));
4853 break;
4855 case GT_EXPR:
4856 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4857 MAX_EXPR, to preserve the signedness of the comparison. */
4858 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4859 OEP_ONLY_CONST)
4860 && operand_equal_p (arg01,
4861 const_binop (MINUS_EXPR, arg2,
4862 build_int_cst (type, 1)),
4863 OEP_ONLY_CONST))
4865 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4866 fold_convert_loc (loc, TREE_TYPE (arg00),
4867 arg2));
4868 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4870 break;
4872 case GE_EXPR:
4873 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4874 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4875 OEP_ONLY_CONST)
4876 && operand_equal_p (arg01,
4877 const_binop (PLUS_EXPR, arg2,
4878 build_int_cst (type, 1)),
4879 OEP_ONLY_CONST))
4881 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4882 fold_convert_loc (loc, TREE_TYPE (arg00),
4883 arg2));
4884 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4886 break;
4887 case NE_EXPR:
4888 break;
4889 default:
4890 gcc_unreachable ();
4893 return NULL_TREE;
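/* Illustrative sketch (not part of this file): a concrete instance of the
   LT_EXPR case above.  With C1 == 3 and C2 == 2 (C1 == C2 + 1),
   "a < 3 ? a : 2" is min (a, 2), which is what the MIN_EXPR rebuild
   recovers.  */
#if 0
static int
cond_min_const_example (int a)
{
  return a < 3 ? a : 2;   /* == MIN_EXPR <a, 2> */
}
#endif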
4898 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4899 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4900 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4901 false) >= 2)
4902 #endif
4904 /* EXP is some logical combination of boolean tests. See if we can
4905 merge it into some range test. Return the new tree if so. */
4907 static tree
4908 fold_range_test (location_t loc, enum tree_code code, tree type,
4909 tree op0, tree op1)
4911 int or_op = (code == TRUTH_ORIF_EXPR
4912 || code == TRUTH_OR_EXPR);
4913 int in0_p, in1_p, in_p;
4914 tree low0, low1, low, high0, high1, high;
4915 bool strict_overflow_p = false;
4916 tree tem, lhs, rhs;
4917 const char * const warnmsg = G_("assuming signed overflow does not occur "
4918 "when simplifying range test");
4920 if (!INTEGRAL_TYPE_P (type))
4921 return 0;
4923 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4924 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4926 /* If this is an OR operation, invert both sides; we will invert
4927 again at the end. */
4928 if (or_op)
4929 in0_p = ! in0_p, in1_p = ! in1_p;
4931 /* If both expressions are the same, if we can merge the ranges, and we
4932 can build the range test, return it or it inverted. If one of the
4933 ranges is always true or always false, consider it to be the same
4934 expression as the other. */
4935 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4936 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4937 in1_p, low1, high1)
4938 && 0 != (tem = (build_range_check (loc, type,
4939 lhs != 0 ? lhs
4940 : rhs != 0 ? rhs : integer_zero_node,
4941 in_p, low, high))))
4943 if (strict_overflow_p)
4944 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4945 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4948 /* On machines where the branch cost is expensive, if this is a
4949 short-circuited branch and the underlying object on both sides
4950 is the same, make a non-short-circuit operation. */
4951 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4952 && lhs != 0 && rhs != 0
4953 && (code == TRUTH_ANDIF_EXPR
4954 || code == TRUTH_ORIF_EXPR)
4955 && operand_equal_p (lhs, rhs, 0))
4957 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4958 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4959 which cases we can't do this. */
4960 if (simple_operand_p (lhs))
4961 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4963 type, op0, op1);
4965 else if (!lang_hooks.decls.global_bindings_p ()
4966 && !CONTAINS_PLACEHOLDER_P (lhs))
4968 tree common = save_expr (lhs);
4970 if (0 != (lhs = build_range_check (loc, type, common,
4971 or_op ? ! in0_p : in0_p,
4972 low0, high0))
4973 && (0 != (rhs = build_range_check (loc, type, common,
4974 or_op ? ! in1_p : in1_p,
4975 low1, high1))))
4977 if (strict_overflow_p)
4978 fold_overflow_warning (warnmsg,
4979 WARN_STRICT_OVERFLOW_COMPARISON);
4980 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4981 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4982 type, lhs, rhs);
4987 return 0;
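/* Illustrative sketch of the classic payoff of this routine: the two
   comparisons in "ch >= '0' && ch <= '9'" merge into a single unsigned
   range check.  */
#if 0
static int
is_digit_example (unsigned char ch)
{
  return (unsigned) (ch - '0') <= 9u;
}
#endif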
4990 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4991 bit value. Arrange things so the extra bits will be set to zero if and
4992 only if C is sign-extended to its full width. If MASK is nonzero,
4993 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4995 static tree
4996 unextend (tree c, int p, int unsignedp, tree mask)
4998 tree type = TREE_TYPE (c);
4999 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5000 tree temp;
5002 if (p == modesize || unsignedp)
5003 return c;
5005 /* We work by getting just the sign bit into the low-order bit, then
5006 into the high-order bit, then sign-extend. We then XOR that value
5007 with C. */
5008 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5010 /* We must use a signed type in order to get an arithmetic right shift.
5011 However, we must also avoid introducing accidental overflows, so that
5012 a subsequent call to integer_zerop will work. Hence we must
5013 do the type conversion here. At this point, the constant is either
5014 zero or one, and the conversion to a signed type can never overflow.
5015 We could get an overflow if this conversion is done anywhere else. */
5016 if (TYPE_UNSIGNED (type))
5017 temp = fold_convert (signed_type_for (type), temp);
5019 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5020 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5021 if (mask != 0)
5022 temp = const_binop (BIT_AND_EXPR, temp,
5023 fold_convert (TREE_TYPE (c), mask));
5024 /* If necessary, convert the type back to match the type of C. */
5025 if (TYPE_UNSIGNED (type))
5026 temp = fold_convert (type, temp);
5028 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
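/* Illustrative sketch of the shift/XOR trick above for an 8-bit C with
   P == 4, assuming two's-complement behavior on the narrowing casts:
   extract bit 3, move it to bit 7, arithmetic-shift it back down to smear
   it across bits 7..4, then XOR.  The high bits of the result are zero
   exactly when C was already the sign extension of its low 4 bits.  */
#if 0
static unsigned char
unextend_example (unsigned char c)
{
  signed char temp = (c >> 3) & 1;           /* the field's sign bit */
  temp = (signed char) (temp << 7);          /* into the high-order bit */
  temp = (signed char) (temp >> 3);          /* smear across bits 7..4 */
  return c ^ (unsigned char) temp;
}
#endif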
5031 /* For an expression that has the form
5032 (A && B) || ~B
5033 or
5034 (A || B) && ~B,
5035 we can drop one of the inner expressions and simplify to
5036 A || ~B
5037 or
5038 A && ~B
5039 LOC is the location of the resulting expression. OP is the inner
5040 logical operation; the left-hand side in the examples above, while CMPOP
5041 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5042 removing a condition that guards another, as in
5043 (A != NULL && A->...) || A == NULL
5044 which we must not transform. If RHS_ONLY is true, only eliminate the
5045 right-most operand of the inner logical operation. */
5047 static tree
5048 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5049 bool rhs_only)
5051 tree type = TREE_TYPE (cmpop);
5052 enum tree_code code = TREE_CODE (cmpop);
5053 enum tree_code truthop_code = TREE_CODE (op);
5054 tree lhs = TREE_OPERAND (op, 0);
5055 tree rhs = TREE_OPERAND (op, 1);
5056 tree orig_lhs = lhs, orig_rhs = rhs;
5057 enum tree_code rhs_code = TREE_CODE (rhs);
5058 enum tree_code lhs_code = TREE_CODE (lhs);
5059 enum tree_code inv_code;
5061 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5062 return NULL_TREE;
5064 if (TREE_CODE_CLASS (code) != tcc_comparison)
5065 return NULL_TREE;
5067 if (rhs_code == truthop_code)
5069 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5070 if (newrhs != NULL_TREE)
5072 rhs = newrhs;
5073 rhs_code = TREE_CODE (rhs);
5076 if (lhs_code == truthop_code && !rhs_only)
5078 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5079 if (newlhs != NULL_TREE)
5081 lhs = newlhs;
5082 lhs_code = TREE_CODE (lhs);
5086 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5087 if (inv_code == rhs_code
5088 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5089 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5090 return lhs;
5091 if (!rhs_only && inv_code == lhs_code
5092 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5093 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5094 return rhs;
5095 if (rhs != orig_rhs || lhs != orig_lhs)
5096 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5097 lhs, rhs);
5098 return NULL_TREE;
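/* Illustrative sketch on plain ints (not part of this file):
   "(a && b) || !b" has the same truth table as "a || !b", since whenever
   B is false the right-hand arm is already true -- the simplification
   performed above.  */
#if 0
static int
merge_opposite_arm_example (int a, int b)
{
  return ((a && b) || !b) == (a || !b);   /* always 1 */
}
#endif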
5101 /* Find ways of folding logical expressions of LHS and RHS:
5102 Try to merge two comparisons to the same innermost item.
5103 Look for range tests like "ch >= '0' && ch <= '9'".
5104 Look for combinations of simple terms on machines with expensive branches
5105 and evaluate the RHS unconditionally.
5107 For example, if we have p->a == 2 && p->b == 4 and we can make an
5108 object large enough to span both A and B, we can do this with a comparison
5109 against the object ANDed with a mask.
5111 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5112 operations to do this with one comparison.
5114 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5115 function and the one above.
5117 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5118 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5120 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5121 two operands.
5123 We return the simplified tree or 0 if no optimization is possible. */
5125 static tree
5126 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5127 tree lhs, tree rhs)
5129 /* If this is the "or" of two comparisons, we can do something if
5130 the comparisons are NE_EXPR. If this is the "and", we can do something
5131 if the comparisons are EQ_EXPR. I.e.,
5132 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5134 WANTED_CODE is this operation code. For single bit fields, we can
5135 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5136 comparison for one-bit fields. */
5138 enum tree_code wanted_code;
5139 enum tree_code lcode, rcode;
5140 tree ll_arg, lr_arg, rl_arg, rr_arg;
5141 tree ll_inner, lr_inner, rl_inner, rr_inner;
5142 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5143 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5144 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5145 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5146 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5147 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5148 enum machine_mode lnmode, rnmode;
5149 tree ll_mask, lr_mask, rl_mask, rr_mask;
5150 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5151 tree l_const, r_const;
5152 tree lntype, rntype, result;
5153 HOST_WIDE_INT first_bit, end_bit;
5154 int volatilep;
5156 /* Start by getting the comparison codes. Fail if anything is volatile.
5157 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5158 it were surrounded with a NE_EXPR. */
5160 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5161 return 0;
5163 lcode = TREE_CODE (lhs);
5164 rcode = TREE_CODE (rhs);
5166 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5168 lhs = build2 (NE_EXPR, truth_type, lhs,
5169 build_int_cst (TREE_TYPE (lhs), 0));
5170 lcode = NE_EXPR;
5173 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5175 rhs = build2 (NE_EXPR, truth_type, rhs,
5176 build_int_cst (TREE_TYPE (rhs), 0));
5177 rcode = NE_EXPR;
5180 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5181 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5182 return 0;
5184 ll_arg = TREE_OPERAND (lhs, 0);
5185 lr_arg = TREE_OPERAND (lhs, 1);
5186 rl_arg = TREE_OPERAND (rhs, 0);
5187 rr_arg = TREE_OPERAND (rhs, 1);
5189 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5190 if (simple_operand_p (ll_arg)
5191 && simple_operand_p (lr_arg))
5193 if (operand_equal_p (ll_arg, rl_arg, 0)
5194 && operand_equal_p (lr_arg, rr_arg, 0))
5196 result = combine_comparisons (loc, code, lcode, rcode,
5197 truth_type, ll_arg, lr_arg);
5198 if (result)
5199 return result;
5201 else if (operand_equal_p (ll_arg, rr_arg, 0)
5202 && operand_equal_p (lr_arg, rl_arg, 0))
5204 result = combine_comparisons (loc, code, lcode,
5205 swap_tree_comparison (rcode),
5206 truth_type, ll_arg, lr_arg);
5207 if (result)
5208 return result;
5212 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5213 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5215 /* If the RHS can be evaluated unconditionally and its operands are
5216 simple, it wins to evaluate the RHS unconditionally on machines
5217 with expensive branches. In this case, this isn't a comparison
5218 that can be merged. */
5220 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5221 false) >= 2
5222 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5223 && simple_operand_p (rl_arg)
5224 && simple_operand_p (rr_arg))
5226 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5227 if (code == TRUTH_OR_EXPR
5228 && lcode == NE_EXPR && integer_zerop (lr_arg)
5229 && rcode == NE_EXPR && integer_zerop (rr_arg)
5230 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5231 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5232 return build2_loc (loc, NE_EXPR, truth_type,
5233 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5234 ll_arg, rl_arg),
5235 build_int_cst (TREE_TYPE (ll_arg), 0));
5237 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5238 if (code == TRUTH_AND_EXPR
5239 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5240 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5241 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5242 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5243 return build2_loc (loc, EQ_EXPR, truth_type,
5244 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5245 ll_arg, rl_arg),
5246 build_int_cst (TREE_TYPE (ll_arg), 0));
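      /* Illustrative check for the two rewrites above, on plain ints:
	 (a == 0 && b == 0) == ((a | b) == 0) and
	 (a != 0 || b != 0) == ((a | b) != 0) hold for all values,
	 trading a conditional branch for one bitwise OR.  */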
5249 /* See if the comparisons can be merged. Then get all the parameters for
5250 each side. */
5252 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5253 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5254 return 0;
5256 volatilep = 0;
5257 ll_inner = decode_field_reference (loc, ll_arg,
5258 &ll_bitsize, &ll_bitpos, &ll_mode,
5259 &ll_unsignedp, &volatilep, &ll_mask,
5260 &ll_and_mask);
5261 lr_inner = decode_field_reference (loc, lr_arg,
5262 &lr_bitsize, &lr_bitpos, &lr_mode,
5263 &lr_unsignedp, &volatilep, &lr_mask,
5264 &lr_and_mask);
5265 rl_inner = decode_field_reference (loc, rl_arg,
5266 &rl_bitsize, &rl_bitpos, &rl_mode,
5267 &rl_unsignedp, &volatilep, &rl_mask,
5268 &rl_and_mask);
5269 rr_inner = decode_field_reference (loc, rr_arg,
5270 &rr_bitsize, &rr_bitpos, &rr_mode,
5271 &rr_unsignedp, &volatilep, &rr_mask,
5272 &rr_and_mask);
5274 /* The inner operation on the lhs of each comparison must be the
5275 same if we are to be able to do anything.
5276 Then see if we have constants. If not, the same must be true for
5277 the rhs's. */
5278 if (volatilep || ll_inner == 0 || rl_inner == 0
5279 || ! operand_equal_p (ll_inner, rl_inner, 0))
5280 return 0;
5282 if (TREE_CODE (lr_arg) == INTEGER_CST
5283 && TREE_CODE (rr_arg) == INTEGER_CST)
5284 l_const = lr_arg, r_const = rr_arg;
5285 else if (lr_inner == 0 || rr_inner == 0
5286 || ! operand_equal_p (lr_inner, rr_inner, 0))
5287 return 0;
5288 else
5289 l_const = r_const = 0;
5291 /* If either comparison code is not correct for our logical operation,
5292 fail. However, we can convert a one-bit comparison against zero into
5293 the opposite comparison against that bit being set in the field. */
5295 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5296 if (lcode != wanted_code)
5298 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5300 /* Make the left operand unsigned, since we are only interested
5301 in the value of one bit. Otherwise we are doing the wrong
5302 thing below. */
5303 ll_unsignedp = 1;
5304 l_const = ll_mask;
5306 else
5307 return 0;
5310 /* This is analogous to the code for l_const above. */
5311 if (rcode != wanted_code)
5313 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5315 rl_unsignedp = 1;
5316 r_const = rl_mask;
5318 else
5319 return 0;
5322 /* See if we can find a mode that contains both fields being compared on
5323 the left. If we can't, fail. Otherwise, update all constants and masks
5324 to be relative to a field of that size. */
5325 first_bit = MIN (ll_bitpos, rl_bitpos);
5326 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5327 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5328 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5329 volatilep);
5330 if (lnmode == VOIDmode)
5331 return 0;
5333 lnbitsize = GET_MODE_BITSIZE (lnmode);
5334 lnbitpos = first_bit & ~ (lnbitsize - 1);
5335 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5336 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5338 if (BYTES_BIG_ENDIAN)
5340 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5341 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5344 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5345 size_int (xll_bitpos));
5346 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5347 size_int (xrl_bitpos));
5349 if (l_const)
5351 l_const = fold_convert_loc (loc, lntype, l_const);
5352 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5353 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5354 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5355 fold_build1_loc (loc, BIT_NOT_EXPR,
5356 lntype, ll_mask))))
5358 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5360 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5363 if (r_const)
5365 r_const = fold_convert_loc (loc, lntype, r_const);
5366 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5367 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5368 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5369 fold_build1_loc (loc, BIT_NOT_EXPR,
5370 lntype, rl_mask))))
5372 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5374 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5378 /* If the right sides are not constant, do the same for them. Also,
5379 disallow this optimization if a size or signedness mismatch occurs
5380 between the left and right sides. */
5381 if (l_const == 0)
5383 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5384 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5385 /* Make sure the two fields on the right
5386 correspond to the left without being swapped. */
5387 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5388 return 0;
5390 first_bit = MIN (lr_bitpos, rr_bitpos);
5391 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5392 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5393 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5394 volatilep);
5395 if (rnmode == VOIDmode)
5396 return 0;
5398 rnbitsize = GET_MODE_BITSIZE (rnmode);
5399 rnbitpos = first_bit & ~ (rnbitsize - 1);
5400 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5401 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5403 if (BYTES_BIG_ENDIAN)
5405 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5406 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5409 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5410 rntype, lr_mask),
5411 size_int (xlr_bitpos));
5412 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5413 rntype, rr_mask),
5414 size_int (xrr_bitpos));
5416 /* Make a mask that corresponds to both fields being compared.
5417 Do this for both items being compared. If the operands are the
5418 same size and the bits being compared are in the same position
5419 then we can do this by masking both and comparing the masked
5420 results. */
5421 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5422 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5423 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5425 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5426 ll_unsignedp || rl_unsignedp);
5427 if (! all_ones_mask_p (ll_mask, lnbitsize))
5428 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5430 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5431 lr_unsignedp || rr_unsignedp);
5432 if (! all_ones_mask_p (lr_mask, rnbitsize))
5433 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5435 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5438 /* There is still another way we can do something: If both pairs of
5439 fields being compared are adjacent, we may be able to make a wider
5440 field containing them both.
5442 Note that we still must mask the lhs/rhs expressions. Furthermore,
5443 the mask must be shifted to account for the shift done by
5444 make_bit_field_ref. */
5445 if ((ll_bitsize + ll_bitpos == rl_bitpos
5446 && lr_bitsize + lr_bitpos == rr_bitpos)
5447 || (ll_bitpos == rl_bitpos + rl_bitsize
5448 && lr_bitpos == rr_bitpos + rr_bitsize))
5450 tree type;
5452 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5453 ll_bitsize + rl_bitsize,
5454 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5455 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5456 lr_bitsize + rr_bitsize,
5457 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5459 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5460 size_int (MIN (xll_bitpos, xrl_bitpos)));
5461 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5462 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5464 /* Convert to the smaller type before masking out unwanted bits. */
5465 type = lntype;
5466 if (lntype != rntype)
5468 if (lnbitsize > rnbitsize)
5470 lhs = fold_convert_loc (loc, rntype, lhs);
5471 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5472 type = rntype;
5474 else if (lnbitsize < rnbitsize)
5476 rhs = fold_convert_loc (loc, lntype, rhs);
5477 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5478 type = lntype;
5482 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5483 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5485 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5486 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5488 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5491 return 0;
5494 /* Handle the case of comparisons with constants. If there is something in
5495 common between the masks, those bits of the constants must be the same.
5496 If not, the condition is always false. Test for this to avoid generating
5497 incorrect code below. */
5498 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5499 if (! integer_zerop (result)
5500 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5501 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5503 if (wanted_code == NE_EXPR)
5505 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5506 return constant_boolean_node (true, truth_type);
5508 else
5510 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5511 return constant_boolean_node (false, truth_type);
5515 /* Construct the expression we will return. First get the component
5516 reference we will make. Unless the mask is all ones the width of
5517 that field, perform the mask operation. Then compare with the
5518 merged constant. */
5519 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5520 ll_unsignedp || rl_unsignedp);
5522 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5523 if (! all_ones_mask_p (ll_mask, lnbitsize))
5524 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5526 return build2_loc (loc, wanted_code, truth_type, result,
5527 const_binop (BIT_IOR_EXPR, l_const, r_const));
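/* As an illustrative sketch (the exact mask and constant depend on
   endianness and the target's bit-field layout): given

     struct S { unsigned a : 4; unsigned b : 4; } s;

   a test such as `s.a == 3 && s.b == 5' can be merged by the code
   above into a single load, mask and compare of the containing
   word, e.g. (WORD & 0xff) == 0x53 on one possible layout.  */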
5530 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5531 constant. */
5533 static tree
5534 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5535 tree op0, tree op1)
5537 tree arg0 = op0;
5538 enum tree_code op_code;
5539 tree comp_const;
5540 tree minmax_const;
5541 int consts_equal, consts_lt;
5542 tree inner;
5544 STRIP_SIGN_NOPS (arg0);
5546 op_code = TREE_CODE (arg0);
5547 minmax_const = TREE_OPERAND (arg0, 1);
5548 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5549 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5550 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5551 inner = TREE_OPERAND (arg0, 0);
5553 /* If something does not permit us to optimize, return NULL_TREE. */
5554 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5555 || TREE_CODE (comp_const) != INTEGER_CST
5556 || TREE_OVERFLOW (comp_const)
5557 || TREE_CODE (minmax_const) != INTEGER_CST
5558 || TREE_OVERFLOW (minmax_const))
5559 return NULL_TREE;
5561 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5562 and GT_EXPR, doing the rest with recursive calls using logical
5563 simplifications. */
5564 switch (code)
5566 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5568 tree tem
5569 = optimize_minmax_comparison (loc,
5570 invert_tree_comparison (code, false),
5571 type, op0, op1);
5572 if (tem)
5573 return invert_truthvalue_loc (loc, tem);
5574 return NULL_TREE;
5577 case GE_EXPR:
5578 return
5579 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5580 optimize_minmax_comparison
5581 (loc, EQ_EXPR, type, arg0, comp_const),
5582 optimize_minmax_comparison
5583 (loc, GT_EXPR, type, arg0, comp_const));
5585 case EQ_EXPR:
5586 if (op_code == MAX_EXPR && consts_equal)
5587 /* MAX (X, 0) == 0 -> X <= 0 */
5588 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5590 else if (op_code == MAX_EXPR && consts_lt)
5591 /* MAX (X, 0) == 5 -> X == 5 */
5592 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5594 else if (op_code == MAX_EXPR)
5595 /* MAX (X, 0) == -1 -> false */
5596 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5598 else if (consts_equal)
5599 /* MIN (X, 0) == 0 -> X >= 0 */
5600 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5602 else if (consts_lt)
5603 /* MIN (X, 0) == 5 -> false */
5604 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5606 else
5607 /* MIN (X, 0) == -1 -> X == -1 */
5608 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5610 case GT_EXPR:
5611 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5612 /* MAX (X, 0) > 0 -> X > 0
5613 MAX (X, 0) > 5 -> X > 5 */
5614 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5616 else if (op_code == MAX_EXPR)
5617 /* MAX (X, 0) > -1 -> true */
5618 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5620 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5621 /* MIN (X, 0) > 0 -> false
5622 MIN (X, 0) > 5 -> false */
5623 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5625 else
5626 /* MIN (X, 0) > -1 -> X > -1 */
5627 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5629 default:
5630 return NULL_TREE;
5634 /* T is an integer expression that is being multiplied, divided, or taken a
5635 modulus (CODE says which and what kind of divide or modulus) by a
5636 constant C. See if we can eliminate that operation by folding it with
5637 other operations already in T. WIDE_TYPE, if non-null, is a type that
5638 should be used for the computation if wider than our type.
5640 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5641 (X * 2) + (Y * 4). We must, however, be assured that either the original
5642 expression would not overflow or that overflow is undefined for the type
5643 in the language in question.
5645 If we return a non-null expression, it is an equivalent form of the
5646 original computation, but need not be in the original type.
5648 We set *STRICT_OVERFLOW_P to true if the return value depends on
5649 signed overflow being undefined. Otherwise we do not change
5650 *STRICT_OVERFLOW_P. */
5652 static tree
5653 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5654 bool *strict_overflow_p)
5656 /* To avoid exponential search depth, refuse to allow recursion past
5657 three levels. Beyond that (1) it's highly unlikely that we'll find
5658 something interesting and (2) we've probably processed it before
5659 when we built the inner expression. */
5661 static int depth;
5662 tree ret;
5664 if (depth > 3)
5665 return NULL;
5667 depth++;
5668 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5669 depth--;
5671 return ret;
5674 static tree
5675 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5676 bool *strict_overflow_p)
5678 tree type = TREE_TYPE (t);
5679 enum tree_code tcode = TREE_CODE (t);
5680 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5681 > GET_MODE_SIZE (TYPE_MODE (type)))
5682 ? wide_type : type);
5683 tree t1, t2;
5684 int same_p = tcode == code;
5685 tree op0 = NULL_TREE, op1 = NULL_TREE;
5686 bool sub_strict_overflow_p;
5688 /* Don't deal with constants of zero here; they confuse the code below. */
5689 if (integer_zerop (c))
5690 return NULL_TREE;
5692 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5693 op0 = TREE_OPERAND (t, 0);
5695 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5696 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5698 /* Note that we need not handle conditional operations here since fold
5699 already handles those cases. So just do arithmetic here. */
5700 switch (tcode)
5702 case INTEGER_CST:
5703 /* For a constant, we can always simplify if we are a multiply
5704 or (for divide and modulus) if it is a multiple of our constant. */
5705 if (code == MULT_EXPR
5706 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5707 return const_binop (code, fold_convert (ctype, t),
5708 fold_convert (ctype, c));
5709 break;
5711 CASE_CONVERT: case NON_LVALUE_EXPR:
5712 /* If op0 is an expression ... */
5713 if ((COMPARISON_CLASS_P (op0)
5714 || UNARY_CLASS_P (op0)
5715 || BINARY_CLASS_P (op0)
5716 || VL_EXP_CLASS_P (op0)
5717 || EXPRESSION_CLASS_P (op0))
5718 /* ... and has wrapping overflow, and its type is smaller
5719 than ctype, then we cannot pass through as widening. */
5720 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5721 && (TYPE_PRECISION (ctype)
5722 > TYPE_PRECISION (TREE_TYPE (op0))))
5723 /* ... or this is a truncation (t is narrower than op0),
5724 then we cannot pass through this narrowing. */
5725 || (TYPE_PRECISION (type)
5726 < TYPE_PRECISION (TREE_TYPE (op0)))
5727 /* ... or signedness changes for division or modulus,
5728 then we cannot pass through this conversion. */
5729 || (code != MULT_EXPR
5730 && (TYPE_UNSIGNED (ctype)
5731 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5732 /* ... or has undefined overflow while the converted to
5733 type has not, we cannot do the operation in the inner type
5734 as that would introduce undefined overflow. */
5735 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5736 && !TYPE_OVERFLOW_UNDEFINED (type))))
5737 break;
5739 /* Pass the constant down and see if we can make a simplification. If
5740 we can, replace this expression with the inner simplification for
5741 possible later conversion to our or some other type. */
5742 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5743 && TREE_CODE (t2) == INTEGER_CST
5744 && !TREE_OVERFLOW (t2)
5745 && (0 != (t1 = extract_muldiv (op0, t2, code,
5746 code == MULT_EXPR
5747 ? ctype : NULL_TREE,
5748 strict_overflow_p))))
5749 return t1;
5750 break;
5752 case ABS_EXPR:
5753 /* If widening the type changes it from signed to unsigned, then we
5754 must avoid building ABS_EXPR itself as unsigned. */
5755 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5757 tree cstype = (*signed_type_for) (ctype);
5758 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5759 != 0)
5761 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5762 return fold_convert (ctype, t1);
5764 break;
5766 /* If the constant is negative, we cannot simplify this. */
5767 if (tree_int_cst_sgn (c) == -1)
5768 break;
5769 /* FALLTHROUGH */
5770 case NEGATE_EXPR:
5771 /* For division and modulus, type can't be unsigned, as e.g.
5772 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5773 For signed types, even with wrapping overflow, this is fine. */
5774 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5775 break;
5776 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5777 != 0)
5778 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5779 break;
5781 case MIN_EXPR: case MAX_EXPR:
5782 /* If widening the type changes the signedness, then we can't perform
5783 this optimization as that changes the result. */
5784 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5785 break;
5787 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5788 sub_strict_overflow_p = false;
5789 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5790 &sub_strict_overflow_p)) != 0
5791 && (t2 = extract_muldiv (op1, c, code, wide_type,
5792 &sub_strict_overflow_p)) != 0)
5794 if (tree_int_cst_sgn (c) < 0)
5795 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5796 if (sub_strict_overflow_p)
5797 *strict_overflow_p = true;
5798 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5799 fold_convert (ctype, t2));
5801 break;
5803 case LSHIFT_EXPR: case RSHIFT_EXPR:
5804 /* If the second operand is constant, this is a multiplication
5805 or floor division by a power of two, so we can treat it that
5806 way unless the multiplier or divisor overflows. Signed
5807 left-shift overflow is implementation-defined rather than
5808 undefined in C90, so do not convert signed left shift into
5809 multiplication. */
5810 if (TREE_CODE (op1) == INTEGER_CST
5811 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5812 /* const_binop may not detect overflow correctly,
5813 so check for it explicitly here. */
5814 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
5815 && 0 != (t1 = fold_convert (ctype,
5816 const_binop (LSHIFT_EXPR,
5817 size_one_node,
5818 op1)))
5819 && !TREE_OVERFLOW (t1))
5820 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5821 ? MULT_EXPR : FLOOR_DIV_EXPR,
5822 ctype,
5823 fold_convert (ctype, op0),
5824 t1),
5825 c, code, wide_type, strict_overflow_p);
5826 break;
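/* For example, for unsigned X the tree (X << 2) * 2 is first viewed
   as (X * 4) * 2 and then folded to X * 8 by the MULT_EXPR handling
   below; signed left shifts are deliberately left alone, as noted
   above.  */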
5828 case PLUS_EXPR: case MINUS_EXPR:
5829 /* See if we can eliminate the operation on both sides. If we can, we
5830 can return a new PLUS or MINUS. If we can't, the only remaining
5831 cases where we can do anything are if the second operand is a
5832 constant. */
5833 sub_strict_overflow_p = false;
5834 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5835 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5836 if (t1 != 0 && t2 != 0
5837 && (code == MULT_EXPR
5838 /* If not multiplication, we can only do this if both operands
5839 are divisible by c. */
5840 || (multiple_of_p (ctype, op0, c)
5841 && multiple_of_p (ctype, op1, c))))
5843 if (sub_strict_overflow_p)
5844 *strict_overflow_p = true;
5845 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5846 fold_convert (ctype, t2));
5849 /* If this was a subtraction, negate OP1 and set it to be an addition.
5850 This simplifies the logic below. */
5851 if (tcode == MINUS_EXPR)
5853 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5854 /* If OP1 was not easily negatable, the constant may be OP0. */
5855 if (TREE_CODE (op0) == INTEGER_CST)
5857 tree tem = op0;
5858 op0 = op1;
5859 op1 = tem;
5860 tem = t1;
5861 t1 = t2;
5862 t2 = tem;
5866 if (TREE_CODE (op1) != INTEGER_CST)
5867 break;
5869 /* If either OP1 or C is negative, this optimization is not safe for
5870 some of the division and remainder types while for others we need
5871 to change the code. */
5872 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5874 if (code == CEIL_DIV_EXPR)
5875 code = FLOOR_DIV_EXPR;
5876 else if (code == FLOOR_DIV_EXPR)
5877 code = CEIL_DIV_EXPR;
5878 else if (code != MULT_EXPR
5879 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5880 break;
5883 /* If it's a multiply or a division/modulus operation of a multiple
5884 of our constant, do the operation and verify it doesn't overflow. */
5885 if (code == MULT_EXPR
5886 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5888 op1 = const_binop (code, fold_convert (ctype, op1),
5889 fold_convert (ctype, c));
5890 /* We allow the constant to overflow with wrapping semantics. */
5891 if (op1 == 0
5892 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5893 break;
5895 else
5896 break;
5898 /* If we have an unsigned type, we cannot widen the operation since it
5899 will change the result if the original computation overflowed. */
5900 if (TYPE_UNSIGNED (ctype) && ctype != type)
5901 break;
5903 /* If we were able to eliminate our operation from the first side,
5904 apply our operation to the second side and reform the PLUS. */
5905 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5906 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5908 /* The last case is if we are a multiply. In that case, we can
5909 apply the distributive law to commute the multiply and addition
5910 if the multiplication of the constants doesn't overflow
5911 and overflow is defined. With undefined overflow
5912 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5913 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5914 return fold_build2 (tcode, ctype,
5915 fold_build2 (code, ctype,
5916 fold_convert (ctype, op0),
5917 fold_convert (ctype, c)),
5918 op1);
5920 break;
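/* For example, with a wrapping unsigned type, (X + 4) * 2 is
   rewritten by the distributive-law case above into X * 2 + 8:
   the constant part folds even though X itself does not.  */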
5922 case MULT_EXPR:
5923 /* We have a special case here if we are doing something like
5924 (C * 8) % 4 since we know that's zero. */
5925 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5926 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5927 /* If the multiplication can overflow we cannot optimize this. */
5928 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5929 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5930 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5932 *strict_overflow_p = true;
5933 return omit_one_operand (type, integer_zero_node, op0);
5936 /* ... fall through ... */
5938 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5939 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5940 /* If we can extract our operation from the LHS, do so and return a
5941 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5942 do something only if the second operand is a constant. */
5943 if (same_p
5944 && (t1 = extract_muldiv (op0, c, code, wide_type,
5945 strict_overflow_p)) != 0)
5946 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5947 fold_convert (ctype, op1));
5948 else if (tcode == MULT_EXPR && code == MULT_EXPR
5949 && (t1 = extract_muldiv (op1, c, code, wide_type,
5950 strict_overflow_p)) != 0)
5951 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5952 fold_convert (ctype, t1));
5953 else if (TREE_CODE (op1) != INTEGER_CST)
5954 return 0;
5956 /* If these are the same operation types, we can associate them
5957 assuming no overflow. */
5958 if (tcode == code)
5960 bool overflow_p = false;
5961 bool overflow_mul_p;
5962 signop sign = TYPE_SIGN (ctype);
5963 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
5964 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
5965 if (overflow_mul_p
5966 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
5967 overflow_p = true;
5968 if (!overflow_p)
5969 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5970 wide_int_to_tree (ctype, mul));
5973 /* If these operations "cancel" each other, we have the main
5974 optimizations of this pass, which occur when either constant is a
5975 multiple of the other, in which case we replace this with an
5976 operation of either CODE or TCODE.
5978 If we have an unsigned type, we cannot do this since it will change
5979 the result if the original computation overflowed. */
5980 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5981 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5982 || (tcode == MULT_EXPR
5983 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5984 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5985 && code != MULT_EXPR)))
5987 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
5989 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5990 *strict_overflow_p = true;
5991 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5992 fold_convert (ctype,
5993 const_binop (TRUNC_DIV_EXPR,
5994 op1, c)));
5996 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
5998 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5999 *strict_overflow_p = true;
6000 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6001 fold_convert (ctype,
6002 const_binop (TRUNC_DIV_EXPR,
6003 c, op1)));
6006 break;
6008 default:
6009 break;
6012 return 0;
6015 /* Return a node which has the indicated constant VALUE (either 0 or
6016 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6017 and is of the indicated TYPE. */
6019 tree
6020 constant_boolean_node (bool value, tree type)
6022 if (type == integer_type_node)
6023 return value ? integer_one_node : integer_zero_node;
6024 else if (type == boolean_type_node)
6025 return value ? boolean_true_node : boolean_false_node;
6026 else if (TREE_CODE (type) == VECTOR_TYPE)
6027 return build_vector_from_val (type,
6028 build_int_cst (TREE_TYPE (type),
6029 value ? -1 : 0));
6030 else
6031 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6035 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6036 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6037 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6038 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6039 COND is the first argument to CODE; otherwise (as in the example
6040 given here), it is the second argument. TYPE is the type of the
6041 original expression. Return NULL_TREE if no simplification is
6042 possible. */
6044 static tree
6045 fold_binary_op_with_conditional_arg (location_t loc,
6046 enum tree_code code,
6047 tree type, tree op0, tree op1,
6048 tree cond, tree arg, int cond_first_p)
6050 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6051 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6052 tree test, true_value, false_value;
6053 tree lhs = NULL_TREE;
6054 tree rhs = NULL_TREE;
6055 enum tree_code cond_code = COND_EXPR;
6057 if (TREE_CODE (cond) == COND_EXPR
6058 || TREE_CODE (cond) == VEC_COND_EXPR)
6060 test = TREE_OPERAND (cond, 0);
6061 true_value = TREE_OPERAND (cond, 1);
6062 false_value = TREE_OPERAND (cond, 2);
6063 /* If this operand is a void expression (such as a throw), it
6064 does not make sense to try to perform a logical or arithmetic
6065 operation involving it. */
6066 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6067 lhs = true_value;
6068 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6069 rhs = false_value;
6071 else
6073 tree testtype = TREE_TYPE (cond);
6074 test = cond;
6075 true_value = constant_boolean_node (true, testtype);
6076 false_value = constant_boolean_node (false, testtype);
6079 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6080 cond_code = VEC_COND_EXPR;
6082 /* This transformation is only worthwhile if we don't have to wrap ARG
6083 in a SAVE_EXPR and the operation can be simplified without recursing
6084 on at least one of the branches once it's pushed inside the COND_EXPR. */
6085 if (!TREE_CONSTANT (arg)
6086 && (TREE_SIDE_EFFECTS (arg)
6087 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6088 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6089 return NULL_TREE;
6091 arg = fold_convert_loc (loc, arg_type, arg);
6092 if (lhs == 0)
6094 true_value = fold_convert_loc (loc, cond_type, true_value);
6095 if (cond_first_p)
6096 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6097 else
6098 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6100 if (rhs == 0)
6102 false_value = fold_convert_loc (loc, cond_type, false_value);
6103 if (cond_first_p)
6104 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6105 else
6106 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6109 /* Check that we have simplified at least one of the branches. */
6110 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6111 return NULL_TREE;
6113 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6117 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6119 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6120 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6121 ADDEND is the same as X.
6123 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6124 and finite. The problematic cases are when X is zero, and its mode
6125 has signed zeros. In the case of rounding towards -infinity,
6126 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6127 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6129 bool
6130 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6132 if (!real_zerop (addend))
6133 return false;
6135 /* Don't allow the fold with -fsignaling-nans. */
6136 if (HONOR_SNANS (TYPE_MODE (type)))
6137 return false;
6139 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6140 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6141 return true;
6143 /* In a vector or complex, we would need to check the sign of all zeros. */
6144 if (TREE_CODE (addend) != REAL_CST)
6145 return false;
6147 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6148 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6149 negate = !negate;
6151 /* The mode has signed zeros, and we have to honor their sign.
6152 In this situation, there is only one case we can return true for.
6153 X - 0 is the same as X unless rounding towards -infinity is
6154 supported. */
6155 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6158 /* Subroutine of fold() that checks comparisons of built-in math
6159 functions against real constants.
6161 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6162 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6163 is the type of the result and ARG0 and ARG1 are the operands of the
6164 comparison. ARG1 must be a TREE_REAL_CST.
6166 The function returns the constant folded tree if a simplification
6167 can be made, and NULL_TREE otherwise. */
6169 static tree
6170 fold_mathfn_compare (location_t loc,
6171 enum built_in_function fcode, enum tree_code code,
6172 tree type, tree arg0, tree arg1)
6174 REAL_VALUE_TYPE c;
6176 if (BUILTIN_SQRT_P (fcode))
6178 tree arg = CALL_EXPR_ARG (arg0, 0);
6179 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6181 c = TREE_REAL_CST (arg1);
6182 if (REAL_VALUE_NEGATIVE (c))
6184 /* sqrt(x) < y is always false, if y is negative. */
6185 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6186 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6188 /* sqrt(x) > y is always true, if y is negative and we
6189 don't care about NaNs, i.e. negative values of x. */
6190 if (code == NE_EXPR || !HONOR_NANS (mode))
6191 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6193 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6194 return fold_build2_loc (loc, GE_EXPR, type, arg,
6195 build_real (TREE_TYPE (arg), dconst0));
6197 else if (code == GT_EXPR || code == GE_EXPR)
6199 REAL_VALUE_TYPE c2;
6201 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6202 real_convert (&c2, mode, &c2);
6204 if (REAL_VALUE_ISINF (c2))
6206 /* sqrt(x) > y is x == +Inf, when y is very large. */
6207 if (HONOR_INFINITIES (mode))
6208 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6209 build_real (TREE_TYPE (arg), c2));
6211 /* sqrt(x) > y is always false, when y is very large
6212 and we don't care about infinities. */
6213 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6216 /* sqrt(x) > c is the same as x > c*c. */
6217 return fold_build2_loc (loc, code, type, arg,
6218 build_real (TREE_TYPE (arg), c2));
6220 else if (code == LT_EXPR || code == LE_EXPR)
6222 REAL_VALUE_TYPE c2;
6224 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6225 real_convert (&c2, mode, &c2);
6227 if (REAL_VALUE_ISINF (c2))
6229 /* sqrt(x) < y is always true, when y is a very large
6230 value and we don't care about NaNs or Infinities. */
6231 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6232 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6234 /* sqrt(x) < y is x != +Inf when y is very large and we
6235 don't care about NaNs. */
6236 if (! HONOR_NANS (mode))
6237 return fold_build2_loc (loc, NE_EXPR, type, arg,
6238 build_real (TREE_TYPE (arg), c2));
6240 /* sqrt(x) < y is x >= 0 when y is very large and we
6241 don't care about Infinities. */
6242 if (! HONOR_INFINITIES (mode))
6243 return fold_build2_loc (loc, GE_EXPR, type, arg,
6244 build_real (TREE_TYPE (arg), dconst0));
6246 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6247 arg = save_expr (arg);
6248 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6249 fold_build2_loc (loc, GE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg),
6251 dconst0)),
6252 fold_build2_loc (loc, NE_EXPR, type, arg,
6253 build_real (TREE_TYPE (arg),
6254 c2)));
6257 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6258 if (! HONOR_NANS (mode))
6259 return fold_build2_loc (loc, code, type, arg,
6260 build_real (TREE_TYPE (arg), c2));
6262 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6263 arg = save_expr (arg);
6264 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6265 fold_build2_loc (loc, GE_EXPR, type, arg,
6266 build_real (TREE_TYPE (arg),
6267 dconst0)),
6268 fold_build2_loc (loc, code, type, arg,
6269 build_real (TREE_TYPE (arg),
6270 c2)));
6274 return NULL_TREE;
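/* For example, sqrt (x) < 2.0 becomes x < 4.0 when NaNs need not
   be honored (e.g. with -ffinite-math-only); otherwise it becomes
   x >= 0.0 && x < 4.0, with x wrapped in a SAVE_EXPR.  */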
6277 /* Subroutine of fold() that optimizes comparisons against Infinities,
6278 either +Inf or -Inf.
6280 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6281 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6282 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6284 The function returns the constant folded tree if a simplification
6285 can be made, and NULL_TREE otherwise. */
6287 static tree
6288 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6289 tree arg0, tree arg1)
6291 enum machine_mode mode;
6292 REAL_VALUE_TYPE max;
6293 tree temp;
6294 bool neg;
6296 mode = TYPE_MODE (TREE_TYPE (arg0));
6298 /* For negative infinity swap the sense of the comparison. */
6299 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6300 if (neg)
6301 code = swap_tree_comparison (code);
6303 switch (code)
6305 case GT_EXPR:
6306 /* x > +Inf is always false, if we ignore sNaNs. */
6307 if (HONOR_SNANS (mode))
6308 return NULL_TREE;
6309 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6311 case LE_EXPR:
6312 /* x <= +Inf is always true, if we don't care about NaNs. */
6313 if (! HONOR_NANS (mode))
6314 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6316 /* x <= +Inf is the same as x == x, i.e. !isnan (x). */
6317 arg0 = save_expr (arg0);
6318 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6320 case EQ_EXPR:
6321 case GE_EXPR:
6322 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6323 real_maxval (&max, neg, mode);
6324 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6325 arg0, build_real (TREE_TYPE (arg0), max));
6327 case LT_EXPR:
6328 /* x < +Inf is always equal to x <= DBL_MAX. */
6329 real_maxval (&max, neg, mode);
6330 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6333 case NE_EXPR:
6334 /* x != +Inf is always equal to !(x > DBL_MAX). */
6335 real_maxval (&max, neg, mode);
6336 if (! HONOR_NANS (mode))
6337 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6338 arg0, build_real (TREE_TYPE (arg0), max));
6340 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6341 arg0, build_real (TREE_TYPE (arg0), max));
6342 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6344 default:
6345 break;
6348 return NULL_TREE;
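/* For example, for a double x, x < +Inf folds to x <= DBL_MAX,
   while x != +Inf folds to !(x > DBL_MAX) when NaNs are honored
   and to x <= DBL_MAX otherwise.  */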
6351 /* Subroutine of fold() that optimizes comparisons of a division by
6352 a nonzero integer constant against an integer constant, i.e.
6353 X/C1 op C2.
6355 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6356 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6357 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6359 The function returns the constant folded tree if a simplification
6360 can be made, and NULL_TREE otherwise. */
6362 static tree
6363 fold_div_compare (location_t loc,
6364 enum tree_code code, tree type, tree arg0, tree arg1)
6366 tree prod, tmp, hi, lo;
6367 tree arg00 = TREE_OPERAND (arg0, 0);
6368 tree arg01 = TREE_OPERAND (arg0, 1);
6369 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6370 bool neg_overflow = false;
6371 bool overflow;
6373 /* We have to do this the hard way to detect unsigned overflow.
6374 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6375 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6376 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6377 neg_overflow = false;
6379 if (sign == UNSIGNED)
6381 tmp = int_const_binop (MINUS_EXPR, arg01,
6382 build_int_cst (TREE_TYPE (arg01), 1));
6383 lo = prod;
6385 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6386 val = wi::add (prod, tmp, sign, &overflow);
6387 hi = force_fit_type (TREE_TYPE (arg00), val,
6388 -1, overflow | TREE_OVERFLOW (prod));
6390 else if (tree_int_cst_sgn (arg01) >= 0)
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1));
6394 switch (tree_int_cst_sgn (arg1))
6396 case -1:
6397 neg_overflow = true;
6398 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6399 hi = prod;
6400 break;
6402 case 0:
6403 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6404 hi = tmp;
6405 break;
6407 case 1:
6408 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6409 lo = prod;
6410 break;
6412 default:
6413 gcc_unreachable ();
6416 else
6418 /* A negative divisor reverses the relational operators. */
6419 code = swap_tree_comparison (code);
6421 tmp = int_const_binop (PLUS_EXPR, arg01,
6422 build_int_cst (TREE_TYPE (arg01), 1));
6423 switch (tree_int_cst_sgn (arg1))
6425 case -1:
6426 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6427 lo = prod;
6428 break;
6430 case 0:
6431 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6432 lo = tmp;
6433 break;
6435 case 1:
6436 neg_overflow = true;
6437 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6438 hi = prod;
6439 break;
6441 default:
6442 gcc_unreachable ();
6446 switch (code)
6448 case EQ_EXPR:
6449 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6450 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6451 if (TREE_OVERFLOW (hi))
6452 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6453 if (TREE_OVERFLOW (lo))
6454 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6455 return build_range_check (loc, type, arg00, 1, lo, hi);
6457 case NE_EXPR:
6458 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6459 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6460 if (TREE_OVERFLOW (hi))
6461 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6462 if (TREE_OVERFLOW (lo))
6463 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6464 return build_range_check (loc, type, arg00, 0, lo, hi);
6466 case LT_EXPR:
6467 if (TREE_OVERFLOW (lo))
6469 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6470 return omit_one_operand_loc (loc, type, tmp, arg00);
6472 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6474 case LE_EXPR:
6475 if (TREE_OVERFLOW (hi))
6477 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6478 return omit_one_operand_loc (loc, type, tmp, arg00);
6480 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6482 case GT_EXPR:
6483 if (TREE_OVERFLOW (hi))
6485 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6486 return omit_one_operand_loc (loc, type, tmp, arg00);
6488 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6490 case GE_EXPR:
6491 if (TREE_OVERFLOW (lo))
6493 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6494 return omit_one_operand_loc (loc, type, tmp, arg00);
6496 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6498 default:
6499 break;
6502 return NULL_TREE;
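/* For example, for signed X, X / 4 == 2 becomes the range check
   8 <= X && X <= 11, and X / 4 > 2 becomes X > 11; a negative
   divisor first swaps the sense of the comparison.  */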
6506 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6507 equality/inequality test, then return a simplified form of the test
6508 using a sign test. Otherwise return NULL. TYPE is the desired
6509 result type. */
6511 static tree
6512 fold_single_bit_test_into_sign_test (location_t loc,
6513 enum tree_code code, tree arg0, tree arg1,
6514 tree result_type)
6516 /* If this is testing a single bit, we can optimize the test. */
6517 if ((code == NE_EXPR || code == EQ_EXPR)
6518 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6519 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6521 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6522 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6523 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6525 if (arg00 != NULL_TREE
6526 /* This is only a win if casting to a signed type is cheap,
6527 i.e. when arg00's type is not a partial mode. */
6528 && TYPE_PRECISION (TREE_TYPE (arg00))
6529 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6531 tree stype = signed_type_for (TREE_TYPE (arg00));
6532 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6533 result_type,
6534 fold_convert_loc (loc, stype, arg00),
6535 build_int_cst (stype, 0));
6539 return NULL_TREE;
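/* For example, if X has unsigned char type, (X & 0x80) != 0 tests
   exactly the sign bit of the corresponding signed type, so it is
   folded into (signed char) X < 0.  */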
6542 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6543 equality/inequality test, then return a simplified form of
6544 the test using shifts and logical operations. Otherwise return
6545 NULL. TYPE is the desired result type. */
6547 tree
6548 fold_single_bit_test (location_t loc, enum tree_code code,
6549 tree arg0, tree arg1, tree result_type)
6551 /* If this is testing a single bit, we can optimize the test. */
6552 if ((code == NE_EXPR || code == EQ_EXPR)
6553 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6556 tree inner = TREE_OPERAND (arg0, 0);
6557 tree type = TREE_TYPE (arg0);
6558 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6559 enum machine_mode operand_mode = TYPE_MODE (type);
6560 int ops_unsigned;
6561 tree signed_type, unsigned_type, intermediate_type;
6562 tree tem, one;
6564 /* First, see if we can fold the single bit test into a sign-bit
6565 test. */
6566 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6567 result_type);
6568 if (tem)
6569 return tem;
6571 /* Otherwise we have (A & C) != 0 where C is a single bit,
6572 convert that into ((A >> C2) & 1), where C2 = log2(C).
6573 Similarly for (A & C) == 0. */
6575 /* If INNER is a right shift of a constant and it plus BITNUM does
6576 not overflow, adjust BITNUM and INNER. */
6577 if (TREE_CODE (inner) == RSHIFT_EXPR
6578 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6579 && bitnum < TYPE_PRECISION (type)
6580 && wi::ltu_p (TREE_OPERAND (inner, 1),
6581 TYPE_PRECISION (type) - bitnum))
6583 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6584 inner = TREE_OPERAND (inner, 0);
6587 /* If we are going to be able to omit the AND below, we must do our
6588 operations as unsigned. If we must use the AND, we have a choice.
6589 Normally unsigned is faster, but for some machines signed is. */
6590 #ifdef LOAD_EXTEND_OP
6591 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6592 && !flag_syntax_only) ? 0 : 1;
6593 #else
6594 ops_unsigned = 1;
6595 #endif
6597 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6598 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6599 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6600 inner = fold_convert_loc (loc, intermediate_type, inner);
6602 if (bitnum != 0)
6603 inner = build2 (RSHIFT_EXPR, intermediate_type,
6604 inner, size_int (bitnum));
6606 one = build_int_cst (intermediate_type, 1);
6608 if (code == EQ_EXPR)
6609 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6611 /* Put the AND last so it can combine with more things. */
6612 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6614 /* Make sure to return the proper type. */
6615 inner = fold_convert_loc (loc, result_type, inner);
6617 return inner;
6619 return NULL_TREE;
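/* For example, (X & 4) == 0 becomes ((X >> 2) ^ 1) & 1 and
   (X & 4) != 0 becomes (X >> 2) & 1, both computed in the
   intermediate (normally unsigned) type chosen above.  */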
6622 /* Check whether we are allowed to reorder operands arg0 and arg1,
6623 such that the evaluation of arg1 occurs before arg0. */
6625 static bool
6626 reorder_operands_p (const_tree arg0, const_tree arg1)
6628 if (! flag_evaluation_order)
6629 return true;
6630 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6631 return true;
6632 return ! TREE_SIDE_EFFECTS (arg0)
6633 && ! TREE_SIDE_EFFECTS (arg1);
6636 /* Test whether it is preferable to swap two operands, ARG0 and
6637 ARG1, for example because ARG0 is an integer constant and ARG1
6638 isn't. If REORDER is true, only recommend swapping if we can
6639 evaluate the operands in reverse order. */
6641 bool
6642 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6644 STRIP_SIGN_NOPS (arg0);
6645 STRIP_SIGN_NOPS (arg1);
6647 if (TREE_CODE (arg1) == INTEGER_CST)
6648 return 0;
6649 if (TREE_CODE (arg0) == INTEGER_CST)
6650 return 1;
6652 if (TREE_CODE (arg1) == REAL_CST)
6653 return 0;
6654 if (TREE_CODE (arg0) == REAL_CST)
6655 return 1;
6657 if (TREE_CODE (arg1) == FIXED_CST)
6658 return 0;
6659 if (TREE_CODE (arg0) == FIXED_CST)
6660 return 1;
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6663 return 0;
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6665 return 1;
6667 if (TREE_CONSTANT (arg1))
6668 return 0;
6669 if (TREE_CONSTANT (arg0))
6670 return 1;
6672 if (optimize_function_for_size_p (cfun))
6673 return 0;
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6677 return 0;
6679 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6686 return 1;
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6690 return 0;
6691 if (TREE_CODE (arg0) == SSA_NAME)
6692 return 1;
6694 /* Put variables last. */
6695 if (DECL_P (arg1))
6696 return 0;
6697 if (DECL_P (arg0))
6698 return 1;
6700 return 0;
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
6706 static tree
6707 fold_widened_comparison (location_t loc, enum tree_code code,
6708 tree type, tree arg0, tree arg1)
6710 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6711 tree arg1_unw;
6712 tree shorter_type, outer_type;
6713 tree min, max;
6714 bool above, below;
6716 if (arg0_unw == arg0)
6717 return NULL_TREE;
6718 shorter_type = TREE_TYPE (arg0_unw);
6720 #ifdef HAVE_canonicalize_funcptr_for_compare
6721 /* Disable this optimization if we're casting a function pointer
6722 type on targets that require function pointer canonicalization. */
6723 if (HAVE_canonicalize_funcptr_for_compare
6724 && TREE_CODE (shorter_type) == POINTER_TYPE
6725 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6726 return NULL_TREE;
6727 #endif
6729 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6730 return NULL_TREE;
6732 arg1_unw = get_unwidened (arg1, NULL_TREE);
6734 /* If possible, express the comparison in the shorter mode. */
6735 if ((code == EQ_EXPR || code == NE_EXPR
6736 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6737 && (TREE_TYPE (arg1_unw) == shorter_type
6738 || ((TYPE_PRECISION (shorter_type)
6739 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6740 && (TYPE_UNSIGNED (shorter_type)
6741 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6742 || (TREE_CODE (arg1_unw) == INTEGER_CST
6743 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6744 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6745 && int_fits_type_p (arg1_unw, shorter_type))))
6746 return fold_build2_loc (loc, code, type, arg0_unw,
6747 fold_convert_loc (loc, shorter_type, arg1_unw));
6749 if (TREE_CODE (arg1_unw) != INTEGER_CST
6750 || TREE_CODE (shorter_type) != INTEGER_TYPE
6751 || !int_fits_type_p (arg1_unw, shorter_type))
6752 return NULL_TREE;
6754 /* If we are comparing with an integer that does not fit into the range
6755 of the shorter type, the result is known. */
6756 outer_type = TREE_TYPE (arg1_unw);
6757 min = lower_bound_in_type (outer_type, shorter_type);
6758 max = upper_bound_in_type (outer_type, shorter_type);
6760 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6761 max, arg1_unw));
6762 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6763 arg1_unw, min));
6765 switch (code)
6767 case EQ_EXPR:
6768 if (above || below)
6769 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6770 break;
6772 case NE_EXPR:
6773 if (above || below)
6774 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6775 break;
6777 case LT_EXPR:
6778 case LE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6784 case GT_EXPR:
6785 case GE_EXPR:
6786 if (above)
6787 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6788 else if (below)
6789 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6791 default:
6792 break;
6795 return NULL_TREE;
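/* For example, if UC has unsigned char type, (int) UC < 300 folds
   to constant true: 300 lies above the range of the shorter type,
   so only the "above" case of the switch applies.  */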
6798 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6799 ARG0 just the signedness is changed. */
6801 static tree
6802 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6803 tree arg0, tree arg1)
6805 tree arg0_inner;
6806 tree inner_type, outer_type;
6808 if (!CONVERT_EXPR_P (arg0))
6809 return NULL_TREE;
6811 outer_type = TREE_TYPE (arg0);
6812 arg0_inner = TREE_OPERAND (arg0, 0);
6813 inner_type = TREE_TYPE (arg0_inner);
6815 #ifdef HAVE_canonicalize_funcptr_for_compare
6816 /* Disable this optimization if we're casting a function pointer
6817 type on targets that require function pointer canonicalization. */
6818 if (HAVE_canonicalize_funcptr_for_compare
6819 && TREE_CODE (inner_type) == POINTER_TYPE
6820 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6821 return NULL_TREE;
6822 #endif
6824 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6825 return NULL_TREE;
6827 if (TREE_CODE (arg1) != INTEGER_CST
6828 && !(CONVERT_EXPR_P (arg1)
6829 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6830 return NULL_TREE;
6832 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6833 && code != NE_EXPR
6834 && code != EQ_EXPR)
6835 return NULL_TREE;
6837 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6838 return NULL_TREE;
6840 if (TREE_CODE (arg1) == INTEGER_CST)
6841 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6842 TREE_OVERFLOW (arg1));
6843 else
6844 arg1 = fold_convert_loc (loc, inner_type, arg1);
6846 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
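/* For example, if X has int type, (unsigned int) X == 5 is folded
   back to X == 5: the precision is unchanged and equality is
   insensitive to the signedness of the operands.  */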
6849 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6850 the step of the array. Reconstructs s and delta in the case of s *
6851 delta being an integer constant (and thus already folded). ADDR is
6852 the address. MULT is the multiplicative expression. If the
6853 function succeeds, the new address expression is returned.
6854 Otherwise NULL_TREE is returned. LOC is the location of the
6855 resulting expression. */
6857 static tree
6858 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6860 tree s, delta, step;
6861 tree ref = TREE_OPERAND (addr, 0), pref;
6862 tree ret, pos;
6863 tree itype;
6864 bool mdim = false;
6866 /* Strip the nops that might be added when converting op1 to sizetype. */
6867 STRIP_NOPS (op1);
6869 /* Canonicalize op1 into a possibly non-constant delta
6870 and an INTEGER_CST s. */
6871 if (TREE_CODE (op1) == MULT_EXPR)
6873 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6875 STRIP_NOPS (arg0);
6876 STRIP_NOPS (arg1);
6878 if (TREE_CODE (arg0) == INTEGER_CST)
6880 s = arg0;
6881 delta = arg1;
6883 else if (TREE_CODE (arg1) == INTEGER_CST)
6885 s = arg1;
6886 delta = arg0;
6888 else
6889 return NULL_TREE;
6891 else if (TREE_CODE (op1) == INTEGER_CST)
6893 delta = op1;
6894 s = NULL_TREE;
6896 else
6898 /* Treat op1 as delta * 1. */
6899 delta = op1;
6900 s = integer_one_node;
6903 /* Handle &x.array the same as we would handle &x.array[0]. */
6904 if (TREE_CODE (ref) == COMPONENT_REF
6905 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6907 tree domain;
6909 /* Remember if this was a multi-dimensional array. */
6910 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6911 mdim = true;
6913 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6914 if (! domain)
6915 goto cont;
6916 itype = TREE_TYPE (domain);
6918 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6919 if (TREE_CODE (step) != INTEGER_CST)
6920 goto cont;
6922 if (s)
6924 if (! tree_int_cst_equal (step, s))
6925 goto cont;
6927 else
6929 /* Check whether delta is a multiple of step. */
6930 tree tmp = div_if_zero_remainder (op1, step);
6931 if (! tmp)
6932 goto cont;
6933 delta = tmp;
6936 /* Only fold here if we can verify we do not overflow one
6937 dimension of a multi-dimensional array. */
6938 if (mdim)
6940 tree tmp;
6942 if (!TYPE_MIN_VALUE (domain)
6943 || !TYPE_MAX_VALUE (domain)
6944 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6945 goto cont;
6947 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6948 fold_convert_loc (loc, itype,
6949 TYPE_MIN_VALUE (domain)),
6950 fold_convert_loc (loc, itype, delta));
6951 if (TREE_CODE (tmp) != INTEGER_CST
6952 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6953 goto cont;
6956 /* We found a suitable component reference. */
6958 pref = TREE_OPERAND (addr, 0);
6959 ret = copy_node (pref);
6960 SET_EXPR_LOCATION (ret, loc);
6962 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6963 fold_build2_loc
6964 (loc, PLUS_EXPR, itype,
6965 fold_convert_loc (loc, itype,
6966 TYPE_MIN_VALUE
6967 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6968 fold_convert_loc (loc, itype, delta)),
6969 NULL_TREE, NULL_TREE);
6970 return build_fold_addr_expr_loc (loc, ret);
6973 cont:
6975 for (;; ref = TREE_OPERAND (ref, 0))
6977 if (TREE_CODE (ref) == ARRAY_REF)
6979 tree domain;
6981 /* Remember if this was a multi-dimensional array. */
6982 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6983 mdim = true;
6985 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6986 if (! domain)
6987 continue;
6988 itype = TREE_TYPE (domain);
6990 step = array_ref_element_size (ref);
6991 if (TREE_CODE (step) != INTEGER_CST)
6992 continue;
6994 if (s)
6996 if (! tree_int_cst_equal (step, s))
6997 continue;
6999 else
7001 /* Check whether delta is a multiple of step. */
7002 tree tmp = div_if_zero_remainder (op1, step);
7003 if (! tmp)
7004 continue;
7005 delta = tmp;
7008 /* Only fold here if we can verify we do not overflow one
7009 dimension of a multi-dimensional array. */
7010 if (mdim)
7012 tree tmp;
7014 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7015 || !TYPE_MAX_VALUE (domain)
7016 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7017 continue;
7019 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7020 fold_convert_loc (loc, itype,
7021 TREE_OPERAND (ref, 1)),
7022 fold_convert_loc (loc, itype, delta));
7023 if (!tmp
7024 || TREE_CODE (tmp) != INTEGER_CST
7025 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7026 continue;
7029 break;
7031 else
7032 mdim = false;
7034 if (!handled_component_p (ref))
7035 return NULL_TREE;
7038 /* We found the suitable array reference. So copy everything up to it,
7039 and replace the index. */
7041 pref = TREE_OPERAND (addr, 0);
7042 ret = copy_node (pref);
7043 SET_EXPR_LOCATION (ret, loc);
7044 pos = ret;
7046 while (pref != ref)
7048 pref = TREE_OPERAND (pref, 0);
7049 TREE_OPERAND (pos, 0) = copy_node (pref);
7050 pos = TREE_OPERAND (pos, 0);
7053 TREE_OPERAND (pos, 1)
7054 = fold_build2_loc (loc, PLUS_EXPR, itype,
7055 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7056 fold_convert_loc (loc, itype, delta));
7057 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
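/* As a sketch, assuming 4-byte ints: for int A[10], the address
   &A[1] p+ I * 4 matches the element size, so it is rewritten as
   &A[1 + I]; for a dimension of a multi-dimensional array the new
   index is additionally checked against the domain bounds.  */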
7061 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7062 means A >= Y && A != MAX, but in this case we know that
7063 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7065 static tree
7066 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7068 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7070 if (TREE_CODE (bound) == LT_EXPR)
7071 a = TREE_OPERAND (bound, 0);
7072 else if (TREE_CODE (bound) == GT_EXPR)
7073 a = TREE_OPERAND (bound, 1);
7074 else
7075 return NULL_TREE;
7077 typea = TREE_TYPE (a);
7078 if (!INTEGRAL_TYPE_P (typea)
7079 && !POINTER_TYPE_P (typea))
7080 return NULL_TREE;
7082 if (TREE_CODE (ineq) == LT_EXPR)
7084 a1 = TREE_OPERAND (ineq, 1);
7085 y = TREE_OPERAND (ineq, 0);
7087 else if (TREE_CODE (ineq) == GT_EXPR)
7089 a1 = TREE_OPERAND (ineq, 0);
7090 y = TREE_OPERAND (ineq, 1);
7092 else
7093 return NULL_TREE;
7095 if (TREE_TYPE (a1) != typea)
7096 return NULL_TREE;
7098 if (POINTER_TYPE_P (typea))
7100 /* Convert the pointer types into integer before taking the difference. */
7101 tree ta = fold_convert_loc (loc, ssizetype, a);
7102 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7103 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7105 else
7106 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7108 if (!diff || !integer_onep (diff))
7109 return NULL_TREE;
7111 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7114 /* Fold a sum or difference of at least one multiplication.
7115 Returns the folded tree or NULL if no simplification could be made. */
7117 static tree
7118 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7119 tree arg0, tree arg1)
7121 tree arg00, arg01, arg10, arg11;
7122 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7124 /* (A * C) +- (B * C) -> (A+-B) * C.
7125 (A * C) +- A -> A * (C+-1).
7126 We are most concerned about the case where C is a constant,
7127 but other combinations show up during loop reduction. Since
7128 it is not difficult, try all four possibilities. */
7130 if (TREE_CODE (arg0) == MULT_EXPR)
7132 arg00 = TREE_OPERAND (arg0, 0);
7133 arg01 = TREE_OPERAND (arg0, 1);
7135 else if (TREE_CODE (arg0) == INTEGER_CST)
7137 arg00 = build_one_cst (type);
7138 arg01 = arg0;
7140 else
7142 /* We cannot generate constant 1 for fract. */
7143 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7144 return NULL_TREE;
7145 arg00 = arg0;
7146 arg01 = build_one_cst (type);
7148 if (TREE_CODE (arg1) == MULT_EXPR)
7150 arg10 = TREE_OPERAND (arg1, 0);
7151 arg11 = TREE_OPERAND (arg1, 1);
7153 else if (TREE_CODE (arg1) == INTEGER_CST)
7155 arg10 = build_one_cst (type);
7156 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7157 the purpose of this canonicalization. */
7158 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7159 && negate_expr_p (arg1)
7160 && code == PLUS_EXPR)
7162 arg11 = negate_expr (arg1);
7163 code = MINUS_EXPR;
7165 else
7166 arg11 = arg1;
7168 else
7170 /* We cannot generate constant 1 for fract. */
7171 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7172 return NULL_TREE;
7173 arg10 = arg1;
7174 arg11 = build_one_cst (type);
7176 same = NULL_TREE;
7178 if (operand_equal_p (arg01, arg11, 0))
7179 same = arg01, alt0 = arg00, alt1 = arg10;
7180 else if (operand_equal_p (arg00, arg10, 0))
7181 same = arg00, alt0 = arg01, alt1 = arg11;
7182 else if (operand_equal_p (arg00, arg11, 0))
7183 same = arg00, alt0 = arg01, alt1 = arg10;
7184 else if (operand_equal_p (arg01, arg10, 0))
7185 same = arg01, alt0 = arg00, alt1 = arg11;
7187 /* No identical multiplicands; see if we can find a common
7188 power-of-two factor in non-power-of-two multiplies. This
7189 can help in multi-dimensional array access. */
7190 else if (tree_fits_shwi_p (arg01)
7191 && tree_fits_shwi_p (arg11))
7193 HOST_WIDE_INT int01, int11, tmp;
7194 bool swap = false;
7195 tree maybe_same;
7196 int01 = tree_to_shwi (arg01);
7197 int11 = tree_to_shwi (arg11);
7199 /* Move min of absolute values to int11. */
7200 if (absu_hwi (int01) < absu_hwi (int11))
7202 tmp = int01, int01 = int11, int11 = tmp;
7203 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7204 maybe_same = arg01;
7205 swap = true;
7207 else
7208 maybe_same = arg11;
7210 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7211 /* The remainder should not be a constant, otherwise we
7212 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7213 increased the number of multiplications necessary. */
7214 && TREE_CODE (arg10) != INTEGER_CST)
7216 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7217 build_int_cst (TREE_TYPE (arg00),
7218 int01 / int11));
7219 alt1 = arg10;
7220 same = maybe_same;
7221 if (swap)
7222 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7226 if (same)
7227 return fold_build2_loc (loc, MULT_EXPR, type,
7228 fold_build2_loc (loc, code, type,
7229 fold_convert_loc (loc, type, alt0),
7230 fold_convert_loc (loc, type, alt1)),
7231 fold_convert_loc (loc, type, same));
7233 return NULL_TREE;
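/* For example, X * 8 + X * 4 shares the multiplicand X and becomes
   X * 12, while I * 4 + J * 2 shares only the power-of-two factor
   2 and becomes (I * 2 + J) * 2.  */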
7236 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7237 specified by EXPR into the buffer PTR of length LEN bytes.
7238 Return the number of bytes placed in the buffer, or zero
7239 upon failure. */
7241 static int
7242 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7244 tree type = TREE_TYPE (expr);
7245 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7246 int byte, offset, word, words;
7247 unsigned char value;
7249 if (total_bytes > len)
7250 return 0;
7251 words = total_bytes / UNITS_PER_WORD;
7253 for (byte = 0; byte < total_bytes; byte++)
7255 int bitpos = byte * BITS_PER_UNIT;
7256 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7257 number of bytes. */
7258 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7260 if (total_bytes > UNITS_PER_WORD)
7262 word = byte / UNITS_PER_WORD;
7263 if (WORDS_BIG_ENDIAN)
7264 word = (words - 1) - word;
7265 offset = word * UNITS_PER_WORD;
7266 if (BYTES_BIG_ENDIAN)
7267 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7268 else
7269 offset += byte % UNITS_PER_WORD;
7271 else
7272 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7273 ptr[offset] = value;
7275 return total_bytes;
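/* A minimal usage sketch (illustrative only; assumes a 32-bit int
   type and a little-endian target):

     unsigned char buf[4];
     int n = native_encode_int (build_int_cst (integer_type_node,
                                               0x01020304),
                                buf, sizeof (buf));

   On success n == 4 and buf holds { 0x04, 0x03, 0x02, 0x01 };
   a return of zero means the buffer was too small.  */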
7279 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
7284 static int
7285 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7287 tree type = TREE_TYPE (expr);
7288 enum machine_mode mode = TYPE_MODE (type);
7289 int total_bytes = GET_MODE_SIZE (mode);
7290 FIXED_VALUE_TYPE value;
7291 tree i_value, i_type;
7293 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7294 return 0;
7296 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7298 if (NULL_TREE == i_type
7299 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7300 return 0;
7302 value = TREE_FIXED_CST (expr);
7303 i_value = double_int_to_tree (i_type, value.data);
7305 return native_encode_int (i_value, ptr, len);
7309 /* Subroutine of native_encode_expr. Encode the REAL_CST
7310 specified by EXPR into the buffer PTR of length LEN bytes.
7311 Return the number of bytes placed in the buffer, or zero
7312 upon failure. */
7314 static int
7315 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7317 tree type = TREE_TYPE (expr);
7318 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7319 int byte, offset, word, words, bitpos;
7320 unsigned char value;
7322 /* There are always 32 bits in each long, no matter the size of
7323 the host's long. We handle floating point representations with
7324 up to 192 bits. */
7325 long tmp[6];
7327 if (total_bytes > len)
7328 return 0;
7329 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7331 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7333 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7334 bitpos += BITS_PER_UNIT)
7336 byte = (bitpos / BITS_PER_UNIT) & 3;
7337 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7339 if (UNITS_PER_WORD < 4)
7341 word = byte / UNITS_PER_WORD;
7342 if (WORDS_BIG_ENDIAN)
7343 word = (words - 1) - word;
7344 offset = word * UNITS_PER_WORD;
7345 if (BYTES_BIG_ENDIAN)
7346 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7347 else
7348 offset += byte % UNITS_PER_WORD;
7350 else
7351 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7352 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7354 return total_bytes;
7357 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7358 specified by EXPR into the buffer PTR of length LEN bytes.
7359 Return the number of bytes placed in the buffer, or zero
7360 upon failure. */
7362 static int
7363 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7365 int rsize, isize;
7366 tree part;
7368 part = TREE_REALPART (expr);
7369 rsize = native_encode_expr (part, ptr, len);
7370 if (rsize == 0)
7371 return 0;
7372 part = TREE_IMAGPART (expr);
7373 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7374 if (isize != rsize)
7375 return 0;
7376 return rsize + isize;
7380 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7381 specified by EXPR into the buffer PTR of length LEN bytes.
7382 Return the number of bytes placed in the buffer, or zero
7383 upon failure. */
7385 static int
7386 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7388 unsigned i, count;
7389 int size, offset;
7390 tree itype, elem;
7392 offset = 0;
7393 count = VECTOR_CST_NELTS (expr);
7394 itype = TREE_TYPE (TREE_TYPE (expr));
7395 size = GET_MODE_SIZE (TYPE_MODE (itype));
7396 for (i = 0; i < count; i++)
7398 elem = VECTOR_CST_ELT (expr, i);
7399 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7400 return 0;
7401 offset += size;
7403 return offset;
7407 /* Subroutine of native_encode_expr. Encode the STRING_CST
7408 specified by EXPR into the buffer PTR of length LEN bytes.
7409 Return the number of bytes placed in the buffer, or zero
7410 upon failure. */
7412 static int
7413 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7415 tree type = TREE_TYPE (expr);
7416 HOST_WIDE_INT total_bytes;
7418 if (TREE_CODE (type) != ARRAY_TYPE
7419 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7420 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7421 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7422 return 0;
7423 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7424 if (total_bytes > len)
7425 return 0;
7426 if (TREE_STRING_LENGTH (expr) < total_bytes)
7428 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7429 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7430 total_bytes - TREE_STRING_LENGTH (expr));
7432 else
7433 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7434 return total_bytes;
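/* For example, a STRING_CST "ab" whose type is char[4] encodes as
   { 'a', 'b', 0, 0 }: bytes beyond TREE_STRING_LENGTH are
   zero-filled up to the declared array size.  */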
7438 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7439 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7440 buffer PTR of length LEN bytes. Return the number of bytes
7441 placed in the buffer, or zero upon failure. */
7443 int
7444 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7446 switch (TREE_CODE (expr))
7448 case INTEGER_CST:
7449 return native_encode_int (expr, ptr, len);
7451 case REAL_CST:
7452 return native_encode_real (expr, ptr, len);
7454 case FIXED_CST:
7455 return native_encode_fixed (expr, ptr, len);
7457 case COMPLEX_CST:
7458 return native_encode_complex (expr, ptr, len);
7460 case VECTOR_CST:
7461 return native_encode_vector (expr, ptr, len);
7463 case STRING_CST:
7464 return native_encode_string (expr, ptr, len);
7466 default:
7467 return 0;
7472 /* Subroutine of native_interpret_expr. Interpret the contents of
7473 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7474 If the buffer cannot be interpreted, return NULL_TREE. */
7476 static tree
7477 native_interpret_int (tree type, const unsigned char *ptr, int len)
7479 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7481 if (total_bytes > len
7482 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7483 return NULL_TREE;
7485 wide_int result = wi::from_buffer (ptr, total_bytes);
7487 return wide_int_to_tree (type, result);
7491 /* Subroutine of native_interpret_expr. Interpret the contents of
7492 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7493 If the buffer cannot be interpreted, return NULL_TREE. */
7495 static tree
7496 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7498 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7499 double_int result;
7500 FIXED_VALUE_TYPE fixed_value;
7502 if (total_bytes > len
7503 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7504 return NULL_TREE;
7506 result = double_int::from_buffer (ptr, total_bytes);
7507 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7509 return build_fixed (type, fixed_value);
7513 /* Subroutine of native_interpret_expr. Interpret the contents of
7514 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7515 If the buffer cannot be interpreted, return NULL_TREE. */
7517 static tree
7518 native_interpret_real (tree type, const unsigned char *ptr, int len)
7520 enum machine_mode mode = TYPE_MODE (type);
7521 int total_bytes = GET_MODE_SIZE (mode);
7522 int byte, offset, word, words, bitpos;
7523 unsigned char value;
7524 /* There are always 32 bits in each long, no matter the size of
7525 the host's long. We handle floating point representations with
7526 up to 192 bits. */
7527 REAL_VALUE_TYPE r;
7528 long tmp[6];
7530 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7531 if (total_bytes > len || total_bytes > 24)
7532 return NULL_TREE;
7533 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7535 memset (tmp, 0, sizeof (tmp));
7536 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7537 bitpos += BITS_PER_UNIT)
7539 byte = (bitpos / BITS_PER_UNIT) & 3;
7540 if (UNITS_PER_WORD < 4)
7542 word = byte / UNITS_PER_WORD;
7543 if (WORDS_BIG_ENDIAN)
7544 word = (words - 1) - word;
7545 offset = word * UNITS_PER_WORD;
7546 if (BYTES_BIG_ENDIAN)
7547 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7548 else
7549 offset += byte % UNITS_PER_WORD;
7551 else
7552 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7553 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7555 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7558 real_from_target (&r, tmp, mode);
7559 return build_real (type, r);
7563 /* Subroutine of native_interpret_expr. Interpret the contents of
7564 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7565 If the buffer cannot be interpreted, return NULL_TREE. */
7567 static tree
7568 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7570 tree etype, rpart, ipart;
7571 int size;
7573 etype = TREE_TYPE (type);
7574 size = GET_MODE_SIZE (TYPE_MODE (etype));
7575 if (size * 2 > len)
7576 return NULL_TREE;
7577 rpart = native_interpret_expr (etype, ptr, size);
7578 if (!rpart)
7579 return NULL_TREE;
7580 ipart = native_interpret_expr (etype, ptr+size, size);
7581 if (!ipart)
7582 return NULL_TREE;
7583 return build_complex (type, rpart, ipart);
7587 /* Subroutine of native_interpret_expr. Interpret the contents of
7588 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7589 If the buffer cannot be interpreted, return NULL_TREE. */
7591 static tree
7592 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7594 tree etype, elem;
7595 int i, size, count;
7596 tree *elements;
7598 etype = TREE_TYPE (type);
7599 size = GET_MODE_SIZE (TYPE_MODE (etype));
7600 count = TYPE_VECTOR_SUBPARTS (type);
7601 if (size * count > len)
7602 return NULL_TREE;
7604 elements = XALLOCAVEC (tree, count);
7605 for (i = count - 1; i >= 0; i--)
7607 elem = native_interpret_expr (etype, ptr+(i*size), size);
7608 if (!elem)
7609 return NULL_TREE;
7610 elements[i] = elem;
7612 return build_vector (type, elements);
7616 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7617 the buffer PTR of length LEN as a constant of type TYPE. For
7618 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7619 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7620 return NULL_TREE. */
7622 tree
7623 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7625 switch (TREE_CODE (type))
7627 case INTEGER_TYPE:
7628 case ENUMERAL_TYPE:
7629 case BOOLEAN_TYPE:
7630 case POINTER_TYPE:
7631 case REFERENCE_TYPE:
7632 return native_interpret_int (type, ptr, len);
7634 case REAL_TYPE:
7635 return native_interpret_real (type, ptr, len);
7637 case FIXED_POINT_TYPE:
7638 return native_interpret_fixed (type, ptr, len);
7640 case COMPLEX_TYPE:
7641 return native_interpret_complex (type, ptr, len);
7643 case VECTOR_TYPE:
7644 return native_interpret_vector (type, ptr, len);
7646 default:
7647 return NULL_TREE;
7651 /* Returns true if we can interpret the contents of a native encoding
7652 as TYPE. */
7654 static bool
7655 can_native_interpret_type_p (tree type)
7657 switch (TREE_CODE (type))
7659 case INTEGER_TYPE:
7660 case ENUMERAL_TYPE:
7661 case BOOLEAN_TYPE:
7662 case POINTER_TYPE:
7663 case REFERENCE_TYPE:
7664 case FIXED_POINT_TYPE:
7665 case REAL_TYPE:
7666 case COMPLEX_TYPE:
7667 case VECTOR_TYPE:
7668 return true;
7669 default:
7670 return false;
7674 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7675 TYPE at compile-time. If we're unable to perform the conversion
7676 return NULL_TREE. */
7678 static tree
7679 fold_view_convert_expr (tree type, tree expr)
7681 /* We support up to 512-bit values (for V8DFmode). */
7682 unsigned char buffer[64];
7683 int len;
7685 /* Check that the host and target are sane. */
7686 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7687 return NULL_TREE;
7689 len = native_encode_expr (expr, buffer, sizeof (buffer));
7690 if (len == 0)
7691 return NULL_TREE;
7693 return native_interpret_expr (type, buffer, len);
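/* Round-trip sketch: VIEW_CONVERT_EXPR <int> applied to the
   REAL_CST 1.0f encodes the float's bytes and re-reads them as an
   integer, yielding 0x3f800000 on an IEEE-754 target (illustrative;
   the exact types involved depend on the target's modes).  */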
7696 /* Build an expression for the address of T. Folds away INDIRECT_REF
7697 to avoid confusing the gimplify process. */
7699 tree
7700 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7702 /* The size of the object is not relevant when talking about its address. */
7703 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7704 t = TREE_OPERAND (t, 0);
7706 if (TREE_CODE (t) == INDIRECT_REF)
7708 t = TREE_OPERAND (t, 0);
7710 if (TREE_TYPE (t) != ptrtype)
7711 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7713 else if (TREE_CODE (t) == MEM_REF
7714 && integer_zerop (TREE_OPERAND (t, 1)))
7715 return TREE_OPERAND (t, 0);
7716 else if (TREE_CODE (t) == MEM_REF
7717 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7718 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7719 TREE_OPERAND (t, 0),
7720 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7721 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7723 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7725 if (TREE_TYPE (t) != ptrtype)
7726 t = fold_convert_loc (loc, ptrtype, t);
7728 else
7729 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7731 return t;
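/* E.g. &*p folds straight back to p, and the address of a
   zero-offset MEM_REF folds to its base pointer, so no ADDR_EXPR
   is built that the gimplifier would only strip again.  */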
7734 /* Build an expression for the address of T. */
7736 tree
7737 build_fold_addr_expr_loc (location_t loc, tree t)
7739 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7741 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7744 static bool vec_cst_ctor_to_array (tree, tree *);
7746 /* Fold a unary expression of code CODE and type TYPE with operand
7747 OP0. Return the folded expression if folding is successful.
7748 Otherwise, return NULL_TREE. */
7750 tree
7751 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7753 tree tem;
7754 tree arg0;
7755 enum tree_code_class kind = TREE_CODE_CLASS (code);
7757 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7758 && TREE_CODE_LENGTH (code) == 1);
7760 arg0 = op0;
7761 if (arg0)
7763 if (CONVERT_EXPR_CODE_P (code)
7764 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7766 /* Don't use STRIP_NOPS, because signedness of argument type
7767 matters. */
7768 STRIP_SIGN_NOPS (arg0);
7770 else
7772 /* Strip any conversions that don't change the mode. This
7773 is safe for every expression, except for a comparison
7774 expression because its signedness is derived from its
7775 operands.
7777 Note that this is done as an internal manipulation within
7778 the constant folder, in order to find the simplest
7779 representation of the arguments so that their form can be
7780 studied. In any case, the appropriate type conversions
7781 should be put back in the tree that will get out of the
7782 constant folder. */
7783 STRIP_NOPS (arg0);
7787 if (TREE_CODE_CLASS (code) == tcc_unary)
7789 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7790 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7791 fold_build1_loc (loc, code, type,
7792 fold_convert_loc (loc, TREE_TYPE (op0),
7793 TREE_OPERAND (arg0, 1))));
7794 else if (TREE_CODE (arg0) == COND_EXPR)
7796 tree arg01 = TREE_OPERAND (arg0, 1);
7797 tree arg02 = TREE_OPERAND (arg0, 2);
7798 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7799 arg01 = fold_build1_loc (loc, code, type,
7800 fold_convert_loc (loc,
7801 TREE_TYPE (op0), arg01));
7802 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7803 arg02 = fold_build1_loc (loc, code, type,
7804 fold_convert_loc (loc,
7805 TREE_TYPE (op0), arg02));
7806 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7807 arg01, arg02);
7809 /* If this was a conversion, and all we did was to move into
7810 inside the COND_EXPR, bring it back out. But leave it if
7811 it is a conversion from integer to integer and the
7812 result precision is no wider than a word since such a
7813 conversion is cheap and may be optimized away by combine,
7814 while it couldn't if it were outside the COND_EXPR. Then return
7815 so we don't get into an infinite recursion loop taking the
7816 conversion out and then back in. */
7818 if ((CONVERT_EXPR_CODE_P (code)
7819 || code == NON_LVALUE_EXPR)
7820 && TREE_CODE (tem) == COND_EXPR
7821 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7822 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7823 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7824 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7825 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7826 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7827 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7828 && (INTEGRAL_TYPE_P
7829 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7830 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7831 || flag_syntax_only))
7832 tem = build1_loc (loc, code, type,
7833 build3 (COND_EXPR,
7834 TREE_TYPE (TREE_OPERAND
7835 (TREE_OPERAND (tem, 1), 0)),
7836 TREE_OPERAND (tem, 0),
7837 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7838 TREE_OPERAND (TREE_OPERAND (tem, 2),
7839 0)));
7840 return tem;
7844 switch (code)
7846 case PAREN_EXPR:
7847 /* Re-association barriers around constants and other re-association
7848 barriers can be removed. */
7849 if (CONSTANT_CLASS_P (op0)
7850 || TREE_CODE (op0) == PAREN_EXPR)
7851 return fold_convert_loc (loc, type, op0);
7852 return NULL_TREE;
7854 case NON_LVALUE_EXPR:
7855 if (!maybe_lvalue_p (op0))
7856 return fold_convert_loc (loc, type, op0);
7857 return NULL_TREE;
7859 CASE_CONVERT:
7860 case FLOAT_EXPR:
7861 case FIX_TRUNC_EXPR:
7862 if (TREE_TYPE (op0) == type)
7863 return op0;
7865 if (COMPARISON_CLASS_P (op0))
7867 /* If we have (type) (a CMP b) and type is an integral type, return
7868 new expression involving the new type. Canonicalize
7869 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7870 non-integral type.
7871 Do not fold the result as that would not simplify further, also
7872 folding again results in recursions. */
7873 if (TREE_CODE (type) == BOOLEAN_TYPE)
7874 return build2_loc (loc, TREE_CODE (op0), type,
7875 TREE_OPERAND (op0, 0),
7876 TREE_OPERAND (op0, 1));
7877 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7878 && TREE_CODE (type) != VECTOR_TYPE)
7879 return build3_loc (loc, COND_EXPR, type, op0,
7880 constant_boolean_node (true, type),
7881 constant_boolean_node (false, type));
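/* Concretely: (bool)(a < b) is rebuilt as the comparison itself
   with boolean type, while (double)(a < b) becomes
   (a < b) ? 1.0 : 0.0, since the result would not simplify
   further as a converted comparison.  */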
7884 /* Handle cases of two conversions in a row. */
7885 if (CONVERT_EXPR_P (op0))
7887 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7888 tree inter_type = TREE_TYPE (op0);
7889 int inside_int = INTEGRAL_TYPE_P (inside_type);
7890 int inside_ptr = POINTER_TYPE_P (inside_type);
7891 int inside_float = FLOAT_TYPE_P (inside_type);
7892 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7893 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7894 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7895 int inter_int = INTEGRAL_TYPE_P (inter_type);
7896 int inter_ptr = POINTER_TYPE_P (inter_type);
7897 int inter_float = FLOAT_TYPE_P (inter_type);
7898 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7899 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7900 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7901 int final_int = INTEGRAL_TYPE_P (type);
7902 int final_ptr = POINTER_TYPE_P (type);
7903 int final_float = FLOAT_TYPE_P (type);
7904 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7905 unsigned int final_prec = TYPE_PRECISION (type);
7906 int final_unsignedp = TYPE_UNSIGNED (type);
7908 /* Check for cases specific to UPC, involving pointer types. */
7909 if (final_ptr || inter_ptr || inside_ptr)
7911 int final_pts = final_ptr
7912 && upc_shared_type_p (TREE_TYPE (type));
7913 int inter_pts = inter_ptr
7914 && upc_shared_type_p (TREE_TYPE (inter_type));
7915 int inside_pts = inside_ptr
7916 && upc_shared_type_p (TREE_TYPE (inside_type));
7917 if (final_pts || inter_pts || inside_pts)
7919 if (!((final_pts && inter_pts)
7920 && TREE_TYPE (type) == TREE_TYPE (inter_type))
7921 || ((inter_pts && inside_pts)
7922 && (TREE_TYPE (inter_type)
7923 == TREE_TYPE (inside_type))))
7924 return NULL;
7928 /* In addition to the cases of two conversions in a row
7929 handled below, if we are converting something to its own
7930 type via an object of identical or wider precision, neither
7931 conversion is needed. */
7932 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7933 && (((inter_int || inter_ptr) && final_int)
7934 || (inter_float && final_float))
7935 && inter_prec >= final_prec)
7936 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7938 /* Likewise, if the intermediate and initial types are either both
7939 float or both integer, we don't need the middle conversion if the
7940 former is wider than the latter and doesn't change the signedness
7941 (for integers). Avoid this if the final type is a pointer since
7942 then we sometimes need the middle conversion. Likewise if the
7943 final type has a precision not equal to the size of its mode. */
7944 if (((inter_int && inside_int)
7945 || (inter_float && inside_float)
7946 || (inter_vec && inside_vec))
7947 && inter_prec >= inside_prec
7948 && (inter_float || inter_vec
7949 || inter_unsignedp == inside_unsignedp)
7950 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7951 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7952 && ! final_ptr
7953 && (! final_vec || inter_prec == inside_prec))
7954 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7956 /* If we have a sign-extension of a zero-extended value, we can
7957 replace that by a single zero-extension. Likewise if the
7958 final conversion does not change precision we can drop the
7959 intermediate conversion. */
7960 if (inside_int && inter_int && final_int
7961 && ((inside_prec < inter_prec && inter_prec < final_prec
7962 && inside_unsignedp && !inter_unsignedp)
7963 || final_prec == inter_prec))
7964 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
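/* For example (assuming the usual 8/16/32-bit widths),
   (int32_t)(int16_t)(uint8_t)x zero-extends to 16 bits and then
   sign-extends to 32; the low byte can never set the intermediate
   sign bit, so this equals the single zero-extension
   (int32_t)(uint8_t)x.  */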
7966 /* Two conversions in a row are not needed unless:
7967 - some conversion is floating-point (overstrict for now), or
7968 - some conversion is a vector (overstrict for now), or
7969 - the intermediate type is narrower than both initial and
7970 final, or
7971 - the intermediate type and innermost type differ in signedness,
7972 and the outermost type is wider than the intermediate, or
7973 - the initial type is a pointer type and the precisions of the
7974 intermediate and final types differ, or
7975 - the final type is a pointer type and the precisions of the
7976 initial and intermediate types differ. */
7977 if (! inside_float && ! inter_float && ! final_float
7978 && ! inside_vec && ! inter_vec && ! final_vec
7979 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7980 && ! (inside_int && inter_int
7981 && inter_unsignedp != inside_unsignedp
7982 && inter_prec < final_prec)
7983 && ((inter_unsignedp && inter_prec > inside_prec)
7984 == (final_unsignedp && final_prec > inter_prec))
7985 && ! (inside_ptr && inter_prec != final_prec)
7986 && ! (final_ptr && inside_prec != inter_prec)
7987 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7988 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7989 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7992 /* Handle (T *)&A.B.C for A being of type T and B and C
7993 living at offset zero. This occurs frequently in
7994 C++ upcasting and then accessing the base. */
7995 if (TREE_CODE (op0) == ADDR_EXPR
7996 && POINTER_TYPE_P (type)
7997 && handled_component_p (TREE_OPERAND (op0, 0)))
7999 HOST_WIDE_INT bitsize, bitpos;
8000 tree offset;
8001 enum machine_mode mode;
8002 int unsignedp, volatilep;
8003 tree base = TREE_OPERAND (op0, 0);
8004 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8005 &mode, &unsignedp, &volatilep, false);
8006 /* If the reference was to a (constant) zero offset, we can use
8007 the address of the base if it has the same base type
8008 as the result type and the pointer type is unqualified. */
8009 if (! offset && bitpos == 0
8010 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8011 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8012 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8013 return fold_convert_loc (loc, type,
8014 build_fold_addr_expr_loc (loc, base));
8017 if (TREE_CODE (op0) == MODIFY_EXPR
8018 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8019 /* Detect assigning a bitfield. */
8020 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8021 && DECL_BIT_FIELD
8022 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8024 /* Don't leave an assignment inside a conversion
8025 unless assigning a bitfield. */
8026 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8027 /* First do the assignment, then return converted constant. */
8028 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8029 TREE_NO_WARNING (tem) = 1;
8030 TREE_USED (tem) = 1;
8031 return tem;
8034 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8035 constant (if x has signed type, the sign bit cannot be set
8036 in c). This folds extension into the BIT_AND_EXPR.
8037 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8038 very likely don't have maximal range for their precision and this
8039 transformation effectively doesn't preserve non-maximal ranges. */
8040 if (TREE_CODE (type) == INTEGER_TYPE
8041 && TREE_CODE (op0) == BIT_AND_EXPR
8042 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8044 tree and_expr = op0;
8045 tree and0 = TREE_OPERAND (and_expr, 0);
8046 tree and1 = TREE_OPERAND (and_expr, 1);
8047 int change = 0;
8049 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8050 || (TYPE_PRECISION (type)
8051 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8052 change = 1;
8053 else if (TYPE_PRECISION (TREE_TYPE (and1))
8054 <= HOST_BITS_PER_WIDE_INT
8055 && tree_fits_uhwi_p (and1))
8057 unsigned HOST_WIDE_INT cst;
8059 cst = tree_to_uhwi (and1);
8060 cst &= HOST_WIDE_INT_M1U
8061 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8062 change = (cst == 0);
8063 #ifdef LOAD_EXTEND_OP
8064 if (change
8065 && !flag_syntax_only
8066 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8067 == ZERO_EXTEND))
8069 tree uns = unsigned_type_for (TREE_TYPE (and0));
8070 and0 = fold_convert_loc (loc, uns, and0);
8071 and1 = fold_convert_loc (loc, uns, and1);
8073 #endif
8075 if (change)
8077 tem = force_fit_type (type, wi::to_widest (and1), 0,
8078 TREE_OVERFLOW (and1));
8079 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8080 fold_convert_loc (loc, type, and0), tem);
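/* A sketch of the resulting shape: for unsigned char x,
   (unsigned int)(x & 0x3f) becomes (unsigned int)x & 0x3f, i.e.
   the widening is applied once to the variable and the mask
   constant is merely re-typed.  */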
8084 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8085 when one of the new casts will fold away. Conservatively we assume
8086 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8087 if (POINTER_TYPE_P (type)
8088 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8089 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8090 && !upc_shared_type_p (TREE_TYPE (type))
8091 && !upc_shared_type_p (TREE_TYPE (
8092 TREE_TYPE (TREE_OPERAND (arg0, 0))))
8093 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8094 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8095 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8097 tree arg00 = TREE_OPERAND (arg0, 0);
8098 tree arg01 = TREE_OPERAND (arg0, 1);
8100 return fold_build_pointer_plus_loc
8101 (loc, fold_convert_loc (loc, type, arg00), arg01);
8104 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8105 of the same precision, and X is an integer type not narrower than
8106 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8107 if (INTEGRAL_TYPE_P (type)
8108 && TREE_CODE (op0) == BIT_NOT_EXPR
8109 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8110 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8111 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8113 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8114 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8115 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8116 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8117 fold_convert_loc (loc, type, tem));
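/* E.g. for 32-bit int and unsigned, (int)~(unsigned)x with int x
   becomes ~x: complement commutes with conversions between
   integral types of equal precision.  */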
8120 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8121 type of X and Y (integer types only). */
8122 if (INTEGRAL_TYPE_P (type)
8123 && TREE_CODE (op0) == MULT_EXPR
8124 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8125 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8127 /* Be careful not to introduce new overflows. */
8128 tree mult_type;
8129 if (TYPE_OVERFLOW_WRAPS (type))
8130 mult_type = type;
8131 else
8132 mult_type = unsigned_type_for (type);
8134 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8136 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8137 fold_convert_loc (loc, mult_type,
8138 TREE_OPERAND (op0, 0)),
8139 fold_convert_loc (loc, mult_type,
8140 TREE_OPERAND (op0, 1)));
8141 return fold_convert_loc (loc, type, tem);
8145 tem = fold_convert_const (code, type, arg0);
8146 return tem ? tem : NULL_TREE;
8148 case ADDR_SPACE_CONVERT_EXPR:
8149 if (integer_zerop (arg0))
8150 return fold_convert_const (code, type, arg0);
8151 return NULL_TREE;
8153 case FIXED_CONVERT_EXPR:
8154 tem = fold_convert_const (code, type, arg0);
8155 return tem ? tem : NULL_TREE;
8157 case VIEW_CONVERT_EXPR:
8158 if (TREE_TYPE (op0) == type)
8159 return op0;
8160 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8161 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8162 type, TREE_OPERAND (op0, 0));
8163 if (TREE_CODE (op0) == MEM_REF)
8164 return fold_build2_loc (loc, MEM_REF, type,
8165 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8167 /* For integral conversions with the same precision or pointer
8168 conversions use a NOP_EXPR instead. */
8169 if ((INTEGRAL_TYPE_P (type)
8170 || (POINTER_TYPE_P (type)
8171 && !upc_shared_type_p (TREE_TYPE (type))))
8172 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8173 || (POINTER_TYPE_P (TREE_TYPE (op0))
8174 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8175 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8176 return fold_convert_loc (loc, type, op0);
8178 /* Strip inner integral conversions that do not change the precision. */
8179 if (CONVERT_EXPR_P (op0)
8180 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8181 || (POINTER_TYPE_P (TREE_TYPE (op0))
8182 && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
8183 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8184 || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8185 && !upc_shared_type_p (TREE_TYPE (
8186 TREE_TYPE (
8187 TREE_OPERAND (op0, 0))))))
8188 && (TYPE_PRECISION (TREE_TYPE (op0))
8189 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8190 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8191 type, TREE_OPERAND (op0, 0));
8193 return fold_view_convert_expr (type, op0);
8195 case NEGATE_EXPR:
8196 tem = fold_negate_expr (loc, arg0);
8197 if (tem)
8198 return fold_convert_loc (loc, type, tem);
8199 return NULL_TREE;
8201 case ABS_EXPR:
8202 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8203 return fold_abs_const (arg0, type);
8204 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8205 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8206 /* Convert fabs((double)float) into (double)fabsf(float). */
8207 else if (TREE_CODE (arg0) == NOP_EXPR
8208 && TREE_CODE (type) == REAL_TYPE)
8210 tree targ0 = strip_float_extensions (arg0);
8211 if (targ0 != arg0)
8212 return fold_convert_loc (loc, type,
8213 fold_build1_loc (loc, ABS_EXPR,
8214 TREE_TYPE (targ0),
8215 targ0));
8217 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8218 else if (TREE_CODE (arg0) == ABS_EXPR)
8219 return arg0;
8220 else if (tree_expr_nonnegative_p (arg0))
8221 return arg0;
8223 /* Strip sign ops from argument. */
8224 if (TREE_CODE (type) == REAL_TYPE)
8226 tem = fold_strip_sign_ops (arg0);
8227 if (tem)
8228 return fold_build1_loc (loc, ABS_EXPR, type,
8229 fold_convert_loc (loc, type, tem));
8231 return NULL_TREE;
8233 case CONJ_EXPR:
8234 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8235 return fold_convert_loc (loc, type, arg0);
8236 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8238 tree itype = TREE_TYPE (type);
8239 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8240 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8241 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8242 negate_expr (ipart));
8244 if (TREE_CODE (arg0) == COMPLEX_CST)
8246 tree itype = TREE_TYPE (type);
8247 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8248 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8249 return build_complex (type, rpart, negate_expr (ipart));
8251 if (TREE_CODE (arg0) == CONJ_EXPR)
8252 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8253 return NULL_TREE;
8255 case BIT_NOT_EXPR:
8256 if (TREE_CODE (arg0) == INTEGER_CST)
8257 return fold_not_const (arg0, type);
8258 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8259 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8260 /* Convert ~ (-A) to A - 1. */
8261 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8262 return fold_build2_loc (loc, MINUS_EXPR, type,
8263 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8264 build_int_cst (type, 1));
8265 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8266 else if (INTEGRAL_TYPE_P (type)
8267 && ((TREE_CODE (arg0) == MINUS_EXPR
8268 && integer_onep (TREE_OPERAND (arg0, 1)))
8269 || (TREE_CODE (arg0) == PLUS_EXPR
8270 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8271 return fold_build1_loc (loc, NEGATE_EXPR, type,
8272 fold_convert_loc (loc, type,
8273 TREE_OPERAND (arg0, 0)));
8274 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8275 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8276 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8277 fold_convert_loc (loc, type,
8278 TREE_OPERAND (arg0, 0)))))
8279 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8280 fold_convert_loc (loc, type,
8281 TREE_OPERAND (arg0, 1)));
8282 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8283 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8284 fold_convert_loc (loc, type,
8285 TREE_OPERAND (arg0, 1)))))
8286 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8287 fold_convert_loc (loc, type,
8288 TREE_OPERAND (arg0, 0)), tem);
8289 /* Perform BIT_NOT_EXPR on each element individually. */
8290 else if (TREE_CODE (arg0) == VECTOR_CST)
8292 tree *elements;
8293 tree elem;
8294 unsigned count = VECTOR_CST_NELTS (arg0), i;
8296 elements = XALLOCAVEC (tree, count);
8297 for (i = 0; i < count; i++)
8299 elem = VECTOR_CST_ELT (arg0, i);
8300 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8301 if (elem == NULL_TREE)
8302 break;
8303 elements[i] = elem;
8305 if (i == count)
8306 return build_vector (type, elements);
8308 else if (COMPARISON_CLASS_P (arg0)
8309 && (VECTOR_TYPE_P (type)
8310 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8312 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8313 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8314 HONOR_NANS (TYPE_MODE (op_type)));
8315 if (subcode != ERROR_MARK)
8316 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8317 TREE_OPERAND (arg0, 1));
8321 return NULL_TREE;
8323 case TRUTH_NOT_EXPR:
8324 /* Note that the operand of this must be an int
8325 and its values must be 0 or 1.
8326 ("true" is a fixed value perhaps depending on the language,
8327 but we don't handle values other than 1 correctly yet.) */
8328 tem = fold_truth_not_expr (loc, arg0);
8329 if (!tem)
8330 return NULL_TREE;
8331 return fold_convert_loc (loc, type, tem);
8333 case REALPART_EXPR:
8334 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8335 return fold_convert_loc (loc, type, arg0);
8336 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8337 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8338 TREE_OPERAND (arg0, 1));
8339 if (TREE_CODE (arg0) == COMPLEX_CST)
8340 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8341 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8343 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8344 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8345 fold_build1_loc (loc, REALPART_EXPR, itype,
8346 TREE_OPERAND (arg0, 0)),
8347 fold_build1_loc (loc, REALPART_EXPR, itype,
8348 TREE_OPERAND (arg0, 1)));
8349 return fold_convert_loc (loc, type, tem);
8351 if (TREE_CODE (arg0) == CONJ_EXPR)
8353 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8354 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8355 TREE_OPERAND (arg0, 0));
8356 return fold_convert_loc (loc, type, tem);
8358 if (TREE_CODE (arg0) == CALL_EXPR)
8360 tree fn = get_callee_fndecl (arg0);
8361 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8362 switch (DECL_FUNCTION_CODE (fn))
8364 CASE_FLT_FN (BUILT_IN_CEXPI):
8365 fn = mathfn_built_in (type, BUILT_IN_COS);
8366 if (fn)
8367 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8368 break;
8370 default:
8371 break;
8374 return NULL_TREE;
8376 case IMAGPART_EXPR:
8377 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8378 return build_zero_cst (type);
8379 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8380 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8381 TREE_OPERAND (arg0, 0));
8382 if (TREE_CODE (arg0) == COMPLEX_CST)
8383 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8384 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8386 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8387 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8388 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8389 TREE_OPERAND (arg0, 0)),
8390 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8391 TREE_OPERAND (arg0, 1)));
8392 return fold_convert_loc (loc, type, tem);
8394 if (TREE_CODE (arg0) == CONJ_EXPR)
8396 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8397 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8398 return fold_convert_loc (loc, type, negate_expr (tem));
8400 if (TREE_CODE (arg0) == CALL_EXPR)
8402 tree fn = get_callee_fndecl (arg0);
8403 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8404 switch (DECL_FUNCTION_CODE (fn))
8406 CASE_FLT_FN (BUILT_IN_CEXPI):
8407 fn = mathfn_built_in (type, BUILT_IN_SIN);
8408 if (fn)
8409 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8410 break;
8412 default:
8413 break;
8416 return NULL_TREE;
8418 case INDIRECT_REF:
8419 /* Fold *&X to X if X is an lvalue. */
8420 if (TREE_CODE (op0) == ADDR_EXPR)
8422 tree op00 = TREE_OPERAND (op0, 0);
8423 if ((TREE_CODE (op00) == VAR_DECL
8424 || TREE_CODE (op00) == PARM_DECL
8425 || TREE_CODE (op00) == RESULT_DECL)
8426 && !TREE_READONLY (op00))
8427 return op00;
8429 return NULL_TREE;
8431 case VEC_UNPACK_LO_EXPR:
8432 case VEC_UNPACK_HI_EXPR:
8433 case VEC_UNPACK_FLOAT_LO_EXPR:
8434 case VEC_UNPACK_FLOAT_HI_EXPR:
8436 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8437 tree *elts;
8438 enum tree_code subcode;
8440 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8441 if (TREE_CODE (arg0) != VECTOR_CST)
8442 return NULL_TREE;
8444 elts = XALLOCAVEC (tree, nelts * 2);
8445 if (!vec_cst_ctor_to_array (arg0, elts))
8446 return NULL_TREE;
8448 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8449 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8450 elts += nelts;
8452 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8453 subcode = NOP_EXPR;
8454 else
8455 subcode = FLOAT_EXPR;
8457 for (i = 0; i < nelts; i++)
8459 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8460 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8461 return NULL_TREE;
8464 return build_vector (type, elts);
8467 case REDUC_MIN_EXPR:
8468 case REDUC_MAX_EXPR:
8469 case REDUC_PLUS_EXPR:
8471 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8472 tree *elts;
8473 enum tree_code subcode;
8475 if (TREE_CODE (op0) != VECTOR_CST)
8476 return NULL_TREE;
8478 elts = XALLOCAVEC (tree, nelts);
8479 if (!vec_cst_ctor_to_array (op0, elts))
8480 return NULL_TREE;
8482 switch (code)
8484 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8485 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8486 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8487 default: gcc_unreachable ();
8490 for (i = 1; i < nelts; i++)
8492 elts[0] = const_binop (subcode, elts[0], elts[i]);
8493 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8494 return NULL_TREE;
8495 elts[i] = build_zero_cst (TREE_TYPE (type));
8498 return build_vector (type, elts);
8501 default:
8502 return NULL_TREE;
8503 } /* switch (code) */
8507 /* If the operation was a conversion do _not_ mark a resulting constant
8508 with TREE_OVERFLOW if the original constant was not. These conversions
8509 have implementation defined behavior and retaining the TREE_OVERFLOW
8510 flag here would confuse later passes such as VRP. */
8511 tree
8512 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8513 tree type, tree op0)
8515 tree res = fold_unary_loc (loc, code, type, op0);
8516 if (res
8517 && TREE_CODE (res) == INTEGER_CST
8518 && TREE_CODE (op0) == INTEGER_CST
8519 && CONVERT_EXPR_CODE_P (code))
8520 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8522 return res;
8525 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8526 operands OP0 and OP1. LOC is the location of the resulting expression.
8527 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8528 Return the folded expression if folding is successful. Otherwise,
8529 return NULL_TREE. */
8530 static tree
8531 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8532 tree arg0, tree arg1, tree op0, tree op1)
8534 tree tem;
8536 /* We only do these simplifications if we are optimizing. */
8537 if (!optimize)
8538 return NULL_TREE;
8540 /* Check for things like (A || B) && (A || C). We can convert this
8541 to A || (B && C). Note that either operator can be any of the four
8542 truth and/or operations and the transformation will still be
8543 valid. Also note that we only care about order for the
8544 ANDIF and ORIF operators. If B contains side effects, this
8545 might change the truth-value of A. */
8546 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8547 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8548 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8549 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8550 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8551 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8553 tree a00 = TREE_OPERAND (arg0, 0);
8554 tree a01 = TREE_OPERAND (arg0, 1);
8555 tree a10 = TREE_OPERAND (arg1, 0);
8556 tree a11 = TREE_OPERAND (arg1, 1);
8557 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8558 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8559 && (code == TRUTH_AND_EXPR
8560 || code == TRUTH_OR_EXPR));
8562 if (operand_equal_p (a00, a10, 0))
8563 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8564 fold_build2_loc (loc, code, type, a01, a11));
8565 else if (commutative && operand_equal_p (a00, a11, 0))
8566 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8567 fold_build2_loc (loc, code, type, a01, a10));
8568 else if (commutative && operand_equal_p (a01, a10, 0))
8569 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8570 fold_build2_loc (loc, code, type, a00, a11));
8572 /* This case is tricky because we must either have commutative
8573 operators or else A10 must not have side-effects. */
8575 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8576 && operand_equal_p (a01, a11, 0))
8577 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8578 fold_build2_loc (loc, code, type, a00, a10),
8579 a01);
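/* So (a || b) && (a || c) becomes a || (b && c).  The shared
   operand may also appear on either side, but only for the
   non-short-circuit TRUTH_AND/TRUTH_OR forms, where reordering
   cannot skip an operand's side effects.  */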
8582 /* See if we can build a range comparison. */
8583 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8584 return tem;
8586 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8587 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8589 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8590 if (tem)
8591 return fold_build2_loc (loc, code, type, tem, arg1);
8594 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8595 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8597 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8598 if (tem)
8599 return fold_build2_loc (loc, code, type, arg0, tem);
8602 /* Check for the possibility of merging component references. If our
8603 lhs is another similar operation, try to merge its rhs with our
8604 rhs. Then try to merge our lhs and rhs. */
8605 if (TREE_CODE (arg0) == code
8606 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8607 TREE_OPERAND (arg0, 1), arg1)))
8608 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8610 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8611 return tem;
8613 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8614 && (code == TRUTH_AND_EXPR
8615 || code == TRUTH_ANDIF_EXPR
8616 || code == TRUTH_OR_EXPR
8617 || code == TRUTH_ORIF_EXPR))
8619 enum tree_code ncode, icode;
8621 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8622 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8623 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8625 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8626 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8627 We don't want to pack more than two leafs to a non-IF AND/OR
8628 expression.
8629 If the tree-code of the left-hand operand isn't an AND/OR-IF code
8630 and not equal to IF-CODE, then we don't want to add the right-hand
8631 operand.  If the inner right-hand side of the left-hand operand
8632 has side-effects, or isn't simple, then we can't add to it,
8633 as otherwise we might destroy the if-sequence. */
8634 if (TREE_CODE (arg0) == icode
8635 && simple_operand_p_2 (arg1)
8636 /* Needed for sequence points to handle trappings, and
8637 side-effects. */
8638 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8640 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8641 arg1);
8642 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8643 tem);
8645 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8646 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8647 else if (TREE_CODE (arg1) == icode
8648 && simple_operand_p_2 (arg0)
8649 /* Needed for sequence points to handle trappings, and
8650 side-effects. */
8651 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8653 tem = fold_build2_loc (loc, ncode, type,
8654 arg0, TREE_OPERAND (arg1, 0));
8655 return fold_build2_loc (loc, icode, type, tem,
8656 TREE_OPERAND (arg1, 1));
8658 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8659 into (A OR B).
8660 For sequence point consistency, we need to check for trapping,
8661 and side-effects. */
8662 else if (code == icode && simple_operand_p_2 (arg0)
8663 && simple_operand_p_2 (arg1))
8664 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8667 return NULL_TREE;
8670 /* Fold a binary expression of code CODE and type TYPE with operands
8671 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8672 Return the folded expression if folding is successful. Otherwise,
8673 return NULL_TREE. */
8675 static tree
8676 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8678 enum tree_code compl_code;
8680 if (code == MIN_EXPR)
8681 compl_code = MAX_EXPR;
8682 else if (code == MAX_EXPR)
8683 compl_code = MIN_EXPR;
8684 else
8685 gcc_unreachable ();
8687 /* MIN (MAX (a, b), b) == b. */
8688 if (TREE_CODE (op0) == compl_code
8689 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8690 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8692 /* MIN (MAX (b, a), b) == b. */
8693 if (TREE_CODE (op0) == compl_code
8694 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8695 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8696 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8698 /* MIN (a, MAX (a, b)) == a. */
8699 if (TREE_CODE (op1) == compl_code
8700 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8701 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8702 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8704 /* MIN (a, MAX (b, a)) == a. */
8705 if (TREE_CODE (op1) == compl_code
8706 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8707 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8708 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8710 return NULL_TREE;
8713 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8714 by changing CODE to reduce the magnitude of constants involved in
8715 ARG0 of the comparison.
8716 Returns a canonicalized comparison tree if a simplification was
8717 possible, otherwise returns NULL_TREE.
8718 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8719 valid if signed overflow is undefined. */
8721 static tree
8722 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8723 tree arg0, tree arg1,
8724 bool *strict_overflow_p)
8726 enum tree_code code0 = TREE_CODE (arg0);
8727 tree t, cst0 = NULL_TREE;
8728 int sgn0;
8729 bool swap = false;
8731 /* Match A +- CST code arg1 and CST code arg1. We can change the
8732 first form only if overflow is undefined. */
8733 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8734 /* In principle pointers also have undefined overflow behavior,
8735 but that causes problems elsewhere. */
8736 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8737 && (code0 == MINUS_EXPR
8738 || code0 == PLUS_EXPR)
8739 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8740 || code0 == INTEGER_CST))
8741 return NULL_TREE;
8743 /* Identify the constant in arg0 and its sign. */
8744 if (code0 == INTEGER_CST)
8745 cst0 = arg0;
8746 else
8747 cst0 = TREE_OPERAND (arg0, 1);
8748 sgn0 = tree_int_cst_sgn (cst0);
8750 /* Overflowed constants and zero will cause problems. */
8751 if (integer_zerop (cst0)
8752 || TREE_OVERFLOW (cst0))
8753 return NULL_TREE;
8755 /* See if we can reduce the magnitude of the constant in
8756 arg0 by changing the comparison code. */
8757 if (code0 == INTEGER_CST)
8759 /* CST <= arg1 -> CST-1 < arg1. */
8760 if (code == LE_EXPR && sgn0 == 1)
8761 code = LT_EXPR;
8762 /* -CST < arg1 -> -CST-1 <= arg1. */
8763 else if (code == LT_EXPR && sgn0 == -1)
8764 code = LE_EXPR;
8765 /* CST > arg1 -> CST-1 >= arg1. */
8766 else if (code == GT_EXPR && sgn0 == 1)
8767 code = GE_EXPR;
8768 /* -CST >= arg1 -> -CST-1 > arg1. */
8769 else if (code == GE_EXPR && sgn0 == -1)
8770 code = GT_EXPR;
8771 else
8772 return NULL_TREE;
8773 /* arg1 code' CST' might be more canonical. */
8774 swap = true;
8776 else
8778 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8779 if (code == LT_EXPR
8780 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8781 code = LE_EXPR;
8782 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8783 else if (code == GT_EXPR
8784 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8785 code = GE_EXPR;
8786 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8787 else if (code == LE_EXPR
8788 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8789 code = LT_EXPR;
8790 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8791 else if (code == GE_EXPR
8792 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8793 code = GT_EXPR;
8794 else
8795 return NULL_TREE;
8796 *strict_overflow_p = true;
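/* Worked cases: 5 <= y becomes 4 < y (and is then offered back
   swapped as y > 4), while x - 1 < y becomes x - 0 <= y, i.e.
   x <= y; the latter is valid only when signed overflow is
   undefined, hence *STRICT_OVERFLOW_P.  */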
8799 /* Now build the constant reduced in magnitude. But not if that
8800 would produce one outside of its type's range. */
8801 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8802 && ((sgn0 == 1
8803 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8804 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8805 || (sgn0 == -1
8806 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8807 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8808 /* We cannot swap the comparison here as that would cause us to
8809 endlessly recurse. */
8810 return NULL_TREE;
8812 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8813 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8814 if (code0 != INTEGER_CST)
8815 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8816 t = fold_convert (TREE_TYPE (arg1), t);
8818 /* If swapping might yield a more canonical form, do so. */
8819 if (swap)
8820 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8821 else
8822 return fold_build2_loc (loc, code, type, t, arg1);
8825 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8826 overflow further. Try to decrease the magnitude of constants involved
8827 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8828 and put sole constants at the second argument position.
8829 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8831 static tree
8832 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8833 tree arg0, tree arg1)
8835 tree t;
8836 bool strict_overflow_p;
8837 const char * const warnmsg = G_("assuming signed overflow does not occur "
8838 "when reducing constant in comparison");
8840 /* Try canonicalization by simplifying arg0. */
8841 strict_overflow_p = false;
8842 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8843 &strict_overflow_p);
8844 if (t)
8846 if (strict_overflow_p)
8847 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8848 return t;
8851 /* Try canonicalization by simplifying arg1 using the swapped
8852 comparison. */
8853 code = swap_tree_comparison (code);
8854 strict_overflow_p = false;
8855 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8856 &strict_overflow_p);
8857 if (t && strict_overflow_p)
8858 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8859 return t;
8862 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8863 space. This is used to avoid issuing overflow warnings for
8864 expressions like &p->x which cannot wrap. */
8866 static bool
8867 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8869 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8870 return true;
8872 if (bitpos < 0)
8873 return true;
8875 wide_int wi_offset;
8876 int precision = TYPE_PRECISION (TREE_TYPE (base));
8877 if (offset == NULL_TREE)
8878 wi_offset = wi::zero (precision);
8879 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8880 return true;
8881 else
8882 wi_offset = offset;
8884 bool overflow;
8885 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8886 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8887 if (overflow)
8888 return true;
8890 if (!wi::fits_uhwi_p (total))
8891 return true;
8893 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8894 if (size <= 0)
8895 return true;
8897 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8898 array. */
8899 if (TREE_CODE (base) == ADDR_EXPR)
8901 HOST_WIDE_INT base_size;
8903 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8904 if (base_size > 0 && size < base_size)
8905 size = base_size;
8908 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8911 /* Return the HOST_WIDE_INT least significant bits of T, an
8912 INTEGER_CST of sizetype kind. This makes sure to properly
8913 sign-extend the constant. */
8915 static HOST_WIDE_INT
8916 size_low_cst (const_tree t)
8918 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8919 int prec = TYPE_PRECISION (TREE_TYPE (t));
8920 if (prec < HOST_BITS_PER_WIDE_INT)
8921 return sext_hwi (w, prec);
8922 return w;
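/* E.g. with a 32-bit sizetype on a 64-bit host, the constant
   0xfffffffc comes back as (HOST_WIDE_INT) -4 rather than
   4294967292.  */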
8925 /* Subroutine of fold_binary. This routine performs all of the
8926 transformations that are common to the equality/inequality
8927 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8928 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8929 fold_binary should call fold_binary. Fold a comparison with
8930 tree code CODE and type TYPE with operands OP0 and OP1. Return
8931 the folded comparison or NULL_TREE. */
8933 static tree
8934 fold_comparison (location_t loc, enum tree_code code, tree type,
8935 tree op0, tree op1)
8937 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8938 tree arg0, arg1, tem;
8940 arg0 = op0;
8941 arg1 = op1;
8943 STRIP_SIGN_NOPS (arg0);
8944 STRIP_SIGN_NOPS (arg1);
8946 tem = fold_relational_const (code, type, arg0, arg1);
8947 if (tem != NULL_TREE)
8948 return tem;
8950 /* If one arg is a real or integer constant, put it last. */
8951 if (tree_swap_operands_p (arg0, arg1, true))
8952 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8954 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8955 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8956 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8957 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8958 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8959 && TREE_CODE (arg1) == INTEGER_CST
8960 && !TREE_OVERFLOW (arg1))
8962 const enum tree_code
8963 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8964 tree const1 = TREE_OPERAND (arg0, 1);
8965 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8966 tree variable = TREE_OPERAND (arg0, 0);
8967 tree new_const = int_const_binop (reverse_op, const2, const1);
8969 /* If the constant operation overflowed this can be
8970 simplified as a comparison against INT_MAX/INT_MIN. */
8971 if (TREE_OVERFLOW (new_const))
8973 int const1_sgn = tree_int_cst_sgn (const1);
8974 enum tree_code code2 = code;
8976 /* Get the sign of the constant on the lhs if the
8977 operation were VARIABLE + CONST1. */
8978 if (TREE_CODE (arg0) == MINUS_EXPR)
8979 const1_sgn = -const1_sgn;
8981 /* The sign of the constant determines if we overflowed
8982 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8983 Canonicalize to the INT_MIN overflow by swapping the comparison
8984 if necessary. */
8985 if (const1_sgn == -1)
8986 code2 = swap_tree_comparison (code);
8988 /* We can now look at the canonicalized case
8989 VARIABLE + 1 CODE2 INT_MIN
8990 and decide on the result. */
8991 switch (code2)
8993 case EQ_EXPR:
8994 case LT_EXPR:
8995 case LE_EXPR:
8996 return
8997 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8999 case NE_EXPR:
9000 case GE_EXPR:
9001 case GT_EXPR:
9002 return
9003 omit_one_operand_loc (loc, type, boolean_true_node, variable);
9005 default:
9006 gcc_unreachable ();
9009 else
9011 if (!equality_code)
9012 fold_overflow_warning ("assuming signed overflow does not occur "
9013 "when changing X +- C1 cmp C2 to "
9014 "X cmp C2 -+ C1",
9015 WARN_STRICT_OVERFLOW_COMPARISON);
9016 return fold_build2_loc (loc, code, type, variable, new_const);
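/* Worked example: with signed X and undefined overflow (or for ==/!=),
   X + 10 < 20 folds to X < 10. If the combined constant overflows,
   e.g. X + 1 < INT_MIN, the result is known: X + 1 can never be below
   INT_MIN when signed overflow is undefined, so this folds to false. */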
9020 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
9021 if (TREE_CODE (arg0) == MINUS_EXPR
9022 && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9023 && integer_zerop (arg1))
9025 if (!equality_code)
9026 fold_overflow_warning ("assuming signed overflow does not occur "
9027 "when changing X - Y cmp 0 to X cmp Y",
9028 WARN_STRICT_OVERFLOW_COMPARISON);
9029 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
9030 TREE_OPERAND (arg0, 1));
9033 /* For comparisons of pointers we can decompose it to a compile time
9034 comparison of the base objects and the offsets into the object.
9035 This requires at least one operand being an ADDR_EXPR or a
9036 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9037 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9038 && (TREE_CODE (arg0) == ADDR_EXPR
9039 || TREE_CODE (arg1) == ADDR_EXPR
9040 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9041 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9043 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9044 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9045 enum machine_mode mode;
9046 int volatilep, unsignedp;
9047 bool indirect_base0 = false, indirect_base1 = false;
9049 /* Get base and offset for the access. Strip ADDR_EXPR for
9050 get_inner_reference, but put it back by stripping INDIRECT_REF
9051 off the base object if possible. indirect_baseN will be true
9052 if baseN is not an address but refers to the object itself. */
9053 base0 = arg0;
9054 if (TREE_CODE (arg0) == ADDR_EXPR)
9056 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9057 &bitsize, &bitpos0, &offset0, &mode,
9058 &unsignedp, &volatilep, false);
9059 if (TREE_CODE (base0) == INDIRECT_REF)
9060 base0 = TREE_OPERAND (base0, 0);
9061 else
9062 indirect_base0 = true;
9064 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9066 base0 = TREE_OPERAND (arg0, 0);
9067 STRIP_SIGN_NOPS (base0);
9068 if (TREE_CODE (base0) == ADDR_EXPR)
9070 base0 = TREE_OPERAND (base0, 0);
9071 indirect_base0 = true;
9073 offset0 = TREE_OPERAND (arg0, 1);
9074 if (tree_fits_shwi_p (offset0))
9076 HOST_WIDE_INT off = size_low_cst (offset0);
9077 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9078 * BITS_PER_UNIT)
9079 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9081 bitpos0 = off * BITS_PER_UNIT;
9082 offset0 = NULL_TREE;
9087 base1 = arg1;
9088 if (TREE_CODE (arg1) == ADDR_EXPR)
9090 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9091 &bitsize, &bitpos1, &offset1, &mode,
9092 &unsignedp, &volatilep, false);
9093 if (TREE_CODE (base1) == INDIRECT_REF)
9094 base1 = TREE_OPERAND (base1, 0);
9095 else
9096 indirect_base1 = true;
9098 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9100 base1 = TREE_OPERAND (arg1, 0);
9101 STRIP_SIGN_NOPS (base1);
9102 if (TREE_CODE (base1) == ADDR_EXPR)
9104 base1 = TREE_OPERAND (base1, 0);
9105 indirect_base1 = true;
9107 offset1 = TREE_OPERAND (arg1, 1);
9108 if (tree_fits_shwi_p (offset1))
9110 HOST_WIDE_INT off = size_low_cst (offset1);
9111 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9112 * BITS_PER_UNIT)
9113 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9115 bitpos1 = off * BITS_PER_UNIT;
9116 offset1 = NULL_TREE;
9121 /* A local variable can never be pointed to by
9122 the default SSA name of an incoming parameter. */
9123 if ((TREE_CODE (arg0) == ADDR_EXPR
9124 && indirect_base0
9125 && TREE_CODE (base0) == VAR_DECL
9126 && auto_var_in_fn_p (base0, current_function_decl)
9127 && !indirect_base1
9128 && TREE_CODE (base1) == SSA_NAME
9129 && SSA_NAME_IS_DEFAULT_DEF (base1)
9130 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9131 || (TREE_CODE (arg1) == ADDR_EXPR
9132 && indirect_base1
9133 && TREE_CODE (base1) == VAR_DECL
9134 && auto_var_in_fn_p (base1, current_function_decl)
9135 && !indirect_base0
9136 && TREE_CODE (base0) == SSA_NAME
9137 && SSA_NAME_IS_DEFAULT_DEF (base0)
9138 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9140 if (code == NE_EXPR)
9141 return constant_boolean_node (1, type);
9142 else if (code == EQ_EXPR)
9143 return constant_boolean_node (0, type);
9145 /* If we have equivalent bases we might be able to simplify. */
9146 else if (indirect_base0 == indirect_base1
9147 && operand_equal_p (base0, base1, 0))
9149 /* We can fold this expression to a constant if the non-constant
9150 offset parts are equal. */
9151 if ((offset0 == offset1
9152 || (offset0 && offset1
9153 && operand_equal_p (offset0, offset1, 0)))
9154 && (code == EQ_EXPR
9155 || code == NE_EXPR
9156 || (indirect_base0 && DECL_P (base0))
9157 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9160 if (!equality_code
9161 && bitpos0 != bitpos1
9162 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9163 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9164 fold_overflow_warning (("assuming pointer wraparound does not "
9165 "occur when comparing P +- C1 with "
9166 "P +- C2"),
9167 WARN_STRICT_OVERFLOW_CONDITIONAL);
9169 switch (code)
9171 case EQ_EXPR:
9172 return constant_boolean_node (bitpos0 == bitpos1, type);
9173 case NE_EXPR:
9174 return constant_boolean_node (bitpos0 != bitpos1, type);
9175 case LT_EXPR:
9176 return constant_boolean_node (bitpos0 < bitpos1, type);
9177 case LE_EXPR:
9178 return constant_boolean_node (bitpos0 <= bitpos1, type);
9179 case GE_EXPR:
9180 return constant_boolean_node (bitpos0 >= bitpos1, type);
9181 case GT_EXPR:
9182 return constant_boolean_node (bitpos0 > bitpos1, type);
9183 default:;
9186 /* We can simplify the comparison to a comparison of the variable
9187 offset parts if the constant offset parts are equal.
9188 Be careful to use signed sizetype here because otherwise we
9189 mess with array offsets in the wrong way. This is possible
9190 because pointer arithmetic is restricted to remain within an
9191 object and overflow on pointer differences is undefined as of
9192 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9193 else if (bitpos0 == bitpos1
9194 && (equality_code
9195 || (indirect_base0 && DECL_P (base0))
9196 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9198 /* By converting to signed sizetype we cover middle-end pointer
9199 arithmetic which operates on unsigned pointer types of size
9200 type size and ARRAY_REF offsets which are properly sign or
9201 zero extended from their type in case it is narrower than
9202 sizetype. */
9203 if (offset0 == NULL_TREE)
9204 offset0 = build_int_cst (ssizetype, 0);
9205 else
9206 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9207 if (offset1 == NULL_TREE)
9208 offset1 = build_int_cst (ssizetype, 0);
9209 else
9210 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9212 if (!equality_code
9213 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9214 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9215 fold_overflow_warning (("assuming pointer wraparound does not "
9216 "occur when comparing P +- C1 with "
9217 "P +- C2"),
9218 WARN_STRICT_OVERFLOW_COMPARISON);
9220 return fold_build2_loc (loc, code, type, offset0, offset1);
9223 /* For non-equal bases we can simplify if they are addresses
9224 of local binding decls or constants. */
9225 else if (indirect_base0 && indirect_base1
9226 /* We know that !operand_equal_p (base0, base1, 0)
9227 because the if condition was false. But make
9228 sure two decls are not the same. */
9229 && base0 != base1
9230 && TREE_CODE (arg0) == ADDR_EXPR
9231 && TREE_CODE (arg1) == ADDR_EXPR
9232 && (((TREE_CODE (base0) == VAR_DECL
9233 || TREE_CODE (base0) == PARM_DECL)
9234 && (targetm.binds_local_p (base0)
9235 || CONSTANT_CLASS_P (base1)))
9236 || CONSTANT_CLASS_P (base0))
9237 && (((TREE_CODE (base1) == VAR_DECL
9238 || TREE_CODE (base1) == PARM_DECL)
9239 && (targetm.binds_local_p (base1)
9240 || CONSTANT_CLASS_P (base0)))
9241 || CONSTANT_CLASS_P (base1)))
9243 if (code == EQ_EXPR)
9244 return omit_two_operands_loc (loc, type, boolean_false_node,
9245 arg0, arg1);
9246 else if (code == NE_EXPR)
9247 return omit_two_operands_loc (loc, type, boolean_true_node,
9248 arg0, arg1);
9250 /* For equal offsets we can simplify to a comparison of the
9251 base addresses. */
9252 else if (bitpos0 == bitpos1
9253 && (indirect_base0
9254 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9255 && (indirect_base1
9256 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9257 && ((offset0 == offset1)
9258 || (offset0 && offset1
9259 && operand_equal_p (offset0, offset1, 0))))
9261 if (indirect_base0)
9262 base0 = build_fold_addr_expr_loc (loc, base0);
9263 if (indirect_base1)
9264 base1 = build_fold_addr_expr_loc (loc, base1);
9265 return fold_build2_loc (loc, code, type, base0, base1);
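/* Illustrative examples: &a[3] == &a[5] has equal bases but different
   constant offsets (bitpos0 != bitpos1) and folds to false; &a[i] < &a[j]
   has equal bases and equal constant parts, so it folds to a comparison
   of the variable offsets in ssizetype; &x == &y for two distinct local
   decls folds to false via the non-equal-bases branch. */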
9269 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9270 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9271 the resulting offset is smaller in absolute value than the
9272 original one and has the same sign. */
9273 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9274 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9275 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9276 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9277 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9278 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9279 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9281 tree const1 = TREE_OPERAND (arg0, 1);
9282 tree const2 = TREE_OPERAND (arg1, 1);
9283 tree variable1 = TREE_OPERAND (arg0, 0);
9284 tree variable2 = TREE_OPERAND (arg1, 0);
9285 tree cst;
9286 const char * const warnmsg = G_("assuming signed overflow does not "
9287 "occur when combining constants around "
9288 "a comparison");
9290 /* Put the constant on the side where it doesn't overflow and is
9291 of lower absolute value and of the same sign as before. */
9292 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9293 ? MINUS_EXPR : PLUS_EXPR,
9294 const2, const1);
9295 if (!TREE_OVERFLOW (cst)
9296 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9297 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9299 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9300 return fold_build2_loc (loc, code, type,
9301 variable1,
9302 fold_build2_loc (loc, TREE_CODE (arg1),
9303 TREE_TYPE (arg1),
9304 variable2, cst));
9307 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9308 ? MINUS_EXPR : PLUS_EXPR,
9309 const1, const2);
9310 if (!TREE_OVERFLOW (cst)
9311 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9312 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9314 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9315 return fold_build2_loc (loc, code, type,
9316 fold_build2_loc (loc, TREE_CODE (arg0),
9317 TREE_TYPE (arg0),
9318 variable1, cst),
9319 variable2);
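/* Worked example: x + 2 < y + 5 combines the constants into
   cst = 5 - 2 = 3, which is smaller in absolute value than 5 and has
   the same sign, so the comparison folds to x < y + 3. */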
9323 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9324 signed arithmetic case. That form is created by the compiler
9325 often enough for folding it to be of value. One example is in
9326 computing loop trip counts after Operator Strength Reduction. */
9327 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9328 && TREE_CODE (arg0) == MULT_EXPR
9329 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9330 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9331 && integer_zerop (arg1))
9333 tree const1 = TREE_OPERAND (arg0, 1);
9334 tree const2 = arg1; /* zero */
9335 tree variable1 = TREE_OPERAND (arg0, 0);
9336 enum tree_code cmp_code = code;
9338 /* Handle unfolded multiplication by zero. */
9339 if (integer_zerop (const1))
9340 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9342 fold_overflow_warning (("assuming signed overflow does not occur when "
9343 "eliminating multiplication in comparison "
9344 "with zero"),
9345 WARN_STRICT_OVERFLOW_COMPARISON);
9347 /* If const1 is negative we swap the sense of the comparison. */
9348 if (tree_int_cst_sgn (const1) < 0)
9349 cmp_code = swap_tree_comparison (cmp_code);
9351 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
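/* Worked examples: x * 4 > 0 folds to x > 0, and x * -2 > 0 folds to
   x < 0 (a negative C1 swaps the comparison); both rely on signed
   overflow being undefined. */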
9354 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9355 if (tem)
9356 return tem;
9358 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9360 tree targ0 = strip_float_extensions (arg0);
9361 tree targ1 = strip_float_extensions (arg1);
9362 tree newtype = TREE_TYPE (targ0);
9364 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9365 newtype = TREE_TYPE (targ1);
9367 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9368 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9369 return fold_build2_loc (loc, code, type,
9370 fold_convert_loc (loc, newtype, targ0),
9371 fold_convert_loc (loc, newtype, targ1));
9373 /* (-a) CMP (-b) -> b CMP a */
9374 if (TREE_CODE (arg0) == NEGATE_EXPR
9375 && TREE_CODE (arg1) == NEGATE_EXPR)
9376 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9377 TREE_OPERAND (arg0, 0));
9379 if (TREE_CODE (arg1) == REAL_CST)
9381 REAL_VALUE_TYPE cst;
9382 cst = TREE_REAL_CST (arg1);
9384 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9385 if (TREE_CODE (arg0) == NEGATE_EXPR)
9386 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9387 TREE_OPERAND (arg0, 0),
9388 build_real (TREE_TYPE (arg1),
9389 real_value_negate (&cst)));
9391 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9392 /* a CMP (-0) -> a CMP 0 */
9393 if (REAL_VALUE_MINUS_ZERO (cst))
9394 return fold_build2_loc (loc, code, type, arg0,
9395 build_real (TREE_TYPE (arg1), dconst0));
9397 /* x != NaN is always true, other ops are always false. */
9398 if (REAL_VALUE_ISNAN (cst)
9399 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9401 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9402 return omit_one_operand_loc (loc, type, tem, arg0);
9405 /* Fold comparisons against infinity. */
9406 if (REAL_VALUE_ISINF (cst)
9407 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9409 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9410 if (tem != NULL_TREE)
9411 return tem;
9415 /* If this is a comparison of a real constant with a PLUS_EXPR
9416 or a MINUS_EXPR of a real constant, we can convert it into a
9417 comparison with a revised real constant, provided that
9418 unsafe_math_optimizations are enabled and no overflow occurs. */
9419 if (flag_unsafe_math_optimizations
9420 && TREE_CODE (arg1) == REAL_CST
9421 && (TREE_CODE (arg0) == PLUS_EXPR
9422 || TREE_CODE (arg0) == MINUS_EXPR)
9423 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9424 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9425 ? MINUS_EXPR : PLUS_EXPR,
9426 arg1, TREE_OPERAND (arg0, 1)))
9427 && !TREE_OVERFLOW (tem))
9428 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9430 /* Likewise, we can simplify a comparison of a real constant with
9431 a MINUS_EXPR whose first operand is also a real constant, i.e.
9432 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9433 floating-point types only if -fassociative-math is set. */
9434 if (flag_associative_math
9435 && TREE_CODE (arg1) == REAL_CST
9436 && TREE_CODE (arg0) == MINUS_EXPR
9437 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9438 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9439 arg1))
9440 && !TREE_OVERFLOW (tem))
9441 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9442 TREE_OPERAND (arg0, 1), tem);
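/* Worked example: with -fassociative-math, (10.0 - x) < 4.0 computes
   tem = 10.0 - 4.0 = 6.0 and folds to x > 6.0. */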
9444 /* Fold comparisons against built-in math functions. */
9445 if (TREE_CODE (arg1) == REAL_CST
9446 && flag_unsafe_math_optimizations
9447 && ! flag_errno_math)
9449 enum built_in_function fcode = builtin_mathfn_code (arg0);
9451 if (fcode != END_BUILTINS)
9453 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9454 if (tem != NULL_TREE)
9455 return tem;
9460 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9461 && CONVERT_EXPR_P (arg0))
9463 /* If we are widening one operand of an integer comparison,
9464 see if the other operand is similarly being widened. Perhaps we
9465 can do the comparison in the narrower type. */
9466 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9467 if (tem)
9468 return tem;
9470 /* Or if we are changing signedness. */
9471 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9472 if (tem)
9473 return tem;
9476 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9477 constant, we can simplify it. */
9478 if (TREE_CODE (arg1) == INTEGER_CST
9479 && (TREE_CODE (arg0) == MIN_EXPR
9480 || TREE_CODE (arg0) == MAX_EXPR)
9481 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9483 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9484 if (tem)
9485 return tem;
9488 /* Simplify comparison of something with itself. (For IEEE
9489 floating-point, we can only do some of these simplifications.) */
9490 if (operand_equal_p (arg0, arg1, 0))
9492 switch (code)
9494 case EQ_EXPR:
9495 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9496 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9497 return constant_boolean_node (1, type);
9498 break;
9500 case GE_EXPR:
9501 case LE_EXPR:
9502 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9503 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9504 return constant_boolean_node (1, type);
9505 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9507 case NE_EXPR:
9508 /* For NE, we can only do this simplification if the operands
9509 are integers or we don't honor IEEE floating-point NaNs. */
9510 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9511 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9512 break;
9513 /* ... fall through ... */
9514 case GT_EXPR:
9515 case LT_EXPR:
9516 return constant_boolean_node (0, type);
9517 default:
9518 gcc_unreachable ();
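/* Examples: for integral x, x == x, x <= x and x >= x fold to true
   while x < x, x > x and x != x fold to false. For IEEE floats that
   may be NaN, x < x and x > x still fold to false, x <= x and x >= x
   become x == x, and x == x and x != x are left alone because x may
   be NaN. */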
9522 /* If we are comparing an expression that just has comparisons
9523 of two integer values, arithmetic expressions of those comparisons,
9524 and constants, we can simplify it. There are only three cases
9525 to check: the two values can either be equal, the first can be
9526 greater, or the second can be greater. Fold the expression for
9527 those three values. Since each value must be 0 or 1, we have
9528 eight possibilities, each of which corresponds to the constant 0
9529 or 1 or one of the six possible comparisons.
9531 This handles common cases like (a > b) == 0 but also handles
9532 expressions like ((x > y) - (y > x)) > 0, which supposedly
9533 occur in macroized code. */
9535 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9537 tree cval1 = 0, cval2 = 0;
9538 int save_p = 0;
9540 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9541 /* Don't handle degenerate cases here; they should already
9542 have been handled anyway. */
9543 && cval1 != 0 && cval2 != 0
9544 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9545 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9546 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9547 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9548 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9549 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9550 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9552 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9553 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9555 /* We can't just pass T to eval_subst in case cval1 or cval2
9556 was the same as ARG1. */
9558 tree high_result
9559 = fold_build2_loc (loc, code, type,
9560 eval_subst (loc, arg0, cval1, maxval,
9561 cval2, minval),
9562 arg1);
9563 tree equal_result
9564 = fold_build2_loc (loc, code, type,
9565 eval_subst (loc, arg0, cval1, maxval,
9566 cval2, maxval),
9567 arg1);
9568 tree low_result
9569 = fold_build2_loc (loc, code, type,
9570 eval_subst (loc, arg0, cval1, minval,
9571 cval2, maxval),
9572 arg1);
9574 /* All three of these results should be 0 or 1. Confirm they are.
9575 Then use those values to select the proper code to use. */
9577 if (TREE_CODE (high_result) == INTEGER_CST
9578 && TREE_CODE (equal_result) == INTEGER_CST
9579 && TREE_CODE (low_result) == INTEGER_CST)
9581 /* Make a 3-bit mask with the high-order bit being the
9582 value for `>', the next for '=', and the low for '<'. */
9583 switch ((integer_onep (high_result) * 4)
9584 + (integer_onep (equal_result) * 2)
9585 + integer_onep (low_result))
9587 case 0:
9588 /* Always false. */
9589 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9590 case 1:
9591 code = LT_EXPR;
9592 break;
9593 case 2:
9594 code = EQ_EXPR;
9595 break;
9596 case 3:
9597 code = LE_EXPR;
9598 break;
9599 case 4:
9600 code = GT_EXPR;
9601 break;
9602 case 5:
9603 code = NE_EXPR;
9604 break;
9605 case 6:
9606 code = GE_EXPR;
9607 break;
9608 case 7:
9609 /* Always true. */
9610 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9613 if (save_p)
9615 tem = save_expr (build2 (code, type, cval1, cval2));
9616 SET_EXPR_LOCATION (tem, loc);
9617 return tem;
9619 return fold_build2_loc (loc, code, type, cval1, cval2);
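/* Worked example: (a > b) == 0 with integral a and b. Substituting
   (max,min), (max,max) and (min,max) for (a,b) yields high_result = 0,
   equal_result = 1 and low_result = 1, i.e. mask value 3, so the whole
   expression folds to a <= b. */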
9624 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9625 into a single range test. */
9626 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9627 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9628 && TREE_CODE (arg1) == INTEGER_CST
9629 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9630 && !integer_zerop (TREE_OPERAND (arg0, 1))
9631 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9632 && !TREE_OVERFLOW (arg1))
9634 tem = fold_div_compare (loc, code, type, arg0, arg1);
9635 if (tem != NULL_TREE)
9636 return tem;
9639 /* Fold ~X op ~Y as Y op X. */
9640 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9641 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9643 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9644 return fold_build2_loc (loc, code, type,
9645 fold_convert_loc (loc, cmp_type,
9646 TREE_OPERAND (arg1, 0)),
9647 TREE_OPERAND (arg0, 0));
9650 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9651 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9652 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9654 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9655 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9656 TREE_OPERAND (arg0, 0),
9657 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9658 fold_convert_loc (loc, cmp_type, arg1)));
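/* Examples: ~x == 5 becomes x == ~5, i.e. x == -6; an ordering such
   as ~x < 5 becomes x > -6, because BIT_NOT reverses the order. */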
9661 return NULL_TREE;
9665 /* Subroutine of fold_binary. Optimize complex multiplications of the
9666 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9667 argument EXPR represents the expression "z" of type TYPE. */
9669 static tree
9670 fold_mult_zconjz (location_t loc, tree type, tree expr)
9672 tree itype = TREE_TYPE (type);
9673 tree rpart, ipart, tem;
9675 if (TREE_CODE (expr) == COMPLEX_EXPR)
9677 rpart = TREE_OPERAND (expr, 0);
9678 ipart = TREE_OPERAND (expr, 1);
9680 else if (TREE_CODE (expr) == COMPLEX_CST)
9682 rpart = TREE_REALPART (expr);
9683 ipart = TREE_IMAGPART (expr);
9685 else
9687 expr = save_expr (expr);
9688 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9689 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9692 rpart = save_expr (rpart);
9693 ipart = save_expr (ipart);
9694 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9695 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9696 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9697 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9698 build_zero_cst (itype));
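/* The identity used above: (a + bi) * conj(a + bi) = (a + bi)(a - bi)
   = a*a + b*b + 0i, so the imaginary part of the result is a constant
   zero of the element type. */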
9702 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9703 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9704 guarantees that P and N have the same least significant log2(M) bits.
9705 N is not otherwise constrained. In particular, N is not normalized to
9706 0 <= N < M as is common. In general, the precise value of P is unknown.
9707 M is chosen as large as possible such that constant N can be determined.
9709 Returns M and sets *RESIDUE to N.
9711 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9712 account. This is not always possible due to PR 35705.
9715 static unsigned HOST_WIDE_INT
9716 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9717 bool allow_func_align)
9719 enum tree_code code;
9721 *residue = 0;
9723 code = TREE_CODE (expr);
9724 if (code == ADDR_EXPR)
9726 unsigned int bitalign;
9727 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9728 *residue /= BITS_PER_UNIT;
9729 return bitalign / BITS_PER_UNIT;
9731 else if (code == POINTER_PLUS_EXPR)
9733 tree op0, op1;
9734 unsigned HOST_WIDE_INT modulus;
9735 enum tree_code inner_code;
9737 op0 = TREE_OPERAND (expr, 0);
9738 STRIP_NOPS (op0);
9739 modulus = get_pointer_modulus_and_residue (op0, residue,
9740 allow_func_align);
9742 op1 = TREE_OPERAND (expr, 1);
9743 STRIP_NOPS (op1);
9744 inner_code = TREE_CODE (op1);
9745 if (inner_code == INTEGER_CST)
9747 *residue += TREE_INT_CST_LOW (op1);
9748 return modulus;
9750 else if (inner_code == MULT_EXPR)
9752 op1 = TREE_OPERAND (op1, 1);
9753 if (TREE_CODE (op1) == INTEGER_CST)
9755 unsigned HOST_WIDE_INT align;
9757 /* Compute the greatest power-of-2 divisor of op1. */
9758 align = TREE_INT_CST_LOW (op1);
9759 align &= -align;
9761 /* If align is non-zero and less than modulus, replace
9762 modulus with align. If align is 0, then either op1 is 0
9763 or the greatest power-of-2 divisor of op1 doesn't fit in an
9764 unsigned HOST_WIDE_INT. In either case, no additional
9765 constraint is imposed. */
9766 if (align)
9767 modulus = MIN (modulus, align);
9769 return modulus;
9774 /* If we get here, we were unable to determine anything useful about the
9775 expression. */
9776 return 1;
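/* Illustrative example: for &buf[5] with buf known to be 16-byte
   aligned, the result is M = 16 with *RESIDUE = 5. For p +p i * 4,
   the modulus of p is further capped at 4, the greatest power-of-2
   divisor of the multiplier. */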
9779 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9780 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9782 static bool
9783 vec_cst_ctor_to_array (tree arg, tree *elts)
9785 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9787 if (TREE_CODE (arg) == VECTOR_CST)
9789 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9790 elts[i] = VECTOR_CST_ELT (arg, i);
9792 else if (TREE_CODE (arg) == CONSTRUCTOR)
9794 constructor_elt *elt;
9796 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9797 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9798 return false;
9799 else
9800 elts[i] = elt->value;
9802 else
9803 return false;
9804 for (; i < nelts; i++)
9805 elts[i]
9806 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9807 return true;
9810 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9811 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9812 NULL_TREE otherwise. */
9814 static tree
9815 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9817 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9818 tree *elts;
9819 bool need_ctor = false;
9821 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9822 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9823 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9824 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9825 return NULL_TREE;
9827 elts = XALLOCAVEC (tree, nelts * 3);
9828 if (!vec_cst_ctor_to_array (arg0, elts)
9829 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9830 return NULL_TREE;
9832 for (i = 0; i < nelts; i++)
9834 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9835 need_ctor = true;
9836 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9839 if (need_ctor)
9841 vec<constructor_elt, va_gc> *v;
9842 vec_alloc (v, nelts);
9843 for (i = 0; i < nelts; i++)
9844 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9845 return build_constructor (type, v);
9847 else
9848 return build_vector (type, &elts[2 * nelts]);
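/* Illustrative example: for two 4-element vectors {a0,a1,a2,a3} and
   {b0,b1,b2,b3}, the selector {0,2,4,6} produces {a0,a2,b0,b2};
   selector values of NELTS and above index into ARG1. */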
9851 /* Try to fold a pointer difference of type TYPE between two address
9852 expressions of array references AREF0 and AREF1 using LOC. Return a
9853 simplified expression for the difference or NULL_TREE. */
9855 static tree
9856 fold_addr_of_array_ref_difference (location_t loc, tree type,
9857 tree aref0, tree aref1)
9859 tree base0 = TREE_OPERAND (aref0, 0);
9860 tree base1 = TREE_OPERAND (aref1, 0);
9861 tree base_offset = build_int_cst (type, 0);
9863 /* If the bases are array references as well, recurse. If the bases
9864 are pointer indirections compute the difference of the pointers.
9865 If the bases are equal, we are set. */
9866 if ((TREE_CODE (base0) == ARRAY_REF
9867 && TREE_CODE (base1) == ARRAY_REF
9868 && (base_offset
9869 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9870 || (INDIRECT_REF_P (base0)
9871 && INDIRECT_REF_P (base1)
9872 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9873 TREE_OPERAND (base0, 0),
9874 TREE_OPERAND (base1, 0))))
9875 || operand_equal_p (base0, base1, 0))
9877 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9878 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9879 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9880 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9881 return fold_build2_loc (loc, PLUS_EXPR, type,
9882 base_offset,
9883 fold_build2_loc (loc, MULT_EXPR, type,
9884 diff, esz));
9886 return NULL_TREE;
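/* Worked example: &a[i] - &a[j] for 4-byte elements folds to
   0 + (i - j) * 4. If the bases are themselves array references,
   e.g. &a[i][k] - &a[j][l], the recursion supplies the difference of
   the outer indexes as the base offset. */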
9889 /* If the real or vector real constant CST of type TYPE has an exact
9890 inverse, return it, else return NULL. */
9892 static tree
9893 exact_inverse (tree type, tree cst)
9895 REAL_VALUE_TYPE r;
9896 tree unit_type, *elts;
9897 enum machine_mode mode;
9898 unsigned vec_nelts, i;
9900 switch (TREE_CODE (cst))
9902 case REAL_CST:
9903 r = TREE_REAL_CST (cst);
9905 if (exact_real_inverse (TYPE_MODE (type), &r))
9906 return build_real (type, r);
9908 return NULL_TREE;
9910 case VECTOR_CST:
9911 vec_nelts = VECTOR_CST_NELTS (cst);
9912 elts = XALLOCAVEC (tree, vec_nelts);
9913 unit_type = TREE_TYPE (type);
9914 mode = TYPE_MODE (unit_type);
9916 for (i = 0; i < vec_nelts; i++)
9918 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9919 if (!exact_real_inverse (mode, &r))
9920 return NULL_TREE;
9921 elts[i] = build_real (unit_type, r);
9924 return build_vector (type, elts);
9926 default:
9927 return NULL_TREE;
9931 /* Mask out the tz least significant bits of X of type TYPE where
9932 tz is the number of trailing zeroes in Y. */
9933 static wide_int
9934 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9936 int tz = wi::ctz (y);
9937 if (tz > 0)
9938 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9939 return x;
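/* Example: if Y is 8 (three trailing zeroes), the result is X & ~7,
   i.e. X with its three least significant bits cleared. */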
9942 /* Return true when T is an address and is known to be nonzero.
9943 For floating point we further ensure that T is not denormal.
9944 Similar logic is present in nonzero_address in rtlanal.h.
9946 If the return value is based on the assumption that signed overflow
9947 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9948 change *STRICT_OVERFLOW_P. */
9950 static bool
9951 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9953 tree type = TREE_TYPE (t);
9954 enum tree_code code;
9956 /* Doing something useful for floating point would need more work. */
9957 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9958 return false;
9960 code = TREE_CODE (t);
9961 switch (TREE_CODE_CLASS (code))
9963 case tcc_unary:
9964 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9965 strict_overflow_p);
9966 case tcc_binary:
9967 case tcc_comparison:
9968 return tree_binary_nonzero_warnv_p (code, type,
9969 TREE_OPERAND (t, 0),
9970 TREE_OPERAND (t, 1),
9971 strict_overflow_p);
9972 case tcc_constant:
9973 case tcc_declaration:
9974 case tcc_reference:
9975 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9977 default:
9978 break;
9981 switch (code)
9983 case TRUTH_NOT_EXPR:
9984 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9985 strict_overflow_p);
9987 case TRUTH_AND_EXPR:
9988 case TRUTH_OR_EXPR:
9989 case TRUTH_XOR_EXPR:
9990 return tree_binary_nonzero_warnv_p (code, type,
9991 TREE_OPERAND (t, 0),
9992 TREE_OPERAND (t, 1),
9993 strict_overflow_p);
9995 case COND_EXPR:
9996 case CONSTRUCTOR:
9997 case OBJ_TYPE_REF:
9998 case ASSERT_EXPR:
9999 case ADDR_EXPR:
10000 case WITH_SIZE_EXPR:
10001 case SSA_NAME:
10002 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10004 case COMPOUND_EXPR:
10005 case MODIFY_EXPR:
10006 case BIND_EXPR:
10007 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10008 strict_overflow_p);
10010 case SAVE_EXPR:
10011 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10012 strict_overflow_p);
10014 case CALL_EXPR:
10016 tree fndecl = get_callee_fndecl (t);
10017 if (!fndecl) return false;
10018 if (flag_delete_null_pointer_checks && !flag_check_new
10019 && DECL_IS_OPERATOR_NEW (fndecl)
10020 && !TREE_NOTHROW (fndecl))
10021 return true;
10022 if (flag_delete_null_pointer_checks
10023 && lookup_attribute ("returns_nonnull",
10024 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10025 return true;
10026 return alloca_call_p (t);
10029 default:
10030 break;
10032 return false;
10035 /* Return true when T is an address and is known to be nonzero.
10036 Handle warnings about undefined signed overflow. */
10038 static bool
10039 tree_expr_nonzero_p (tree t)
10041 bool ret, strict_overflow_p;
10043 strict_overflow_p = false;
10044 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10045 if (strict_overflow_p)
10046 fold_overflow_warning (("assuming signed overflow does not occur when "
10047 "determining that expression is always "
10048 "non-zero"),
10049 WARN_STRICT_OVERFLOW_MISC);
10050 return ret;
10053 /* Fold a binary expression of code CODE and type TYPE with operands
10054 OP0 and OP1. LOC is the location of the resulting expression.
10055 Return the folded expression if folding is successful. Otherwise,
10056 return NULL_TREE. */
10058 tree
10059 fold_binary_loc (location_t loc,
10060 enum tree_code code, tree type, tree op0, tree op1)
10062 enum tree_code_class kind = TREE_CODE_CLASS (code);
10063 tree arg0, arg1, tem;
10064 tree t1 = NULL_TREE;
10065 bool strict_overflow_p;
10066 unsigned int prec;
10068 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10069 && TREE_CODE_LENGTH (code) == 2
10070 && op0 != NULL_TREE
10071 && op1 != NULL_TREE);
10073 arg0 = op0;
10074 arg1 = op1;
10076 /* Strip any conversions that don't change the mode. This is
10077 safe for every expression, except for a comparison expression
10078 because its signedness is derived from its operands. So, in
10079 the latter case, only strip conversions that don't change the
10080 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10081 preserved.
10083 Note that this is done as an internal manipulation within the
10084 constant folder, in order to find the simplest representation
10085 of the arguments so that their form can be studied. In any
10086 cases, the appropriate type conversions should be put back in
10087 the tree that will get out of the constant folder. */
10089 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10091 STRIP_SIGN_NOPS (arg0);
10092 STRIP_SIGN_NOPS (arg1);
10094 else
10096 STRIP_NOPS (arg0);
10097 STRIP_NOPS (arg1);
10100 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10101 constant but we can't do arithmetic on them. */
10102 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10103 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10104 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10105 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10106 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10107 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10108 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10110 if (kind == tcc_binary)
10112 /* Make sure type and arg0 have the same saturating flag. */
10113 gcc_assert (TYPE_SATURATING (type)
10114 == TYPE_SATURATING (TREE_TYPE (arg0)));
10115 tem = const_binop (code, arg0, arg1);
10117 else if (kind == tcc_comparison)
10118 tem = fold_relational_const (code, type, arg0, arg1);
10119 else
10120 tem = NULL_TREE;
10122 if (tem != NULL_TREE)
10124 if (TREE_TYPE (tem) != type)
10125 tem = fold_convert_loc (loc, type, tem);
10126 return tem;
10130 /* If this is a commutative operation, and ARG0 is a constant, move it
10131 to ARG1 to reduce the number of tests below. */
10132 if (commutative_tree_code (code)
10133 && tree_swap_operands_p (arg0, arg1, true))
10134 return fold_build2_loc (loc, code, type, op1, op0);
10136 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10138 First check for cases where an arithmetic operation is applied to a
10139 compound, conditional, or comparison operation. Push the arithmetic
10140 operation inside the compound or conditional to see if any folding
10141 can then be done. Convert comparison to conditional for this purpose.
10142 This also optimizes non-constant cases that used to be done in
10143 expand_expr.
10145 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10146 one of the operands is a comparison and the other is a comparison, a
10147 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10148 code below would make the expression more complex. Change it to a
10149 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10150 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10152 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10153 || code == EQ_EXPR || code == NE_EXPR)
10154 && TREE_CODE (type) != VECTOR_TYPE
10155 && ((truth_value_p (TREE_CODE (arg0))
10156 && (truth_value_p (TREE_CODE (arg1))
10157 || (TREE_CODE (arg1) == BIT_AND_EXPR
10158 && integer_onep (TREE_OPERAND (arg1, 1)))))
10159 || (truth_value_p (TREE_CODE (arg1))
10160 && (truth_value_p (TREE_CODE (arg0))
10161 || (TREE_CODE (arg0) == BIT_AND_EXPR
10162 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10164 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10165 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10166 : TRUTH_XOR_EXPR,
10167 boolean_type_node,
10168 fold_convert_loc (loc, boolean_type_node, arg0),
10169 fold_convert_loc (loc, boolean_type_node, arg1));
10171 if (code == EQ_EXPR)
10172 tem = invert_truthvalue_loc (loc, tem);
10174 return fold_convert_loc (loc, type, tem);
10177 if (TREE_CODE_CLASS (code) == tcc_binary
10178 || TREE_CODE_CLASS (code) == tcc_comparison)
10180 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10182 tem = fold_build2_loc (loc, code, type,
10183 fold_convert_loc (loc, TREE_TYPE (op0),
10184 TREE_OPERAND (arg0, 1)), op1);
10185 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10186 tem);
10188 if (TREE_CODE (arg1) == COMPOUND_EXPR
10189 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10191 tem = fold_build2_loc (loc, code, type, op0,
10192 fold_convert_loc (loc, TREE_TYPE (op1),
10193 TREE_OPERAND (arg1, 1)));
10194 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10195 tem);
10198 if (TREE_CODE (arg0) == COND_EXPR
10199 || TREE_CODE (arg0) == VEC_COND_EXPR
10200 || COMPARISON_CLASS_P (arg0))
10202 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10203 arg0, arg1,
10204 /*cond_first_p=*/1);
10205 if (tem != NULL_TREE)
10206 return tem;
10209 if (TREE_CODE (arg1) == COND_EXPR
10210 || TREE_CODE (arg1) == VEC_COND_EXPR
10211 || COMPARISON_CLASS_P (arg1))
10213 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10214 arg1, arg0,
10215 /*cond_first_p=*/0);
10216 if (tem != NULL_TREE)
10217 return tem;
10221 switch (code)
10223 case MEM_REF:
10224 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10225 if (TREE_CODE (arg0) == ADDR_EXPR
10226 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10228 tree iref = TREE_OPERAND (arg0, 0);
10229 return fold_build2 (MEM_REF, type,
10230 TREE_OPERAND (iref, 0),
10231 int_const_binop (PLUS_EXPR, arg1,
10232 TREE_OPERAND (iref, 1)));
10235 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10236 if (TREE_CODE (arg0) == ADDR_EXPR
10237 && handled_component_p (TREE_OPERAND (arg0, 0)))
10239 tree base;
10240 HOST_WIDE_INT coffset;
10241 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10242 &coffset);
10243 if (!base)
10244 return NULL_TREE;
10245 return fold_build2 (MEM_REF, type,
10246 build_fold_addr_expr (base),
10247 int_const_binop (PLUS_EXPR, arg1,
10248 size_int (coffset)));
10251 return NULL_TREE;
10253 case POINTER_PLUS_EXPR:
10254 /* 0 +p index -> (type)index */
10255 if (integer_zerop (arg0))
10256 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10258 /* PTR +p 0 -> PTR */
10259 if (integer_zerop (arg1))
10260 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10262 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10263 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10264 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10265 return fold_convert_loc (loc, type,
10266 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10267 fold_convert_loc (loc, sizetype,
10268 arg1),
10269 fold_convert_loc (loc, sizetype,
10270 arg0)));
10272 /* (PTR +p B) +p A -> PTR +p (B + A) */
10273 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10274 && !upc_shared_type_p (TREE_TYPE (type)))
10276 tree inner;
10277 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10278 tree arg00 = TREE_OPERAND (arg0, 0);
10279 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10280 arg01, fold_convert_loc (loc, sizetype, arg1));
10281 return fold_convert_loc (loc, type,
10282 fold_build_pointer_plus_loc (loc,
10283 arg00, inner));
10286 /* PTR_CST +p CST -> CST1 */
10287 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10288 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10289 fold_convert_loc (loc, type, arg1));
10291 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10292 of the array. The loop optimizer sometimes produces this type of
10293 expression. */
10294 if (TREE_CODE (arg0) == ADDR_EXPR)
10296 tem = try_move_mult_to_index (loc, arg0,
10297 fold_convert_loc (loc,
10298 ssizetype, arg1));
10299 if (tem)
10300 return fold_convert_loc (loc, type, tem);
10303 return NULL_TREE;
10305 case PLUS_EXPR:
10306 /* A + (-B) -> A - B */
10307 if (TREE_CODE (arg1) == NEGATE_EXPR
10308 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10309 return fold_build2_loc (loc, MINUS_EXPR, type,
10310 fold_convert_loc (loc, type, arg0),
10311 fold_convert_loc (loc, type,
10312 TREE_OPERAND (arg1, 0)));
10313 /* (-A) + B -> B - A */
10314 if (TREE_CODE (arg0) == NEGATE_EXPR
10315 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10316 && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10317 return fold_build2_loc (loc, MINUS_EXPR, type,
10318 fold_convert_loc (loc, type, arg1),
10319 fold_convert_loc (loc, type,
10320 TREE_OPERAND (arg0, 0)));
10322 /* Disable further optimizations involving UPC shared pointers,
10323 because integers are not interoperable with shared pointers. */
10324 if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10325 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10326 || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
10327 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
10328 return NULL_TREE;
10330 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10332 /* Convert ~A + 1 to -A. */
10333 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10334 && integer_onep (arg1))
10335 return fold_build1_loc (loc, NEGATE_EXPR, type,
10336 fold_convert_loc (loc, type,
10337 TREE_OPERAND (arg0, 0)));
10339 /* ~X + X is -1. */
10340 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10341 && !TYPE_OVERFLOW_TRAPS (type))
10343 tree tem = TREE_OPERAND (arg0, 0);
10345 STRIP_NOPS (tem);
10346 if (operand_equal_p (tem, arg1, 0))
10348 t1 = build_all_ones_cst (type);
10349 return omit_one_operand_loc (loc, type, t1, arg1);
10353 /* X + ~X is -1. */
10354 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10355 && !TYPE_OVERFLOW_TRAPS (type))
10357 tree tem = TREE_OPERAND (arg1, 0);
10359 STRIP_NOPS (tem);
10360 if (operand_equal_p (arg0, tem, 0))
10362 t1 = build_all_ones_cst (type);
10363 return omit_one_operand_loc (loc, type, t1, arg0);
10367 /* X + (X / CST) * -CST is X % CST. */
10368 if (TREE_CODE (arg1) == MULT_EXPR
10369 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10370 && operand_equal_p (arg0,
10371 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10373 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10374 tree cst1 = TREE_OPERAND (arg1, 1);
10375 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10376 cst1, cst0);
10377 if (sum && integer_zerop (sum))
10378 return fold_convert_loc (loc, type,
10379 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10380 TREE_TYPE (arg0), arg0,
10381 cst0));
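/* Worked example: x + (x / 16) * -16 folds to x % 16, since for
   truncating division x % 16 == x - (x / 16) * 16 by definition. */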
10385 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10386 one. Make sure the type is not saturating and has the signedness of
10387 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10388 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10389 if ((TREE_CODE (arg0) == MULT_EXPR
10390 || TREE_CODE (arg1) == MULT_EXPR)
10391 && !TYPE_SATURATING (type)
10392 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10393 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10394 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10396 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10397 if (tem)
10398 return tem;
10401 if (! FLOAT_TYPE_P (type))
10403 if (integer_zerop (arg1))
10404 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10406 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10407 with a constant, and the two constants have no bits in common,
10408 we should treat this as a BIT_IOR_EXPR since this may produce more
10409 simplifications. */
10410 if (TREE_CODE (arg0) == BIT_AND_EXPR
10411 && TREE_CODE (arg1) == BIT_AND_EXPR
10412 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10413 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10414 && wi::bit_and (TREE_OPERAND (arg0, 1),
10415 TREE_OPERAND (arg1, 1)) == 0)
10417 code = BIT_IOR_EXPR;
10418 goto bit_ior;
10421 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10422 (plus (plus (mult) (mult)) (foo)) so that we can
10423 take advantage of the factoring cases below. */
10424 if (TYPE_OVERFLOW_WRAPS (type)
10425 && (((TREE_CODE (arg0) == PLUS_EXPR
10426 || TREE_CODE (arg0) == MINUS_EXPR)
10427 && TREE_CODE (arg1) == MULT_EXPR)
10428 || ((TREE_CODE (arg1) == PLUS_EXPR
10429 || TREE_CODE (arg1) == MINUS_EXPR)
10430 && TREE_CODE (arg0) == MULT_EXPR)))
10432 tree parg0, parg1, parg, marg;
10433 enum tree_code pcode;
10435 if (TREE_CODE (arg1) == MULT_EXPR)
10436 parg = arg0, marg = arg1;
10437 else
10438 parg = arg1, marg = arg0;
10439 pcode = TREE_CODE (parg);
10440 parg0 = TREE_OPERAND (parg, 0);
10441 parg1 = TREE_OPERAND (parg, 1);
10442 STRIP_NOPS (parg0);
10443 STRIP_NOPS (parg1);
10445 if (TREE_CODE (parg0) == MULT_EXPR
10446 && TREE_CODE (parg1) != MULT_EXPR)
10447 return fold_build2_loc (loc, pcode, type,
10448 fold_build2_loc (loc, PLUS_EXPR, type,
10449 fold_convert_loc (loc, type,
10450 parg0),
10451 fold_convert_loc (loc, type,
10452 marg)),
10453 fold_convert_loc (loc, type, parg1));
10454 if (TREE_CODE (parg0) != MULT_EXPR
10455 && TREE_CODE (parg1) == MULT_EXPR)
10456 return
10457 fold_build2_loc (loc, PLUS_EXPR, type,
10458 fold_convert_loc (loc, type, parg0),
10459 fold_build2_loc (loc, pcode, type,
10460 fold_convert_loc (loc, type, marg),
10461 fold_convert_loc (loc, type,
10462 parg1)));
10465 else
10467 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10468 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10469 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10471 /* Likewise if the operands are reversed. */
10472 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10473 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10475 /* Convert X + -C into X - C. */
10476 if (TREE_CODE (arg1) == REAL_CST
10477 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10479 tem = fold_negate_const (arg1, type);
10480 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10481 return fold_build2_loc (loc, MINUS_EXPR, type,
10482 fold_convert_loc (loc, type, arg0),
10483 fold_convert_loc (loc, type, tem));
10486 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10487 to __complex__ ( x, y ). This is not the same for SNaNs or
10488 if signed zeros are involved. */
10489 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10490 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10491 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10493 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10494 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10495 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10496 bool arg0rz = false, arg0iz = false;
10497 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10498 || (arg0i && (arg0iz = real_zerop (arg0i))))
10500 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10501 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10502 if (arg0rz && arg1i && real_zerop (arg1i))
10504 tree rp = arg1r ? arg1r
10505 : build1 (REALPART_EXPR, rtype, arg1);
10506 tree ip = arg0i ? arg0i
10507 : build1 (IMAGPART_EXPR, rtype, arg0);
10508 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10510 else if (arg0iz && arg1r && real_zerop (arg1r))
10512 tree rp = arg0r ? arg0r
10513 : build1 (REALPART_EXPR, rtype, arg0);
10514 tree ip = arg1i ? arg1i
10515 : build1 (IMAGPART_EXPR, rtype, arg1);
10516 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10521 if (flag_unsafe_math_optimizations
10522 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10523 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10524 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10525 return tem;
10527 /* Convert x+x into x*2.0. */
10528 if (operand_equal_p (arg0, arg1, 0)
10529 && SCALAR_FLOAT_TYPE_P (type))
10530 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10531 build_real (type, dconst2));
10533 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10534 We associate floats only if the user has specified
10535 -fassociative-math. */
10536 if (flag_associative_math
10537 && TREE_CODE (arg1) == PLUS_EXPR
10538 && TREE_CODE (arg0) != MULT_EXPR)
10540 tree tree10 = TREE_OPERAND (arg1, 0);
10541 tree tree11 = TREE_OPERAND (arg1, 1);
10542 if (TREE_CODE (tree11) == MULT_EXPR
10543 && TREE_CODE (tree10) == MULT_EXPR)
10545 tree tree0;
10546 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10547 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10550 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10551 We associate floats only if the user has specified
10552 -fassociative-math. */
10553 if (flag_associative_math
10554 && TREE_CODE (arg0) == PLUS_EXPR
10555 && TREE_CODE (arg1) != MULT_EXPR)
10557 tree tree00 = TREE_OPERAND (arg0, 0);
10558 tree tree01 = TREE_OPERAND (arg0, 1);
10559 if (TREE_CODE (tree01) == MULT_EXPR
10560 && TREE_CODE (tree00) == MULT_EXPR)
10562 tree tree0;
10563 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10564 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10569 bit_rotate:
10570 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10571 is a rotate of A by C1 bits. */
10572 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10573 is a rotate of A by B bits. */
10575 enum tree_code code0, code1;
10576 tree rtype;
10577 code0 = TREE_CODE (arg0);
10578 code1 = TREE_CODE (arg1);
10579 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10580 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10581 && operand_equal_p (TREE_OPERAND (arg0, 0),
10582 TREE_OPERAND (arg1, 0), 0)
10583 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10584 TYPE_UNSIGNED (rtype))
10585 /* Only create rotates in complete modes. Other cases are not
10586 expanded properly. */
10587 && (element_precision (rtype)
10588 == element_precision (TYPE_MODE (rtype))))
10590 tree tree01, tree11;
10591 enum tree_code code01, code11;
10593 tree01 = TREE_OPERAND (arg0, 1);
10594 tree11 = TREE_OPERAND (arg1, 1);
10595 STRIP_NOPS (tree01);
10596 STRIP_NOPS (tree11);
10597 code01 = TREE_CODE (tree01);
10598 code11 = TREE_CODE (tree11);
10599 if (code01 == INTEGER_CST
10600 && code11 == INTEGER_CST
10601 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10602 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10604 tem = build2_loc (loc, LROTATE_EXPR,
10605 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10606 TREE_OPERAND (arg0, 0),
10607 code0 == LSHIFT_EXPR ? tree01 : tree11);
10608 return fold_convert_loc (loc, type, tem);
10610 else if (code11 == MINUS_EXPR)
10612 tree tree110, tree111;
10613 tree110 = TREE_OPERAND (tree11, 0);
10614 tree111 = TREE_OPERAND (tree11, 1);
10615 STRIP_NOPS (tree110);
10616 STRIP_NOPS (tree111);
10617 if (TREE_CODE (tree110) == INTEGER_CST
10618 && 0 == compare_tree_int (tree110,
10619 element_precision
10620 (TREE_TYPE (TREE_OPERAND
10621 (arg0, 0))))
10622 && operand_equal_p (tree01, tree111, 0))
10623 return
10624 fold_convert_loc (loc, type,
10625 build2 ((code0 == LSHIFT_EXPR
10626 ? LROTATE_EXPR
10627 : RROTATE_EXPR),
10628 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10629 TREE_OPERAND (arg0, 0), tree01));
10631 else if (code01 == MINUS_EXPR)
10633 tree tree010, tree011;
10634 tree010 = TREE_OPERAND (tree01, 0);
10635 tree011 = TREE_OPERAND (tree01, 1);
10636 STRIP_NOPS (tree010);
10637 STRIP_NOPS (tree011);
10638 if (TREE_CODE (tree010) == INTEGER_CST
10639 && 0 == compare_tree_int (tree010,
10640 element_precision
10641 (TREE_TYPE (TREE_OPERAND
10642 (arg0, 0))))
10643 && operand_equal_p (tree11, tree011, 0))
10644 return fold_convert_loc
10645 (loc, type,
10646 build2 ((code0 != LSHIFT_EXPR
10647 ? LROTATE_EXPR
10648 : RROTATE_EXPR),
10649 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10650 TREE_OPERAND (arg0, 0), tree11));
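/* Worked examples: for unsigned 32-bit x, (x << 3) + (x >> 29)
   becomes x rotated left by 3, and (x << n) + (x >> (32 - n)) becomes
   a rotate by n. The bit_rotate label is also reached from the
   bitwise OR and XOR cases. */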
10655 associate:
10656 /* In most languages, we can't associate operations on floats through
10657 parentheses. Rather than remember where the parentheses were, we
10658 don't associate floats at all, unless the user has specified
10659 -fassociative-math.
10660 And, we need to make sure type is not saturating. */
10662 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10663 && !TYPE_SATURATING (type))
10665 tree var0, con0, lit0, minus_lit0;
10666 tree var1, con1, lit1, minus_lit1;
10667 tree atype = type;
10668 bool ok = true;
10670 /* Split both trees into variables, constants, and literals. Then
10671 associate each group together, the constants with literals,
10672 then the result with variables. This increases the chances of
10673 literals being recombined later and of generating relocatable
10674 expressions for the sum of a constant and literal. */
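      /* For example (a sketch): folding (x + 1) + (y + 2) splits the
         operands into variables {x, y} and literals {1, 2}, which
         reassociate to (x + y) + 3.  */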
10675 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10676 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10677 code == MINUS_EXPR);
10679 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10680 if (code == MINUS_EXPR)
10681 code = PLUS_EXPR;
10683 /* With undefined overflow prefer doing association in a type
10684 which wraps on overflow, if that is one of the operand types. */
10685 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10686 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10688 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10689 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10690 atype = TREE_TYPE (arg0);
10691 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10692 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10693 atype = TREE_TYPE (arg1);
10694 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10697 /* With undefined overflow we can only associate constants with one
10698 variable, and constants whose association doesn't overflow. */
10699 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10700 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10702 if (var0 && var1)
10704 tree tmp0 = var0;
10705 tree tmp1 = var1;
10707 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10708 tmp0 = TREE_OPERAND (tmp0, 0);
10709 if (CONVERT_EXPR_P (tmp0)
10710 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10711 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10712 <= TYPE_PRECISION (atype)))
10713 tmp0 = TREE_OPERAND (tmp0, 0);
10714 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10715 tmp1 = TREE_OPERAND (tmp1, 0);
10716 if (CONVERT_EXPR_P (tmp1)
10717 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10718 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10719 <= TYPE_PRECISION (atype)))
10720 tmp1 = TREE_OPERAND (tmp1, 0);
10721 /* The only case we can still associate with two variables
10722 is if they are the same, modulo negation and bit-pattern
10723 preserving conversions. */
10724 if (!operand_equal_p (tmp0, tmp1, 0))
10725 ok = false;
10729 /* Only do something if we found more than two objects. Otherwise,
10730 nothing has changed and we risk infinite recursion. */
10731 if (ok
10732 && (2 < ((var0 != 0) + (var1 != 0)
10733 + (con0 != 0) + (con1 != 0)
10734 + (lit0 != 0) + (lit1 != 0)
10735 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10737 bool any_overflows = false;
10738 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10739 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10740 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10741 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10742 var0 = associate_trees (loc, var0, var1, code, atype);
10743 con0 = associate_trees (loc, con0, con1, code, atype);
10744 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10745 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10746 code, atype);
10748 /* Preserve the MINUS_EXPR if the negative part of the literal is
10749 greater than the positive part. Otherwise, the multiplicative
10750 folding code (i.e. extract_muldiv) may be fooled when
10751 unsigned constants are subtracted, as in the following
10752 example: ((X*2 + 4) - 8U)/2. */
10753 if (minus_lit0 && lit0)
10755 if (TREE_CODE (lit0) == INTEGER_CST
10756 && TREE_CODE (minus_lit0) == INTEGER_CST
10757 && tree_int_cst_lt (lit0, minus_lit0))
10759 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10760 MINUS_EXPR, atype);
10761 lit0 = 0;
10763 else
10765 lit0 = associate_trees (loc, lit0, minus_lit0,
10766 MINUS_EXPR, atype);
10767 minus_lit0 = 0;
10771 /* Don't introduce overflows through reassociation. */
10772 if (!any_overflows
10773 && ((lit0 && TREE_OVERFLOW (lit0))
10774 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10775 return NULL_TREE;
10777 if (minus_lit0)
10779 if (con0 == 0)
10780 return
10781 fold_convert_loc (loc, type,
10782 associate_trees (loc, var0, minus_lit0,
10783 MINUS_EXPR, atype));
10784 else
10786 con0 = associate_trees (loc, con0, minus_lit0,
10787 MINUS_EXPR, atype);
10788 return
10789 fold_convert_loc (loc, type,
10790 associate_trees (loc, var0, con0,
10791 PLUS_EXPR, atype));
10795 con0 = associate_trees (loc, con0, lit0, code, atype);
10796 return
10797 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10798 code, atype));
10802 return NULL_TREE;
10804 case MINUS_EXPR:
10805 /* Pointer simplifications for subtraction, simple reassociations. */
10806 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10808 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10809 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10810 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10812 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10813 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10814 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10815 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10816 return fold_build2_loc (loc, PLUS_EXPR, type,
10817 fold_build2_loc (loc, MINUS_EXPR, type,
10818 arg00, arg10),
10819 fold_build2_loc (loc, MINUS_EXPR, type,
10820 arg01, arg11));
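      /* Concretely (a sketch at the tree level): with char *p, *q,
         (p p+ 4) - (q p+ 1) becomes (p - q) + (4 - 1), i.e.
         (p - q) + 3.  */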
10822 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10823 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10825 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10826 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10827 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10828 fold_convert_loc (loc, type, arg1));
10829 if (tmp)
10830 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10833 /* A - (-B) -> A + B */
10834 if (TREE_CODE (arg1) == NEGATE_EXPR)
10835 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10836 fold_convert_loc (loc, type,
10837 TREE_OPERAND (arg1, 0)));
10839 /* Disable further optimizations involving UPC shared pointers,
10840 because integers are not interoperable with shared pointers.
10841 (The test below also detects pointer difference between
10842 shared pointers, which cannot be folded.) */
10844 if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
10845 && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
10846 return NULL_TREE;
10848 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10849 if (TREE_CODE (arg0) == NEGATE_EXPR
10850 && negate_expr_p (arg1)
10851 && reorder_operands_p (arg0, arg1))
10852 return fold_build2_loc (loc, MINUS_EXPR, type,
10853 fold_convert_loc (loc, type,
10854 negate_expr (arg1)),
10855 fold_convert_loc (loc, type,
10856 TREE_OPERAND (arg0, 0)));
10857 /* Convert -A - 1 to ~A. */
10858 if (TREE_CODE (type) != COMPLEX_TYPE
10859 && TREE_CODE (arg0) == NEGATE_EXPR
10860 && integer_onep (arg1)
10861 && !TYPE_OVERFLOW_TRAPS (type))
10862 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10863 fold_convert_loc (loc, type,
10864 TREE_OPERAND (arg0, 0)));
10866 /* Convert -1 - A to ~A. */
10867 if (TREE_CODE (type) != COMPLEX_TYPE
10868 && integer_all_onesp (arg0))
10869 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
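      /* Both rewrites use the two's-complement identity -X == ~X + 1,
         so -X - 1 == ~X and -1 - X == ~X; e.g. for x == 5,
         -5 - 1 == -6 == ~5.  */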
10872 /* X - (X / Y) * Y is X % Y. */
10873 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10874 && TREE_CODE (arg1) == MULT_EXPR
10875 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10876 && operand_equal_p (arg0,
10877 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10878 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10879 TREE_OPERAND (arg1, 1), 0))
10880 return
10881 fold_convert_loc (loc, type,
10882 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10883 arg0, TREE_OPERAND (arg1, 1)));
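      /* E.g. with truncating division, 17 - (17 / 5) * 5
         == 17 - 15 == 2 == 17 % 5.  */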
10885 if (! FLOAT_TYPE_P (type))
10887 if (integer_zerop (arg0))
10888 return negate_expr (fold_convert_loc (loc, type, arg1));
10889 if (integer_zerop (arg1))
10890 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10892 /* Fold A - (A & B) into ~B & A. */
10893 if (!TREE_SIDE_EFFECTS (arg0)
10894 && TREE_CODE (arg1) == BIT_AND_EXPR)
10896 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10898 tree arg10 = fold_convert_loc (loc, type,
10899 TREE_OPERAND (arg1, 0));
10900 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10901 fold_build1_loc (loc, BIT_NOT_EXPR,
10902 type, arg10),
10903 fold_convert_loc (loc, type, arg0));
10905 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10907 tree arg11 = fold_convert_loc (loc,
10908 type, TREE_OPERAND (arg1, 1));
10909 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10910 fold_build1_loc (loc, BIT_NOT_EXPR,
10911 type, arg11),
10912 fold_convert_loc (loc, type, arg0));
10916 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10917 any power of 2 minus 1. */
10918 if (TREE_CODE (arg0) == BIT_AND_EXPR
10919 && TREE_CODE (arg1) == BIT_AND_EXPR
10920 && operand_equal_p (TREE_OPERAND (arg0, 0),
10921 TREE_OPERAND (arg1, 0), 0))
10923 tree mask0 = TREE_OPERAND (arg0, 1);
10924 tree mask1 = TREE_OPERAND (arg1, 1);
10925 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10927 if (operand_equal_p (tem, mask1, 0))
10929 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10930 TREE_OPERAND (arg0, 0), mask1);
10931 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
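      /* Sanity check of the identity with A == 10 and B == 3 (a power
         of 2 minus 1): (A & ~B) - (A & B) == 8 - 2 == 6, and
         (A ^ B) - B == 9 - 3 == 6.  */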
10936 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10937 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10938 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10940 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10941 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10942 (-ARG1 + ARG0) reduces to -ARG1. */
10943 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10944 return negate_expr (fold_convert_loc (loc, type, arg1));
10946 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10947 __complex__ ( x, -y ). This is not the same for SNaNs or if
10948 signed zeros are involved. */
10949 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10950 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10951 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10953 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10954 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10955 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10956 bool arg0rz = false, arg0iz = false;
10957 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10958 || (arg0i && (arg0iz = real_zerop (arg0i))))
10960 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10961 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10962 if (arg0rz && arg1i && real_zerop (arg1i))
10964 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10965 arg1r ? arg1r
10966 : build1 (REALPART_EXPR, rtype, arg1));
10967 tree ip = arg0i ? arg0i
10968 : build1 (IMAGPART_EXPR, rtype, arg0);
10969 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10971 else if (arg0iz && arg1r && real_zerop (arg1r))
10973 tree rp = arg0r ? arg0r
10974 : build1 (REALPART_EXPR, rtype, arg0);
10975 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10976 arg1i ? arg1i
10977 : build1 (IMAGPART_EXPR, rtype, arg1));
10978 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10983 /* Fold &x - &x. This can happen from &x.foo - &x.
10984 This is unsafe for certain floats even in non-IEEE formats.
10985 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10986 Also note that operand_equal_p is always false if an operand
10987 is volatile. */
10989 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10990 && operand_equal_p (arg0, arg1, 0))
10991 return build_zero_cst (type);
10993 /* A - B -> A + (-B) if B is easily negatable. */
10994 if (negate_expr_p (arg1)
10995 && ((FLOAT_TYPE_P (type)
10996 /* Avoid this transformation if B is a positive REAL_CST. */
10997 && (TREE_CODE (arg1) != REAL_CST
10998 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10999 || INTEGRAL_TYPE_P (type)))
11000 return fold_build2_loc (loc, PLUS_EXPR, type,
11001 fold_convert_loc (loc, type, arg0),
11002 fold_convert_loc (loc, type,
11003 negate_expr (arg1)));
11005 /* Try folding difference of addresses. */
11007 HOST_WIDE_INT diff;
11009 if ((TREE_CODE (arg0) == ADDR_EXPR
11010 || TREE_CODE (arg1) == ADDR_EXPR)
11011 && ptr_difference_const (arg0, arg1, &diff))
11012 return build_int_cst_type (type, diff);
11015 /* Fold &a[i] - &a[j] to i-j. */
11016 if (TREE_CODE (arg0) == ADDR_EXPR
11017 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11018 && TREE_CODE (arg1) == ADDR_EXPR
11019 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11021 tree tem = fold_addr_of_array_ref_difference (loc, type,
11022 TREE_OPERAND (arg0, 0),
11023 TREE_OPERAND (arg1, 0));
11024 if (tem)
11025 return tem;
11028 if (FLOAT_TYPE_P (type)
11029 && flag_unsafe_math_optimizations
11030 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11031 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11032 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11033 return tem;
11035 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11036 one. Make sure the type is not saturating and has the signedness of
11037 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11038 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11039 if ((TREE_CODE (arg0) == MULT_EXPR
11040 || TREE_CODE (arg1) == MULT_EXPR)
11041 && !TYPE_SATURATING (type)
11042 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11043 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11044 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11046 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11047 if (tem)
11048 return tem;
11051 goto associate;
11053 case MULT_EXPR:
11054 /* (-A) * (-B) -> A * B */
11055 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11056 return fold_build2_loc (loc, MULT_EXPR, type,
11057 fold_convert_loc (loc, type,
11058 TREE_OPERAND (arg0, 0)),
11059 fold_convert_loc (loc, type,
11060 negate_expr (arg1)));
11061 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11062 return fold_build2_loc (loc, MULT_EXPR, type,
11063 fold_convert_loc (loc, type,
11064 negate_expr (arg0)),
11065 fold_convert_loc (loc, type,
11066 TREE_OPERAND (arg1, 0)));
11068 if (! FLOAT_TYPE_P (type))
11070 if (integer_zerop (arg1))
11071 return omit_one_operand_loc (loc, type, arg1, arg0);
11072 if (integer_onep (arg1))
11073 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11074 /* Transform x * -1 into -x. Make sure to do the negation
11075 on the original operand with conversions not stripped
11076 because we can only strip non-sign-changing conversions. */
11077 if (integer_minus_onep (arg1))
11078 return fold_convert_loc (loc, type, negate_expr (op0));
11079 /* Transform x * -C into -x * C if x is easily negatable. */
11080 if (TREE_CODE (arg1) == INTEGER_CST
11081 && tree_int_cst_sgn (arg1) == -1
11082 && negate_expr_p (arg0)
11083 && (tem = negate_expr (arg1)) != arg1
11084 && !TREE_OVERFLOW (tem))
11085 return fold_build2_loc (loc, MULT_EXPR, type,
11086 fold_convert_loc (loc, type,
11087 negate_expr (arg0)),
11088 tem);
11090 /* (a * (1 << b)) is (a << b) */
11091 if (TREE_CODE (arg1) == LSHIFT_EXPR
11092 && integer_onep (TREE_OPERAND (arg1, 0)))
11093 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11094 TREE_OPERAND (arg1, 1));
11095 if (TREE_CODE (arg0) == LSHIFT_EXPR
11096 && integer_onep (TREE_OPERAND (arg0, 0)))
11097 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11098 TREE_OPERAND (arg0, 1));
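      /* E.g. a * (1 << 4) folds to a << 4; the second test handles the
         mirrored (1 << 4) * a.  */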
11100 /* (A + A) * C -> A * 2 * C */
11101 if (TREE_CODE (arg0) == PLUS_EXPR
11102 && TREE_CODE (arg1) == INTEGER_CST
11103 && operand_equal_p (TREE_OPERAND (arg0, 0),
11104 TREE_OPERAND (arg0, 1), 0))
11105 return fold_build2_loc (loc, MULT_EXPR, type,
11106 omit_one_operand_loc (loc, type,
11107 TREE_OPERAND (arg0, 0),
11108 TREE_OPERAND (arg0, 1)),
11109 fold_build2_loc (loc, MULT_EXPR, type,
11110 build_int_cst (type, 2) , arg1));
11112 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11113 sign-changing only. */
11114 if (TREE_CODE (arg1) == INTEGER_CST
11115 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11116 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11117 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11119 strict_overflow_p = false;
11120 if (TREE_CODE (arg1) == INTEGER_CST
11121 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11122 &strict_overflow_p)))
11124 if (strict_overflow_p)
11125 fold_overflow_warning (("assuming signed overflow does not "
11126 "occur when simplifying "
11127 "multiplication"),
11128 WARN_STRICT_OVERFLOW_MISC);
11129 return fold_convert_loc (loc, type, tem);
11132 /* Optimize z * conj(z) for integer complex numbers. */
11133 if (TREE_CODE (arg0) == CONJ_EXPR
11134 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11135 return fold_mult_zconjz (loc, type, arg1);
11136 if (TREE_CODE (arg1) == CONJ_EXPR
11137 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11138 return fold_mult_zconjz (loc, type, arg0);
11140 else
11142 /* Maybe fold x * 0 to 0. The expressions aren't the same
11143 when x is NaN, since x * 0 is also NaN. Nor are they the
11144 same in modes with signed zeros, since multiplying a
11145 negative value by 0 gives -0, not +0. */
11146 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11147 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11148 && real_zerop (arg1))
11149 return omit_one_operand_loc (loc, type, arg1, arg0);
11150 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11151 Likewise for complex arithmetic with signed zeros. */
11152 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11153 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11154 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11155 && real_onep (arg1))
11156 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11158 /* Transform x * -1.0 into -x. */
11159 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11160 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11161 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11162 && real_minus_onep (arg1))
11163 return fold_convert_loc (loc, type, negate_expr (arg0));
11165 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11166 the result for floating point types due to rounding, so it is applied
11167 only if -fassociative-math is specified. */
11168 if (flag_associative_math
11169 && TREE_CODE (arg0) == RDIV_EXPR
11170 && TREE_CODE (arg1) == REAL_CST
11171 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11173 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11174 arg1);
11175 if (tem)
11176 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11177 TREE_OPERAND (arg0, 1));
11180 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11181 if (operand_equal_p (arg0, arg1, 0))
11183 tree tem = fold_strip_sign_ops (arg0);
11184 if (tem != NULL_TREE)
11186 tem = fold_convert_loc (loc, type, tem);
11187 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11191 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11192 This is not the same for NaNs or if signed zeros are
11193 involved. */
11194 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11195 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11196 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11197 && TREE_CODE (arg1) == COMPLEX_CST
11198 && real_zerop (TREE_REALPART (arg1)))
11200 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11201 if (real_onep (TREE_IMAGPART (arg1)))
11202 return
11203 fold_build2_loc (loc, COMPLEX_EXPR, type,
11204 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11205 rtype, arg0)),
11206 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11207 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11208 return
11209 fold_build2_loc (loc, COMPLEX_EXPR, type,
11210 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11211 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11212 rtype, arg0)));
11215 /* Optimize z * conj(z) for floating point complex numbers.
11216 Guarded by flag_unsafe_math_optimizations as non-finite
11217 imaginary components don't produce scalar results. */
11218 if (flag_unsafe_math_optimizations
11219 && TREE_CODE (arg0) == CONJ_EXPR
11220 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11221 return fold_mult_zconjz (loc, type, arg1);
11222 if (flag_unsafe_math_optimizations
11223 && TREE_CODE (arg1) == CONJ_EXPR
11224 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11225 return fold_mult_zconjz (loc, type, arg0);
11227 if (flag_unsafe_math_optimizations)
11229 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11230 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11232 /* Optimizations of root(...)*root(...). */
11233 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11235 tree rootfn, arg;
11236 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11237 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11239 /* Optimize sqrt(x)*sqrt(x) as x. */
11240 if (BUILTIN_SQRT_P (fcode0)
11241 && operand_equal_p (arg00, arg10, 0)
11242 && ! HONOR_SNANS (TYPE_MODE (type)))
11243 return arg00;
11245 /* Optimize root(x)*root(y) as root(x*y). */
11246 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11247 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11248 return build_call_expr_loc (loc, rootfn, 1, arg);
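      /* E.g. under -funsafe-math-optimizations, sqrt (x) * sqrt (x)
         folds to x and sqrt (x) * sqrt (y) to sqrt (x * y); neither is
         generally valid for negative or NaN inputs, hence the guard.  */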
11251 /* Optimize expN(x)*expN(y) as expN(x+y). */
11252 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11254 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11255 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11256 CALL_EXPR_ARG (arg0, 0),
11257 CALL_EXPR_ARG (arg1, 0));
11258 return build_call_expr_loc (loc, expfn, 1, arg);
11261 /* Optimizations of pow(...)*pow(...). */
11262 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11263 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11264 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11266 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11267 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11268 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11269 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11271 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11272 if (operand_equal_p (arg01, arg11, 0))
11274 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11275 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11276 arg00, arg10);
11277 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11280 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11281 if (operand_equal_p (arg00, arg10, 0))
11283 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11284 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11285 arg01, arg11);
11286 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
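      /* E.g. pow (x, 2.0) * pow (y, 2.0) folds to pow (x * y, 2.0)
         and pow (x, 2.0) * pow (x, 3.0) to pow (x, 5.0).  */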
11290 /* Optimize tan(x)*cos(x) as sin(x). */
11291 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11292 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11293 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11294 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11295 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11296 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11297 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11298 CALL_EXPR_ARG (arg1, 0), 0))
11300 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11302 if (sinfn != NULL_TREE)
11303 return build_call_expr_loc (loc, sinfn, 1,
11304 CALL_EXPR_ARG (arg0, 0));
11307 /* Optimize x*pow(x,c) as pow(x,c+1). */
11308 if (fcode1 == BUILT_IN_POW
11309 || fcode1 == BUILT_IN_POWF
11310 || fcode1 == BUILT_IN_POWL)
11312 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11313 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11314 if (TREE_CODE (arg11) == REAL_CST
11315 && !TREE_OVERFLOW (arg11)
11316 && operand_equal_p (arg0, arg10, 0))
11318 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11319 REAL_VALUE_TYPE c;
11320 tree arg;
11322 c = TREE_REAL_CST (arg11);
11323 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11324 arg = build_real (type, c);
11325 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11329 /* Optimize pow(x,c)*x as pow(x,c+1). */
11330 if (fcode0 == BUILT_IN_POW
11331 || fcode0 == BUILT_IN_POWF
11332 || fcode0 == BUILT_IN_POWL)
11334 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11335 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11336 if (TREE_CODE (arg01) == REAL_CST
11337 && !TREE_OVERFLOW (arg01)
11338 && operand_equal_p (arg1, arg00, 0))
11340 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11341 REAL_VALUE_TYPE c;
11342 tree arg;
11344 c = TREE_REAL_CST (arg01);
11345 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11346 arg = build_real (type, c);
11347 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11351 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11352 if (!in_gimple_form
11353 && optimize
11354 && operand_equal_p (arg0, arg1, 0))
11356 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11358 if (powfn)
11360 tree arg = build_real (type, dconst2);
11361 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11366 goto associate;
11368 case BIT_IOR_EXPR:
11369 bit_ior:
11370 if (integer_all_onesp (arg1))
11371 return omit_one_operand_loc (loc, type, arg1, arg0);
11372 if (integer_zerop (arg1))
11373 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11374 if (operand_equal_p (arg0, arg1, 0))
11375 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11377 /* ~X | X is -1. */
11378 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11379 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11381 t1 = build_zero_cst (type);
11382 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11383 return omit_one_operand_loc (loc, type, t1, arg1);
11386 /* X | ~X is -1. */
11387 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11388 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11390 t1 = build_zero_cst (type);
11391 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11392 return omit_one_operand_loc (loc, type, t1, arg0);
11395 /* Canonicalize (X & C1) | C2. */
11396 if (TREE_CODE (arg0) == BIT_AND_EXPR
11397 && TREE_CODE (arg1) == INTEGER_CST
11398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11400 int width = TYPE_PRECISION (type), w;
11401 wide_int c1 = TREE_OPERAND (arg0, 1);
11402 wide_int c2 = arg1;
11404 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11405 if ((c1 & c2) == c1)
11406 return omit_one_operand_loc (loc, type, arg1,
11407 TREE_OPERAND (arg0, 0));
11409 wide_int msk = wi::mask (width, false,
11410 TYPE_PRECISION (TREE_TYPE (arg1)));
11412 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11413 if (msk.and_not (c1 | c2) == 0)
11414 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11415 TREE_OPERAND (arg0, 0), arg1);
11417 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11418 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11419 mode which allows further optimizations. */
11420 c1 &= msk;
11421 c2 &= msk;
11422 wide_int c3 = c1.and_not (c2);
11423 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11425 wide_int mask = wi::mask (w, false,
11426 TYPE_PRECISION (type));
11427 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11429 c3 = mask;
11430 break;
11434 if (c3 != c1)
11435 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11436 fold_build2_loc (loc, BIT_AND_EXPR, type,
11437 TREE_OPERAND (arg0, 0),
11438 wide_int_to_tree (type,
11439 c3)),
11440 arg1);
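      /* Worked example (a sketch): in (x & 0xf0) | 0x3c, the bits of
         0xf0 already set in 0x3c are redundant, so C1 shrinks to
         0xf0 & ~0x3c == 0xc0 and the result is (x & 0xc0) | 0x3c.  */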
11443 /* (X & Y) | Y is (X, Y). */
11444 if (TREE_CODE (arg0) == BIT_AND_EXPR
11445 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11446 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11447 /* (X & Y) | X is (Y, X). */
11448 if (TREE_CODE (arg0) == BIT_AND_EXPR
11449 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11450 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11451 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11452 /* X | (X & Y) is (Y, X). */
11453 if (TREE_CODE (arg1) == BIT_AND_EXPR
11454 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11455 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11456 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11457 /* X | (Y & X) is (Y, X). */
11458 if (TREE_CODE (arg1) == BIT_AND_EXPR
11459 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11460 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11461 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11463 /* (X & ~Y) | (~X & Y) is X ^ Y */
11464 if (TREE_CODE (arg0) == BIT_AND_EXPR
11465 && TREE_CODE (arg1) == BIT_AND_EXPR)
11467 tree a0, a1, l0, l1, n0, n1;
11469 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11470 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11472 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11473 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11475 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11476 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11478 if ((operand_equal_p (n0, a0, 0)
11479 && operand_equal_p (n1, a1, 0))
11480 || (operand_equal_p (n0, a1, 0)
11481 && operand_equal_p (n1, a0, 0)))
11482 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11485 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11486 if (t1 != NULL_TREE)
11487 return t1;
11489 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11491 This results in more efficient code for machines without a NAND
11492 instruction. Combine will canonicalize to the first form
11493 which will allow use of NAND instructions provided by the
11494 backend if they exist. */
11495 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11496 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11498 return
11499 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11500 build2 (BIT_AND_EXPR, type,
11501 fold_convert_loc (loc, type,
11502 TREE_OPERAND (arg0, 0)),
11503 fold_convert_loc (loc, type,
11504 TREE_OPERAND (arg1, 0))));
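      /* I.e. De Morgan's law: ~a | ~b folds to ~(a & b).  */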
11507 /* See if this can be simplified into a rotate first. If that
11508 is unsuccessful continue in the association code. */
11509 goto bit_rotate;
11511 case BIT_XOR_EXPR:
11512 if (integer_zerop (arg1))
11513 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11514 if (integer_all_onesp (arg1))
11515 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11516 if (operand_equal_p (arg0, arg1, 0))
11517 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11519 /* ~X ^ X is -1. */
11520 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11521 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11523 t1 = build_zero_cst (type);
11524 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11525 return omit_one_operand_loc (loc, type, t1, arg1);
11528 /* X ^ ~X is -1. */
11529 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11530 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11532 t1 = build_zero_cst (type);
11533 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11534 return omit_one_operand_loc (loc, type, t1, arg0);
11537 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11538 with a constant, and the two constants have no bits in common,
11539 we should treat this as a BIT_IOR_EXPR since this may produce more
11540 simplifications. */
11541 if (TREE_CODE (arg0) == BIT_AND_EXPR
11542 && TREE_CODE (arg1) == BIT_AND_EXPR
11543 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11544 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11545 && wi::bit_and (TREE_OPERAND (arg0, 1),
11546 TREE_OPERAND (arg1, 1)) == 0)
11548 code = BIT_IOR_EXPR;
11549 goto bit_ior;
11552 /* (X | Y) ^ X -> Y & ~X. */
11553 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11556 tree t2 = TREE_OPERAND (arg0, 1);
11557 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11558 arg1);
11559 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11560 fold_convert_loc (loc, type, t2),
11561 fold_convert_loc (loc, type, t1));
11562 return t1;
11565 /* (Y | X) ^ X -> Y & ~X. */
11566 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11567 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11569 tree t2 = TREE_OPERAND (arg0, 0);
11570 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11571 arg1);
11572 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11573 fold_convert_loc (loc, type, t2),
11574 fold_convert_loc (loc, type, t1));
11575 return t1;
11578 /* X ^ (X | Y) -> Y & ~X. */
11579 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11580 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11582 tree t2 = TREE_OPERAND (arg1, 1);
11583 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11584 arg0);
11585 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11586 fold_convert_loc (loc, type, t2),
11587 fold_convert_loc (loc, type, t1));
11588 return t1;
11591 /* X ^ (Y | X) -> Y & ~X. */
11592 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11593 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11595 tree t2 = TREE_OPERAND (arg1, 0);
11596 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11597 arg0);
11598 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11599 fold_convert_loc (loc, type, t2),
11600 fold_convert_loc (loc, type, t1));
11601 return t1;
11604 /* Convert ~X ^ ~Y to X ^ Y. */
11605 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11606 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11607 return fold_build2_loc (loc, code, type,
11608 fold_convert_loc (loc, type,
11609 TREE_OPERAND (arg0, 0)),
11610 fold_convert_loc (loc, type,
11611 TREE_OPERAND (arg1, 0)));
11613 /* Convert ~X ^ C to X ^ ~C. */
11614 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11615 && TREE_CODE (arg1) == INTEGER_CST)
11616 return fold_build2_loc (loc, code, type,
11617 fold_convert_loc (loc, type,
11618 TREE_OPERAND (arg0, 0)),
11619 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11621 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11622 if (TREE_CODE (arg0) == BIT_AND_EXPR
11623 && integer_onep (TREE_OPERAND (arg0, 1))
11624 && integer_onep (arg1))
11625 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11626 build_zero_cst (TREE_TYPE (arg0)));
11628 /* Fold (X & Y) ^ Y as ~X & Y. */
11629 if (TREE_CODE (arg0) == BIT_AND_EXPR
11630 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11632 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11633 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11634 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11635 fold_convert_loc (loc, type, arg1));
11637 /* Fold (X & Y) ^ X as ~Y & X. */
11638 if (TREE_CODE (arg0) == BIT_AND_EXPR
11639 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11640 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11642 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11643 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11644 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11645 fold_convert_loc (loc, type, arg1));
11647 /* Fold X ^ (X & Y) as X & ~Y. */
11648 if (TREE_CODE (arg1) == BIT_AND_EXPR
11649 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11651 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11652 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11653 fold_convert_loc (loc, type, arg0),
11654 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11656 /* Fold X ^ (Y & X) as ~Y & X. */
11657 if (TREE_CODE (arg1) == BIT_AND_EXPR
11658 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11659 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11661 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11662 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11663 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11664 fold_convert_loc (loc, type, arg0));
11667 /* See if this can be simplified into a rotate first. If that
11668 is unsuccessful continue in the association code. */
11669 goto bit_rotate;
11671 case BIT_AND_EXPR:
11672 if (integer_all_onesp (arg1))
11673 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11674 if (integer_zerop (arg1))
11675 return omit_one_operand_loc (loc, type, arg1, arg0);
11676 if (operand_equal_p (arg0, arg1, 0))
11677 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11679 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11680 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11681 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11682 || (TREE_CODE (arg0) == EQ_EXPR
11683 && integer_zerop (TREE_OPERAND (arg0, 1))))
11684 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11685 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11687 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11688 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11689 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11690 || (TREE_CODE (arg1) == EQ_EXPR
11691 && integer_zerop (TREE_OPERAND (arg1, 1))))
11692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11693 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11695 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11696 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11697 && TREE_CODE (arg1) == INTEGER_CST
11698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11700 tree tmp1 = fold_convert_loc (loc, type, arg1);
11701 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11702 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11703 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11704 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11705 return
11706 fold_convert_loc (loc, type,
11707 fold_build2_loc (loc, BIT_IOR_EXPR,
11708 type, tmp2, tmp3));
11711 /* (X | Y) & Y is (X, Y). */
11712 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11713 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11714 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11715 /* (X | Y) & X is (Y, X). */
11716 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11717 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11718 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11719 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11720 /* X & (X | Y) is (Y, X). */
11721 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11722 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11723 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11724 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11725 /* X & (Y | X) is (Y, X). */
11726 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11727 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11728 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11729 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11731 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11733 && integer_onep (TREE_OPERAND (arg0, 1))
11734 && integer_onep (arg1))
11736 tree tem2;
11737 tem = TREE_OPERAND (arg0, 0);
11738 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11739 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11740 tem, tem2);
11741 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11742 build_zero_cst (TREE_TYPE (tem)));
11744 /* Fold ~X & 1 as (X & 1) == 0. */
11745 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11746 && integer_onep (arg1))
11748 tree tem2;
11749 tem = TREE_OPERAND (arg0, 0);
11750 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11751 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11752 tem, tem2);
11753 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11754 build_zero_cst (TREE_TYPE (tem)));
11756 /* Fold !X & 1 as X == 0. */
11757 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11758 && integer_onep (arg1))
11760 tem = TREE_OPERAND (arg0, 0);
11761 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11762 build_zero_cst (TREE_TYPE (tem)));
11765 /* Fold (X ^ Y) & Y as ~X & Y. */
11766 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11767 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11769 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11770 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11771 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11772 fold_convert_loc (loc, type, arg1));
11774 /* Fold (X ^ Y) & X as ~Y & X. */
11775 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11776 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11777 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11779 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11780 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11781 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11782 fold_convert_loc (loc, type, arg1));
11784 /* Fold X & (X ^ Y) as X & ~Y. */
11785 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11786 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11788 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11789 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11790 fold_convert_loc (loc, type, arg0),
11791 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11793 /* Fold X & (Y ^ X) as ~Y & X. */
11794 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11795 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11796 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11798 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11799 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11800 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11801 fold_convert_loc (loc, type, arg0));
11804 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11805 multiple of 1 << CST. */
11806 if (TREE_CODE (arg1) == INTEGER_CST)
11808 wide_int cst1 = arg1;
11809 wide_int ncst1 = -cst1;
11810 if ((cst1 & ncst1) == ncst1
11811 && multiple_of_p (type, arg0,
11812 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11813 return fold_convert_loc (loc, type, arg0);
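      /* E.g. (x * 8) & -8 folds to x * 8: the mask is -(1 << 3) and
         x * 8 is trivially a multiple of 1 << 3. Likewise
         (x * 24) & -8, since 24 is a multiple of 8.  */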
11816 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11817 bits from CST2. */
11818 if (TREE_CODE (arg1) == INTEGER_CST
11819 && TREE_CODE (arg0) == MULT_EXPR
11820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11822 wide_int warg1 = arg1;
11823 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11825 if (masked == 0)
11826 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11827 arg0, arg1);
11828 else if (masked != warg1)
11830 /* Avoid the transform if arg1 is a mask of some
11831 mode which allows further optimizations. */
11832 int pop = wi::popcount (warg1);
11833 if (!(pop >= BITS_PER_UNIT
11834 && exact_log2 (pop) != -1
11835 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11836 return fold_build2_loc (loc, code, type, op0,
11837 wide_int_to_tree (type, masked));
11841 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11842 ((A & N) + B) & M -> (A + B) & M
11843 Similarly if (N & M) == 0,
11844 ((A | N) + B) & M -> (A + B) & M
11845 and for - instead of + (or unary - instead of +)
11846 and/or ^ instead of |.
11847 If B is constant and (B & M) == 0, fold into A & M. */
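      /* E.g. with M == 0xff: ((a & 0xff) + b) & 0xff folds to
         (a + b) & 0xff, since here (N & M) == M; and
         ((a | 0x100) + b) & 0xff folds the same way, since there
         (N & M) == 0.  */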
11848 if (TREE_CODE (arg1) == INTEGER_CST)
11850 wide_int cst1 = arg1;
11851 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11852 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11853 && (TREE_CODE (arg0) == PLUS_EXPR
11854 || TREE_CODE (arg0) == MINUS_EXPR
11855 || TREE_CODE (arg0) == NEGATE_EXPR)
11856 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11857 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11859 tree pmop[2];
11860 int which = 0;
11861 wide_int cst0;
11863 /* Now we know that arg0 is (C + D) or (C - D) or
11864 -C and arg1 (M) equals (1LL << cst) - 1.
11865 Store C into PMOP[0] and D into PMOP[1]. */
11866 pmop[0] = TREE_OPERAND (arg0, 0);
11867 pmop[1] = NULL;
11868 if (TREE_CODE (arg0) != NEGATE_EXPR)
11870 pmop[1] = TREE_OPERAND (arg0, 1);
11871 which = 1;
11874 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11875 which = -1;
11877 for (; which >= 0; which--)
11878 switch (TREE_CODE (pmop[which]))
11880 case BIT_AND_EXPR:
11881 case BIT_IOR_EXPR:
11882 case BIT_XOR_EXPR:
11883 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11884 != INTEGER_CST)
11885 break;
11886 cst0 = TREE_OPERAND (pmop[which], 1);
11887 cst0 &= cst1;
11888 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11890 if (cst0 != cst1)
11891 break;
11893 else if (cst0 != 0)
11894 break;
11895 /* If C or D is of the form (A & N) where
11896 (N & M) == M, or of the form (A | N) or
11897 (A ^ N) where (N & M) == 0, replace it with A. */
11898 pmop[which] = TREE_OPERAND (pmop[which], 0);
11899 break;
11900 case INTEGER_CST:
11901 /* If C or D is a constant N where (N & M) == 0, it can be
11902 omitted (assumed 0). */
11903 if ((TREE_CODE (arg0) == PLUS_EXPR
11904 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11905 && (cst1 & pmop[which]) == 0)
11906 pmop[which] = NULL;
11907 break;
11908 default:
11909 break;
11912 /* Only build anything new if we optimized one or both arguments
11913 above. */
11914 if (pmop[0] != TREE_OPERAND (arg0, 0)
11915 || (TREE_CODE (arg0) != NEGATE_EXPR
11916 && pmop[1] != TREE_OPERAND (arg0, 1)))
11918 tree utype = TREE_TYPE (arg0);
11919 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11921 /* Perform the operations in a type that has defined
11922 overflow behavior. */
11923 utype = unsigned_type_for (TREE_TYPE (arg0));
11924 if (pmop[0] != NULL)
11925 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11926 if (pmop[1] != NULL)
11927 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11930 if (TREE_CODE (arg0) == NEGATE_EXPR)
11931 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11932 else if (TREE_CODE (arg0) == PLUS_EXPR)
11934 if (pmop[0] != NULL && pmop[1] != NULL)
11935 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11936 pmop[0], pmop[1]);
11937 else if (pmop[0] != NULL)
11938 tem = pmop[0];
11939 else if (pmop[1] != NULL)
11940 tem = pmop[1];
11941 else
11942 return build_int_cst (type, 0);
11944 else if (pmop[0] == NULL)
11945 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11946 else
11947 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11948 pmop[0], pmop[1]);
11949 /* TEM is now the new binary +, - or unary - replacement. */
11950 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11951 fold_convert_loc (loc, utype, arg1));
11952 return fold_convert_loc (loc, type, tem);
11957 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11958 if (t1 != NULL_TREE)
11959 return t1;
11960 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11961 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11962 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11964 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11966 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11967 if (mask == -1)
11968 return
11969 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11972 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11974 This results in more efficient code for machines without a NOR
11975 instruction. Combine will canonicalize to the first form
11976 which will allow use of NOR instructions provided by the
11977 backend if they exist. */
11978 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11979 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11981 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11982 build2 (BIT_IOR_EXPR, type,
11983 fold_convert_loc (loc, type,
11984 TREE_OPERAND (arg0, 0)),
11985 fold_convert_loc (loc, type,
11986 TREE_OPERAND (arg1, 0))));
11989 /* If arg0 is derived from the address of an object or function, we may
11990 be able to fold this expression using the object or function's
11991 alignment. */
11992 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11994 unsigned HOST_WIDE_INT modulus, residue;
11995 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11997 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11998 integer_onep (arg1));
12000 /* This works because modulus is a power of 2. If this weren't the
12001 case, we'd have to replace it by its greatest power-of-2
12002 divisor: modulus & -modulus. */
12003 if (low < modulus)
12004 return build_int_cst (type, residue & low);
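      /* E.g. if ARG0 is the address of an object known to be 16-byte
         aligned, MODULUS is 16 and RESIDUE is 0, so ADDR & 15 folds
         to the constant 0.  */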
12007 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12008 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12009 if the new mask might be further optimized. */
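      /* E.g. in (x << 8) & 0xff00 the low 8 bits of the shift are known
         to be zero, so the mask may be widened to 0xff00 | 0xff == 0xffff,
         a 16-bit mode mask that later folds can exploit.  */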
12010 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12011 || TREE_CODE (arg0) == RSHIFT_EXPR)
12012 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12013 && TREE_CODE (arg1) == INTEGER_CST
12014 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12015 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12016 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12017 < TYPE_PRECISION (TREE_TYPE (arg0))))
12019 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12020 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12021 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12022 tree shift_type = TREE_TYPE (arg0);
12024 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12025 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12026 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12027 && TYPE_PRECISION (TREE_TYPE (arg0))
12028 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12030 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12031 tree arg00 = TREE_OPERAND (arg0, 0);
12032 /* See if more bits can be proven to be zero because of
12033 zero extension. */
12034 if (TREE_CODE (arg00) == NOP_EXPR
12035 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12037 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12038 if (TYPE_PRECISION (inner_type)
12039 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12040 && TYPE_PRECISION (inner_type) < prec)
12042 prec = TYPE_PRECISION (inner_type);
12043 /* See if we can shorten the right shift. */
12044 if (shiftc < prec)
12045 shift_type = inner_type;
12046 /* Otherwise X >> C1 is all zeros, so we'll optimize
12047 it into (X, 0) later on by making sure zerobits
12048 is all ones. */
12051 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12052 if (shiftc < prec)
12054 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12055 zerobits <<= prec - shiftc;
12057 /* For an arithmetic shift, if the sign bit could be set, zerobits
12058 can actually contain sign bits, so no transformation is
12059 possible unless MASK masks them all away. In that
12060 case the shift needs to be converted into a logical shift. */
12061 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12062 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12064 if ((mask & zerobits) == 0)
12065 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12066 else
12067 zerobits = 0;
12071 /* ((X << 16) & 0xff00) is (X, 0). */
12072 if ((mask & zerobits) == mask)
12073 return omit_one_operand_loc (loc, type,
12074 build_int_cst (type, 0), arg0);
12076 newmask = mask | zerobits;
12077 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12079 /* Only do the transformation if NEWMASK is some integer
12080 mode's mask. */
12081 for (prec = BITS_PER_UNIT;
12082 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12083 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12084 break;
12085 if (prec < HOST_BITS_PER_WIDE_INT
12086 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12088 tree newmaskt;
12090 if (shift_type != TREE_TYPE (arg0))
12092 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12093 fold_convert_loc (loc, shift_type,
12094 TREE_OPERAND (arg0, 0)),
12095 TREE_OPERAND (arg0, 1));
12096 tem = fold_convert_loc (loc, type, tem);
12098 else
12099 tem = op0;
12100 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12101 if (!tree_int_cst_equal (newmaskt, arg1))
12102 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12107 goto associate;
12109 case RDIV_EXPR:
12110 /* Don't touch a floating-point divide by zero unless the mode
12111 of the constant can represent infinity. */
12112 if (TREE_CODE (arg1) == REAL_CST
12113 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12114 && real_zerop (arg1))
12115 return NULL_TREE;
12117 /* Optimize A / A to 1.0 if we don't care about
12118 NaNs or Infinities. Skip the transformation
12119 for non-real operands. */
12120 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12121 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12122 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12123 && operand_equal_p (arg0, arg1, 0))
12125 tree r = build_real (TREE_TYPE (arg0), dconst1);
12127 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12130 /* The complex version of the above A / A optimization. */
12131 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12132 && operand_equal_p (arg0, arg1, 0))
12134 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12135 if (! HONOR_NANS (TYPE_MODE (elem_type))
12136 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12138 tree r = build_real (elem_type, dconst1);
12139 /* omit_two_operands will call fold_convert for us. */
12140 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12144 /* (-A) / (-B) -> A / B */
12145 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12146 return fold_build2_loc (loc, RDIV_EXPR, type,
12147 TREE_OPERAND (arg0, 0),
12148 negate_expr (arg1));
12149 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12150 return fold_build2_loc (loc, RDIV_EXPR, type,
12151 negate_expr (arg0),
12152 TREE_OPERAND (arg1, 0));
12154 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12155 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12156 && real_onep (arg1))
12157 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12159 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12160 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12161 && real_minus_onep (arg1))
12162 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12163 negate_expr (arg0)));
12165 /* If ARG1 is a constant, we can convert this to a multiply by the
12166 reciprocal. This does not have the same rounding properties,
12167 so only do this if -freciprocal-math. We can actually
12168 always safely do it if ARG1 is a power of two, but it's hard to
12169 tell if it is or not in a portable manner. */
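      /* E.g. x / 4.0 becomes x * 0.25 under -freciprocal-math; for a
         power of two the product is bit-exact, which is what the
         exact_inverse path below verifies without needing the flag.  */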
12170 if (optimize
12171 && (TREE_CODE (arg1) == REAL_CST
12172 || (TREE_CODE (arg1) == COMPLEX_CST
12173 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12174 || (TREE_CODE (arg1) == VECTOR_CST
12175 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12177 if (flag_reciprocal_math
12178 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12179 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12180 /* Find the reciprocal if optimizing and the result is exact.
12181 TODO: Complex reciprocal not implemented. */
12182 if (TREE_CODE (arg1) != COMPLEX_CST)
12184 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12186 if (inverse)
12187 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12190 /* Convert A/B/C to A/(B*C). */
12191 if (flag_reciprocal_math
12192 && TREE_CODE (arg0) == RDIV_EXPR)
12193 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12194 fold_build2_loc (loc, MULT_EXPR, type,
12195 TREE_OPERAND (arg0, 1), arg1));
12197 /* Convert A/(B/C) to (A/B)*C. */
12198 if (flag_reciprocal_math
12199 && TREE_CODE (arg1) == RDIV_EXPR)
12200 return fold_build2_loc (loc, MULT_EXPR, type,
12201 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12202 TREE_OPERAND (arg1, 0)),
12203 TREE_OPERAND (arg1, 1));
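      /* E.g. (a / b) / c becomes a / (b * c), and a / (b / c) becomes
         (a / b) * c, each trading a division for a multiplication.  */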
12205 /* Convert C1/(X*C2) into (C1/C2)/X. */
12206 if (flag_reciprocal_math
12207 && TREE_CODE (arg1) == MULT_EXPR
12208 && TREE_CODE (arg0) == REAL_CST
12209 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12211 tree tem = const_binop (RDIV_EXPR, arg0,
12212 TREE_OPERAND (arg1, 1));
12213 if (tem)
12214 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12215 TREE_OPERAND (arg1, 0));
12218 if (flag_unsafe_math_optimizations)
12220 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12221 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12223 /* Optimize sin(x)/cos(x) as tan(x). */
12224 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12225 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12226 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12227 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12228 CALL_EXPR_ARG (arg1, 0), 0))
12230 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12232 if (tanfn != NULL_TREE)
12233 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12236 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12237 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12238 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12239 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12240 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12241 CALL_EXPR_ARG (arg1, 0), 0))
12243 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12245 if (tanfn != NULL_TREE)
12247 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12248 CALL_EXPR_ARG (arg0, 0));
12249 return fold_build2_loc (loc, RDIV_EXPR, type,
12250 build_real (type, dconst1), tmp);
12254 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12255 NaNs or Infinities. */
12256 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12257 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12258 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12260 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12261 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12263 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12264 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12265 && operand_equal_p (arg00, arg01, 0))
12267 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12269 if (cosfn != NULL_TREE)
12270 return build_call_expr_loc (loc, cosfn, 1, arg00);
12274 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12275 NaNs or Infinities. */
12276 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12277 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12278 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12280 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12281 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12283 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12284 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12285 && operand_equal_p (arg00, arg01, 0))
12287 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12289 if (cosfn != NULL_TREE)
12291 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12292 return fold_build2_loc (loc, RDIV_EXPR, type,
12293 build_real (type, dconst1),
12294 tmp);
12299 /* Optimize pow(x,c)/x as pow(x,c-1). */
12300 if (fcode0 == BUILT_IN_POW
12301 || fcode0 == BUILT_IN_POWF
12302 || fcode0 == BUILT_IN_POWL)
12304 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12305 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12306 if (TREE_CODE (arg01) == REAL_CST
12307 && !TREE_OVERFLOW (arg01)
12308 && operand_equal_p (arg1, arg00, 0))
12310 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12311 REAL_VALUE_TYPE c;
12312 tree arg;
12314 c = TREE_REAL_CST (arg01);
12315 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12316 arg = build_real (type, c);
12317 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
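/* E.g. "pow (x, 3.0) / x" folds to "pow (x, 2.0)".  */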
12321 /* Optimize a/root(b/c) into a*root(c/b). */
12322 if (BUILTIN_ROOT_P (fcode1))
12324 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12326 if (TREE_CODE (rootarg) == RDIV_EXPR)
12328 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12329 tree b = TREE_OPERAND (rootarg, 0);
12330 tree c = TREE_OPERAND (rootarg, 1);
12332 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12334 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12335 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12339 /* Optimize x/expN(y) into x*expN(-y). */
12340 if (BUILTIN_EXPONENT_P (fcode1))
12342 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12343 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12344 arg1 = build_call_expr_loc (loc,
12345 expfn, 1,
12346 fold_convert_loc (loc, type, arg));
12347 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12350 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12351 if (fcode1 == BUILT_IN_POW
12352 || fcode1 == BUILT_IN_POWF
12353 || fcode1 == BUILT_IN_POWL)
12355 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12356 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12357 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12358 tree neg11 = fold_convert_loc (loc, type,
12359 negate_expr (arg11));
12360 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12361 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12364 return NULL_TREE;
12366 case TRUNC_DIV_EXPR:
12367 /* Optimize (X & (-A)) / A where A is a power of 2,
12368 to X >> log2(A). */
12369 if (TREE_CODE (arg0) == BIT_AND_EXPR
12370 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12371 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12373 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12374 arg1, TREE_OPERAND (arg0, 1));
12375 if (sum && integer_zerop (sum)) {
12376 tree pow2 = build_int_cst (integer_type_node,
12377 wi::exact_log2 (arg1));
12378 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12379 TREE_OPERAND (arg0, 0), pow2);
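/* E.g. for signed x, "(x & -16) / 16" folds to "x >> 4": the masked
   value is always an exact multiple of 16, so the arithmetic shift and
   the truncating division agree even for negative x.  */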
12383 /* Fall through */
12385 case FLOOR_DIV_EXPR:
12386 /* Simplify A / (B << N) where A and B are positive and B is
12387 a power of 2, to A >> (N + log2(B)). */
12388 strict_overflow_p = false;
12389 if (TREE_CODE (arg1) == LSHIFT_EXPR
12390 && (TYPE_UNSIGNED (type)
12391 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12393 tree sval = TREE_OPERAND (arg1, 0);
12394 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12396 tree sh_cnt = TREE_OPERAND (arg1, 1);
12397 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12398 wi::exact_log2 (sval));
12400 if (strict_overflow_p)
12401 fold_overflow_warning (("assuming signed overflow does not "
12402 "occur when simplifying A / (B << N)"),
12403 WARN_STRICT_OVERFLOW_MISC);
12405 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12406 sh_cnt, pow2);
12407 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12408 fold_convert_loc (loc, type, arg0), sh_cnt);
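/* E.g. for unsigned a, "a / (4u << n)" folds to "a >> (n + 2)".  */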
12412 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12413 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12414 if (INTEGRAL_TYPE_P (type)
12415 && TYPE_UNSIGNED (type)
12416 && code == FLOOR_DIV_EXPR)
12417 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12419 /* Fall through */
12421 case ROUND_DIV_EXPR:
12422 case CEIL_DIV_EXPR:
12423 case EXACT_DIV_EXPR:
12424 if (integer_onep (arg1))
12425 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12426 if (integer_zerop (arg1))
12427 return NULL_TREE;
12428 /* X / -1 is -X. */
12429 if (!TYPE_UNSIGNED (type)
12430 && TREE_CODE (arg1) == INTEGER_CST
12431 && wi::eq_p (arg1, -1))
12432 return fold_convert_loc (loc, type, negate_expr (arg0));
12434 /* Convert -A / -B to A / B when the type is signed and overflow is
12435 undefined. */
12436 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12437 && TREE_CODE (arg0) == NEGATE_EXPR
12438 && negate_expr_p (arg1))
12440 if (INTEGRAL_TYPE_P (type))
12441 fold_overflow_warning (("assuming signed overflow does not occur "
12442 "when distributing negation across "
12443 "division"),
12444 WARN_STRICT_OVERFLOW_MISC);
12445 return fold_build2_loc (loc, code, type,
12446 fold_convert_loc (loc, type,
12447 TREE_OPERAND (arg0, 0)),
12448 fold_convert_loc (loc, type,
12449 negate_expr (arg1)));
12451 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12452 && TREE_CODE (arg1) == NEGATE_EXPR
12453 && negate_expr_p (arg0))
12455 if (INTEGRAL_TYPE_P (type))
12456 fold_overflow_warning (("assuming signed overflow does not occur "
12457 "when distributing negation across "
12458 "division"),
12459 WARN_STRICT_OVERFLOW_MISC);
12460 return fold_build2_loc (loc, code, type,
12461 fold_convert_loc (loc, type,
12462 negate_expr (arg0)),
12463 fold_convert_loc (loc, type,
12464 TREE_OPERAND (arg1, 0)));
12467 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12468 operation, EXACT_DIV_EXPR.
12470 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12471 At one time others generated faster code; it's not clear whether they
12472 still do after the last round of changes to the DIV code in expmed.c. */
12473 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12474 && multiple_of_p (type, arg0, arg1))
12475 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12477 strict_overflow_p = false;
12478 if (TREE_CODE (arg1) == INTEGER_CST
12479 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12480 &strict_overflow_p)))
12482 if (strict_overflow_p)
12483 fold_overflow_warning (("assuming signed overflow does not occur "
12484 "when simplifying division"),
12485 WARN_STRICT_OVERFLOW_MISC);
12486 return fold_convert_loc (loc, type, tem);
12489 return NULL_TREE;
12491 case CEIL_MOD_EXPR:
12492 case FLOOR_MOD_EXPR:
12493 case ROUND_MOD_EXPR:
12494 case TRUNC_MOD_EXPR:
12495 /* X % 1 is always zero, but be sure to preserve any side
12496 effects in X. */
12497 if (integer_onep (arg1))
12498 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12500 /* For X % 0, return X % 0 unchanged so that we can get the
12501 proper warnings and errors. */
12502 if (integer_zerop (arg1))
12503 return NULL_TREE;
12505 /* 0 % X is always zero, but be sure to preserve any side
12506 effects in X. Place this after checking for X == 0. */
12507 if (integer_zerop (arg0))
12508 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12510 /* X % -1 is zero. */
12511 if (!TYPE_UNSIGNED (type)
12512 && TREE_CODE (arg1) == INTEGER_CST
12513 && wi::eq_p (arg1, -1))
12514 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12516 /* X % -C is the same as X % C. */
12517 if (code == TRUNC_MOD_EXPR
12518 && TYPE_SIGN (type) == SIGNED
12519 && TREE_CODE (arg1) == INTEGER_CST
12520 && !TREE_OVERFLOW (arg1)
12521 && wi::neg_p (arg1)
12522 && !TYPE_OVERFLOW_TRAPS (type)
12523 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12524 && !sign_bit_p (arg1, arg1))
12525 return fold_build2_loc (loc, code, type,
12526 fold_convert_loc (loc, type, arg0),
12527 fold_convert_loc (loc, type,
12528 negate_expr (arg1)));
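/* E.g. "x % -16" folds to "x % 16" for signed x: with truncating
   division the sign of the result follows the dividend, so negating
   the divisor cannot change it.  */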
12530 /* X % -Y is the same as X % Y. */
12531 if (code == TRUNC_MOD_EXPR
12532 && !TYPE_UNSIGNED (type)
12533 && TREE_CODE (arg1) == NEGATE_EXPR
12534 && !TYPE_OVERFLOW_TRAPS (type))
12535 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12536 fold_convert_loc (loc, type,
12537 TREE_OPERAND (arg1, 0)));
12539 strict_overflow_p = false;
12540 if (TREE_CODE (arg1) == INTEGER_CST
12541 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12542 &strict_overflow_p)))
12544 if (strict_overflow_p)
12545 fold_overflow_warning (("assuming signed overflow does not occur "
12546 "when simplifying modulus"),
12547 WARN_STRICT_OVERFLOW_MISC);
12548 return fold_convert_loc (loc, type, tem);
12551 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12552 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12553 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12554 && (TYPE_UNSIGNED (type)
12555 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12557 tree c = arg1;
12558 /* Also optimize A % (C << N) where C is a power of 2,
12559 to A & ((C << N) - 1). */
12560 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12561 c = TREE_OPERAND (arg1, 0);
12563 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12565 tree mask
12566 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12567 build_int_cst (TREE_TYPE (arg1), 1));
12568 if (strict_overflow_p)
12569 fold_overflow_warning (("assuming signed overflow does not "
12570 "occur when simplifying "
12571 "X % (power of two)"),
12572 WARN_STRICT_OVERFLOW_MISC);
12573 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12574 fold_convert_loc (loc, type, arg0),
12575 fold_convert_loc (loc, type, mask));
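/* E.g. "x % 8" for unsigned x folds to "x & 7"; 13 % 8 and (13 & 7)
   are both 5, and the AND avoids the division entirely.  */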
12579 return NULL_TREE;
12581 case LROTATE_EXPR:
12582 case RROTATE_EXPR:
12583 if (integer_all_onesp (arg0))
12584 return omit_one_operand_loc (loc, type, arg0, arg1);
12585 goto shift;
12587 case RSHIFT_EXPR:
12588 /* Optimize -1 >> x for arithmetic right shifts. */
12589 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12590 && tree_expr_nonnegative_p (arg1))
12591 return omit_one_operand_loc (loc, type, arg0, arg1);
12592 /* ... fall through ... */
12594 case LSHIFT_EXPR:
12595 shift:
12596 if (integer_zerop (arg1))
12597 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12598 if (integer_zerop (arg0))
12599 return omit_one_operand_loc (loc, type, arg0, arg1);
12601 /* Prefer vector1 << scalar to vector1 << vector2
12602 if vector2 is uniform. */
12603 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12604 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12605 return fold_build2_loc (loc, code, type, op0, tem);
12607 /* Since a negative shift count is not well-defined,
12608 don't try to compute it in the compiler. */
12609 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12610 return NULL_TREE;
12612 prec = element_precision (type);
12614 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12615 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12616 && tree_to_uhwi (arg1) < prec
12617 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12618 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12620 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12621 + tree_to_uhwi (arg1));
12623 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12624 being well defined. */
12625 if (low >= prec)
12627 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12628 low = low % prec;
12629 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12630 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12631 TREE_OPERAND (arg0, 0));
12632 else
12633 low = prec - 1;
12636 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12637 build_int_cst (TREE_TYPE (arg1), low));
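/* E.g. "(x << 3) << 5" folds to "x << 8".  When the combined count
   reaches the precision, a logical shift degenerates to zero and a
   rotate count is reduced modulo the precision, as handled above.  */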
12640 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12641 into x & ((unsigned)-1 >> c) for unsigned types. */
12642 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12643 || (TYPE_UNSIGNED (type)
12644 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12645 && tree_fits_uhwi_p (arg1)
12646 && tree_to_uhwi (arg1) < prec
12647 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12648 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12650 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12651 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12652 tree lshift;
12653 tree arg00;
12655 if (low0 == low1)
12657 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12659 lshift = build_minus_one_cst (type);
12660 lshift = const_binop (code, lshift, arg1);
12662 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12666 /* Rewrite an LROTATE_EXPR by a constant into an
12667 RROTATE_EXPR by a new constant. */
12668 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12670 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12671 tem = const_binop (MINUS_EXPR, tem, arg1);
12672 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12675 /* If we have a rotate of a bit operation with the rotate count and
12676 the second operand of the bit operation both constant,
12677 permute the two operations. */
12678 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12679 && (TREE_CODE (arg0) == BIT_AND_EXPR
12680 || TREE_CODE (arg0) == BIT_IOR_EXPR
12681 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12682 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12683 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12684 fold_build2_loc (loc, code, type,
12685 TREE_OPERAND (arg0, 0), arg1),
12686 fold_build2_loc (loc, code, type,
12687 TREE_OPERAND (arg0, 1), arg1));
12689 /* Two consecutive rotates adding up to some integer
12690 multiple of the precision of the type can be ignored. */
12691 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12692 && TREE_CODE (arg0) == RROTATE_EXPR
12693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12694 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12695 prec) == 0)
12696 return TREE_OPERAND (arg0, 0);
12698 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12699 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12700 if the latter can be further optimized. */
12701 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12702 && TREE_CODE (arg0) == BIT_AND_EXPR
12703 && TREE_CODE (arg1) == INTEGER_CST
12704 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12706 tree mask = fold_build2_loc (loc, code, type,
12707 fold_convert_loc (loc, type,
12708 TREE_OPERAND (arg0, 1)),
12709 arg1);
12710 tree shift = fold_build2_loc (loc, code, type,
12711 fold_convert_loc (loc, type,
12712 TREE_OPERAND (arg0, 0)),
12713 arg1);
12714 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12715 if (tem)
12716 return tem;
12719 return NULL_TREE;
12721 case MIN_EXPR:
12722 if (operand_equal_p (arg0, arg1, 0))
12723 return omit_one_operand_loc (loc, type, arg0, arg1);
12724 if (INTEGRAL_TYPE_P (type)
12725 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12726 return omit_one_operand_loc (loc, type, arg1, arg0);
12727 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12728 if (tem)
12729 return tem;
12730 goto associate;
12732 case MAX_EXPR:
12733 if (operand_equal_p (arg0, arg1, 0))
12734 return omit_one_operand_loc (loc, type, arg0, arg1);
12735 if (INTEGRAL_TYPE_P (type)
12736 && TYPE_MAX_VALUE (type)
12737 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12738 return omit_one_operand_loc (loc, type, arg1, arg0);
12739 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12740 if (tem)
12741 return tem;
12742 goto associate;
12744 case TRUTH_ANDIF_EXPR:
12745 /* Note that the operands of this must be ints
12746 and their values must be 0 or 1.
12747 ("true" is a fixed value perhaps depending on the language.) */
12748 /* If first arg is constant zero, return it. */
12749 if (integer_zerop (arg0))
12750 return fold_convert_loc (loc, type, arg0);
12751 case TRUTH_AND_EXPR:
12752 /* If either arg is constant true, drop it. */
12753 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12754 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12755 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12756 /* Preserve sequence points. */
12757 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12758 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12759 /* If second arg is constant zero, result is zero, but first arg
12760 must be evaluated. */
12761 if (integer_zerop (arg1))
12762 return omit_one_operand_loc (loc, type, arg1, arg0);
12763 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12764 case will be handled here. */
12765 if (integer_zerop (arg0))
12766 return omit_one_operand_loc (loc, type, arg0, arg1);
12768 /* !X && X is always false. */
12769 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12770 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12771 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12772 /* X && !X is always false. */
12773 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12774 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12777 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12778 means A >= Y && A != MAX, but in this case we know that
12779 A < X <= MAX. */
12781 if (!TREE_SIDE_EFFECTS (arg0)
12782 && !TREE_SIDE_EFFECTS (arg1))
12784 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12785 if (tem && !operand_equal_p (tem, arg0, 0))
12786 return fold_build2_loc (loc, code, type, tem, arg1);
12788 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12789 if (tem && !operand_equal_p (tem, arg1, 0))
12790 return fold_build2_loc (loc, code, type, arg0, tem);
12793 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12794 != NULL_TREE)
12795 return tem;
12797 return NULL_TREE;
12799 case TRUTH_ORIF_EXPR:
12800 /* Note that the operands of this must be ints
12801 and their values must be 0 or true.
12802 ("true" is a fixed value perhaps depending on the language.) */
12803 /* If first arg is constant true, return it. */
12804 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12805 return fold_convert_loc (loc, type, arg0);
12806 case TRUTH_OR_EXPR:
12807 /* If either arg is constant zero, drop it. */
12808 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12809 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12810 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12811 /* Preserve sequence points. */
12812 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12813 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12814 /* If second arg is constant true, result is true, but we must
12815 evaluate first arg. */
12816 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12817 return omit_one_operand_loc (loc, type, arg1, arg0);
12818 /* Likewise for first arg, but note this only occurs here for
12819 TRUTH_OR_EXPR. */
12820 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12821 return omit_one_operand_loc (loc, type, arg0, arg1);
12823 /* !X || X is always true. */
12824 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12826 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12827 /* X || !X is always true. */
12828 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12829 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12830 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12832 /* (X && !Y) || (!X && Y) is X ^ Y */
12833 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12834 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12836 tree a0, a1, l0, l1, n0, n1;
12838 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12839 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12841 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12842 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12844 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12845 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12847 if ((operand_equal_p (n0, a0, 0)
12848 && operand_equal_p (n1, a1, 0))
12849 || (operand_equal_p (n0, a1, 0)
12850 && operand_equal_p (n1, a0, 0)))
12851 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
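/* E.g. "(a && !b) || (!a && b)" folds to the truth-XOR "a ^ b" once
   both operands are reduced to truth values.  */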
12854 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12855 != NULL_TREE)
12856 return tem;
12858 return NULL_TREE;
12860 case TRUTH_XOR_EXPR:
12861 /* If the second arg is constant zero, drop it. */
12862 if (integer_zerop (arg1))
12863 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12864 /* If the second arg is constant true, this is a logical inversion. */
12865 if (integer_onep (arg1))
12867 tem = invert_truthvalue_loc (loc, arg0);
12868 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12870 /* Identical arguments cancel to zero. */
12871 if (operand_equal_p (arg0, arg1, 0))
12872 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12874 /* !X ^ X is always true. */
12875 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12876 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12877 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12879 /* X ^ !X is always true. */
12880 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12881 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12882 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12884 return NULL_TREE;
12886 case EQ_EXPR:
12887 case NE_EXPR:
12888 STRIP_NOPS (arg0);
12889 STRIP_NOPS (arg1);
12891 tem = fold_comparison (loc, code, type, op0, op1);
12892 if (tem != NULL_TREE)
12893 return tem;
12895 /* bool_var != 0 becomes bool_var. */
12896 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12897 && code == NE_EXPR)
12898 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12900 /* bool_var == 1 becomes bool_var. */
12901 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12902 && code == EQ_EXPR)
12903 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12905 /* bool_var != 1 becomes !bool_var. */
12906 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12907 && code == NE_EXPR)
12908 return fold_convert_loc (loc, type,
12909 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12910 TREE_TYPE (arg0), arg0));
12912 /* bool_var == 0 becomes !bool_var. */
12913 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12914 && code == EQ_EXPR)
12915 return fold_convert_loc (loc, type,
12916 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12917 TREE_TYPE (arg0), arg0));
12919 /* !exp != 0 becomes !exp */
12920 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12921 && code == NE_EXPR)
12922 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12924 /* If this is an equality comparison of the address of two non-weak,
12925 unaliased symbols neither of which is extern (since we do not
12926 have access to attributes for externs), then we know the result. */
12927 if (TREE_CODE (arg0) == ADDR_EXPR
12928 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12929 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12930 && ! lookup_attribute ("alias",
12931 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12932 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12933 && TREE_CODE (arg1) == ADDR_EXPR
12934 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12935 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12936 && ! lookup_attribute ("alias",
12937 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12938 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12940 /* We know that we're looking at the address of two
12941 non-weak, unaliased, static _DECL nodes.
12943 It is both wasteful and incorrect to call operand_equal_p
12944 to compare the two ADDR_EXPR nodes. It is wasteful in that
12945 all we need to do is test pointer equality for the arguments
12946 to the two ADDR_EXPR nodes. It is incorrect to use
12947 operand_equal_p as that function is NOT equivalent to a
12948 C equality test. It can in fact return false for two
12949 objects which would test as equal using the C equality
12950 operator. */
12951 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12952 return constant_boolean_node (equal
12953 ? code == EQ_EXPR : code != EQ_EXPR,
12954 type);
12957 /* Similarly for a NEGATE_EXPR. */
12958 if (TREE_CODE (arg0) == NEGATE_EXPR
12959 && TREE_CODE (arg1) == INTEGER_CST
12960 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12961 arg1)))
12962 && TREE_CODE (tem) == INTEGER_CST
12963 && !TREE_OVERFLOW (tem))
12964 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12966 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12967 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12968 && TREE_CODE (arg1) == INTEGER_CST
12969 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12970 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12971 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12972 fold_convert_loc (loc,
12973 TREE_TYPE (arg0),
12974 arg1),
12975 TREE_OPERAND (arg0, 1)));
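/* E.g. "(x ^ 5) == 3" folds to "x == 6", evaluating 5 ^ 3 at compile
   time.  */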
12977 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12978 if ((TREE_CODE (arg0) == PLUS_EXPR
12979 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12980 || TREE_CODE (arg0) == MINUS_EXPR)
12981 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12982 0)),
12983 arg1, 0)
12984 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12985 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12987 tree val = TREE_OPERAND (arg0, 1);
12988 return omit_two_operands_loc (loc, type,
12989 fold_build2_loc (loc, code, type,
12990 val,
12991 build_int_cst (TREE_TYPE (val),
12992 0)),
12993 TREE_OPERAND (arg0, 0), arg1);
12996 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12997 if (TREE_CODE (arg0) == MINUS_EXPR
12998 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12999 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13000 1)),
13001 arg1, 0)
13002 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
13004 return omit_two_operands_loc (loc, type,
13005 code == NE_EXPR
13006 ? boolean_true_node : boolean_false_node,
13007 TREE_OPERAND (arg0, 1), arg1);
13010 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13011 if (TREE_CODE (arg0) == ABS_EXPR
13012 && (integer_zerop (arg1) || real_zerop (arg1)))
13013 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13015 /* If this is an EQ or NE comparison with zero and ARG0 is
13016 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13017 two operations, but the latter can be done in one less insn
13018 on machines that have only two-operand insns or on which a
13019 constant cannot be the first operand. */
13020 if (TREE_CODE (arg0) == BIT_AND_EXPR
13021 && integer_zerop (arg1))
13023 tree arg00 = TREE_OPERAND (arg0, 0);
13024 tree arg01 = TREE_OPERAND (arg0, 1);
13025 if (TREE_CODE (arg00) == LSHIFT_EXPR
13026 && integer_onep (TREE_OPERAND (arg00, 0)))
13028 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13029 arg01, TREE_OPERAND (arg00, 1));
13030 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13031 build_int_cst (TREE_TYPE (arg0), 1));
13032 return fold_build2_loc (loc, code, type,
13033 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13034 arg1);
13036 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13037 && integer_onep (TREE_OPERAND (arg01, 0)))
13039 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13040 arg00, TREE_OPERAND (arg01, 1));
13041 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13042 build_int_cst (TREE_TYPE (arg0), 1));
13043 return fold_build2_loc (loc, code, type,
13044 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13045 arg1);
13049 /* If this is an NE or EQ comparison of zero against the result of a
13050 signed MOD operation whose second operand is a power of 2, make
13051 the MOD operation unsigned since it is simpler and equivalent. */
13052 if (integer_zerop (arg1)
13053 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13054 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13055 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13056 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13057 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13058 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13060 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13061 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13062 fold_convert_loc (loc, newtype,
13063 TREE_OPERAND (arg0, 0)),
13064 fold_convert_loc (loc, newtype,
13065 TREE_OPERAND (arg0, 1)));
13067 return fold_build2_loc (loc, code, type, newmod,
13068 fold_convert_loc (loc, newtype, arg1));
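/* E.g. for signed x, "x % 4 == 0" is tested as "(unsigned) x % 4 == 0":
   with a power-of-two modulus the unsigned residue is zero exactly when
   the signed one is, and the unsigned MOD can then be reduced to a mask
   test.  */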
13071 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13072 C1 is a valid shift constant, and C2 is a power of two, i.e.
13073 a single bit. */
13074 if (TREE_CODE (arg0) == BIT_AND_EXPR
13075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13076 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13077 == INTEGER_CST
13078 && integer_pow2p (TREE_OPERAND (arg0, 1))
13079 && integer_zerop (arg1))
13081 tree itype = TREE_TYPE (arg0);
13082 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13083 prec = TYPE_PRECISION (itype);
13085 /* Check for a valid shift count. */
13086 if (wi::ltu_p (arg001, prec))
13088 tree arg01 = TREE_OPERAND (arg0, 1);
13089 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13090 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13091 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13092 can be rewritten as (X & (C2 << C1)) != 0. */
13093 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13095 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13096 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13097 return fold_build2_loc (loc, code, type, tem,
13098 fold_convert_loc (loc, itype, arg1));
13100 /* Otherwise, for signed (arithmetic) shifts,
13101 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13102 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13103 else if (!TYPE_UNSIGNED (itype))
13104 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13105 arg000, build_int_cst (itype, 0));
13106 /* Otherwise, for unsigned (logical) shifts,
13107 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13108 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13109 else
13110 return omit_one_operand_loc (loc, type,
13111 code == EQ_EXPR ? integer_one_node
13112 : integer_zero_node,
13113 arg000);
13117 /* If we have (A & C) == C where C is a power of 2, convert this into
13118 (A & C) != 0. Similarly for NE_EXPR. */
13119 if (TREE_CODE (arg0) == BIT_AND_EXPR
13120 && integer_pow2p (TREE_OPERAND (arg0, 1))
13121 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13122 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13123 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13124 integer_zero_node));
13126 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13127 bit, then fold the expression into A < 0 or A >= 0. */
13128 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13129 if (tem)
13130 return tem;
13132 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13133 Similarly for NE_EXPR. */
13134 if (TREE_CODE (arg0) == BIT_AND_EXPR
13135 && TREE_CODE (arg1) == INTEGER_CST
13136 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13138 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13139 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13140 TREE_OPERAND (arg0, 1));
13141 tree dandnotc
13142 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13143 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13144 notc);
13145 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13146 if (integer_nonzerop (dandnotc))
13147 return omit_one_operand_loc (loc, type, rslt, arg0);
13150 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13151 Similarly for NE_EXPR. */
13152 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13153 && TREE_CODE (arg1) == INTEGER_CST
13154 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13156 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13157 tree candnotd
13158 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13159 TREE_OPERAND (arg0, 1),
13160 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13161 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13162 if (integer_nonzerop (candnotd))
13163 return omit_one_operand_loc (loc, type, rslt, arg0);
13166 /* If this is a comparison of a field, we may be able to simplify it. */
13167 if ((TREE_CODE (arg0) == COMPONENT_REF
13168 || TREE_CODE (arg0) == BIT_FIELD_REF)
13169 /* Handle the constant case even without -O
13170 to make sure the warnings are given. */
13171 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13173 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13174 if (t1)
13175 return t1;
13178 /* Optimize comparisons of strlen vs zero to a compare of the
13179 first character of the string vs zero. To wit,
13180 strlen(ptr) == 0 => *ptr == 0
13181 strlen(ptr) != 0 => *ptr != 0
13182 Other cases should reduce to one of these two (or a constant)
13183 due to the return value of strlen being unsigned. */
13184 if (TREE_CODE (arg0) == CALL_EXPR
13185 && integer_zerop (arg1))
13187 tree fndecl = get_callee_fndecl (arg0);
13189 if (fndecl
13190 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13191 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13192 && call_expr_nargs (arg0) == 1
13193 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13195 tree iref = build_fold_indirect_ref_loc (loc,
13196 CALL_EXPR_ARG (arg0, 0));
13197 return fold_build2_loc (loc, code, type, iref,
13198 build_int_cst (TREE_TYPE (iref), 0));
13202 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13203 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13204 if (TREE_CODE (arg0) == RSHIFT_EXPR
13205 && integer_zerop (arg1)
13206 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13208 tree arg00 = TREE_OPERAND (arg0, 0);
13209 tree arg01 = TREE_OPERAND (arg0, 1);
13210 tree itype = TREE_TYPE (arg00);
13211 if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
13213 if (TYPE_UNSIGNED (itype))
13215 itype = signed_type_for (itype);
13216 arg00 = fold_convert_loc (loc, itype, arg00);
13218 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13219 type, arg00, build_zero_cst (itype));
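/* E.g. for 32-bit int x, "(x >> 31) != 0" folds to "x < 0" and
   "(x >> 31) == 0" folds to "x >= 0".  */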
13223 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13224 if (integer_zerop (arg1)
13225 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13226 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13227 TREE_OPERAND (arg0, 1));
13229 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13230 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13231 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13232 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13233 build_zero_cst (TREE_TYPE (arg0)));
13234 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13235 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13236 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13237 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13238 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13239 build_zero_cst (TREE_TYPE (arg0)));
13241 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13242 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13243 && TREE_CODE (arg1) == INTEGER_CST
13244 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13245 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13246 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13247 TREE_OPERAND (arg0, 1), arg1));
13249 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13250 (X & C) == 0 when C is a single bit. */
13251 if (TREE_CODE (arg0) == BIT_AND_EXPR
13252 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13253 && integer_zerop (arg1)
13254 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13256 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13257 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13258 TREE_OPERAND (arg0, 1));
13259 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13260 type, tem,
13261 fold_convert_loc (loc, TREE_TYPE (arg0),
13262 arg1));
13265 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13266 constant C is a power of two, i.e. a single bit. */
13267 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13268 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13269 && integer_zerop (arg1)
13270 && integer_pow2p (TREE_OPERAND (arg0, 1))
13271 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13272 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13274 tree arg00 = TREE_OPERAND (arg0, 0);
13275 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13276 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13279 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13280 when C is a power of two, i.e. a single bit. */
13281 if (TREE_CODE (arg0) == BIT_AND_EXPR
13282 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13283 && integer_zerop (arg1)
13284 && integer_pow2p (TREE_OPERAND (arg0, 1))
13285 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13286 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13288 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13289 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13290 arg000, TREE_OPERAND (arg0, 1));
13291 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13292 tem, build_int_cst (TREE_TYPE (tem), 0));
13295 if (integer_zerop (arg1)
13296 && tree_expr_nonzero_p (arg0))
13298 tree res = constant_boolean_node (code==NE_EXPR, type);
13299 return omit_one_operand_loc (loc, type, res, arg0);
13302 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13303 if (TREE_CODE (arg0) == NEGATE_EXPR
13304 && TREE_CODE (arg1) == NEGATE_EXPR)
13305 return fold_build2_loc (loc, code, type,
13306 TREE_OPERAND (arg0, 0),
13307 fold_convert_loc (loc, TREE_TYPE (arg0),
13308 TREE_OPERAND (arg1, 0)));
13310 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
13311 if (TREE_CODE (arg0) == BIT_AND_EXPR
13312 && TREE_CODE (arg1) == BIT_AND_EXPR)
13314 tree arg00 = TREE_OPERAND (arg0, 0);
13315 tree arg01 = TREE_OPERAND (arg0, 1);
13316 tree arg10 = TREE_OPERAND (arg1, 0);
13317 tree arg11 = TREE_OPERAND (arg1, 1);
13318 tree itype = TREE_TYPE (arg0);
13320 if (operand_equal_p (arg01, arg11, 0))
13321 return fold_build2_loc (loc, code, type,
13322 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13323 fold_build2_loc (loc,
13324 BIT_XOR_EXPR, itype,
13325 arg00, arg10),
13326 arg01),
13327 build_zero_cst (itype));
13329 if (operand_equal_p (arg01, arg10, 0))
13330 return fold_build2_loc (loc, code, type,
13331 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13332 fold_build2_loc (loc,
13333 BIT_XOR_EXPR, itype,
13334 arg00, arg11),
13335 arg01),
13336 build_zero_cst (itype));
13338 if (operand_equal_p (arg00, arg11, 0))
13339 return fold_build2_loc (loc, code, type,
13340 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13341 fold_build2_loc (loc,
13342 BIT_XOR_EXPR, itype,
13343 arg01, arg10),
13344 arg00),
13345 build_zero_cst (itype));
13347 if (operand_equal_p (arg00, arg10, 0))
13348 return fold_build2_loc (loc, code, type,
13349 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13350 fold_build2_loc (loc,
13351 BIT_XOR_EXPR, itype,
13352 arg01, arg11),
13353 arg00),
13354 build_zero_cst (itype));
13357 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13358 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13360 tree arg00 = TREE_OPERAND (arg0, 0);
13361 tree arg01 = TREE_OPERAND (arg0, 1);
13362 tree arg10 = TREE_OPERAND (arg1, 0);
13363 tree arg11 = TREE_OPERAND (arg1, 1);
13364 tree itype = TREE_TYPE (arg0);
13366 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13367 operand_equal_p guarantees no side-effects so we don't need
13368 to use omit_one_operand on Z. */
13369 if (operand_equal_p (arg01, arg11, 0))
13370 return fold_build2_loc (loc, code, type, arg00,
13371 fold_convert_loc (loc, TREE_TYPE (arg00),
13372 arg10));
13373 if (operand_equal_p (arg01, arg10, 0))
13374 return fold_build2_loc (loc, code, type, arg00,
13375 fold_convert_loc (loc, TREE_TYPE (arg00),
13376 arg11));
13377 if (operand_equal_p (arg00, arg11, 0))
13378 return fold_build2_loc (loc, code, type, arg01,
13379 fold_convert_loc (loc, TREE_TYPE (arg01),
13380 arg10));
13381 if (operand_equal_p (arg00, arg10, 0))
13382 return fold_build2_loc (loc, code, type, arg01,
13383 fold_convert_loc (loc, TREE_TYPE (arg01),
13384 arg11));
13386 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13387 if (TREE_CODE (arg01) == INTEGER_CST
13388 && TREE_CODE (arg11) == INTEGER_CST)
13390 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13391 fold_convert_loc (loc, itype, arg11));
13392 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13393 return fold_build2_loc (loc, code, type, tem,
13394 fold_convert_loc (loc, itype, arg10));
13398 /* Attempt to simplify equality/inequality comparisons of complex
13399 values. Only lower the comparison if the result is known or
13400 can be simplified to a single scalar comparison. */
13401 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13402 || TREE_CODE (arg0) == COMPLEX_CST)
13403 && (TREE_CODE (arg1) == COMPLEX_EXPR
13404 || TREE_CODE (arg1) == COMPLEX_CST))
13406 tree real0, imag0, real1, imag1;
13407 tree rcond, icond;
13409 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13411 real0 = TREE_OPERAND (arg0, 0);
13412 imag0 = TREE_OPERAND (arg0, 1);
13414 else
13416 real0 = TREE_REALPART (arg0);
13417 imag0 = TREE_IMAGPART (arg0);
13420 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13422 real1 = TREE_OPERAND (arg1, 0);
13423 imag1 = TREE_OPERAND (arg1, 1);
13425 else
13427 real1 = TREE_REALPART (arg1);
13428 imag1 = TREE_IMAGPART (arg1);
13431 rcond = fold_binary_loc (loc, code, type, real0, real1);
13432 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13434 if (integer_zerop (rcond))
13436 if (code == EQ_EXPR)
13437 return omit_two_operands_loc (loc, type, boolean_false_node,
13438 imag0, imag1);
13439 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13441 else
13443 if (code == NE_EXPR)
13444 return omit_two_operands_loc (loc, type, boolean_true_node,
13445 imag0, imag1);
13446 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13450 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13451 if (icond && TREE_CODE (icond) == INTEGER_CST)
13453 if (integer_zerop (icond))
13455 if (code == EQ_EXPR)
13456 return omit_two_operands_loc (loc, type, boolean_false_node,
13457 real0, real1);
13458 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13460 else
13462 if (code == NE_EXPR)
13463 return omit_two_operands_loc (loc, type, boolean_true_node,
13464 real0, real1);
13465 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13470 return NULL_TREE;
13472 case LT_EXPR:
13473 case GT_EXPR:
13474 case LE_EXPR:
13475 case GE_EXPR:
13476 tem = fold_comparison (loc, code, type, op0, op1);
13477 if (tem != NULL_TREE)
13478 return tem;
13480 /* Transform comparisons of the form X +- C CMP X. */
13481 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13482 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13483 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13484 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13485 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13486 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13488 tree arg01 = TREE_OPERAND (arg0, 1);
13489 enum tree_code code0 = TREE_CODE (arg0);
13490 int is_positive;
13492 if (TREE_CODE (arg01) == REAL_CST)
13493 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13494 else
13495 is_positive = tree_int_cst_sgn (arg01);
13497 /* (X - c) > X becomes false. */
13498 if (code == GT_EXPR
13499 && ((code0 == MINUS_EXPR && is_positive >= 0)
13500 || (code0 == PLUS_EXPR && is_positive <= 0)))
13502 if (TREE_CODE (arg01) == INTEGER_CST
13503 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13504 fold_overflow_warning (("assuming signed overflow does not "
13505 "occur when assuming that (X - c) > X "
13506 "is always false"),
13507 WARN_STRICT_OVERFLOW_ALL);
13508 return constant_boolean_node (0, type);
13511 /* Likewise (X + c) < X becomes false. */
13512 if (code == LT_EXPR
13513 && ((code0 == PLUS_EXPR && is_positive >= 0)
13514 || (code0 == MINUS_EXPR && is_positive <= 0)))
13516 if (TREE_CODE (arg01) == INTEGER_CST
13517 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13518 fold_overflow_warning (("assuming signed overflow does not "
13519 "occur when assuming that "
13520 "(X + c) < X is always false"),
13521 WARN_STRICT_OVERFLOW_ALL);
13522 return constant_boolean_node (0, type);
13525 /* Convert (X - c) <= X to true. */
13526 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13527 && code == LE_EXPR
13528 && ((code0 == MINUS_EXPR && is_positive >= 0)
13529 || (code0 == PLUS_EXPR && is_positive <= 0)))
13531 if (TREE_CODE (arg01) == INTEGER_CST
13532 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13533 fold_overflow_warning (("assuming signed overflow does not "
13534 "occur when assuming that "
13535 "(X - c) <= X is always true"),
13536 WARN_STRICT_OVERFLOW_ALL);
13537 return constant_boolean_node (1, type);
13540 /* Convert (X + c) >= X to true. */
13541 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13542 && code == GE_EXPR
13543 && ((code0 == PLUS_EXPR && is_positive >= 0)
13544 || (code0 == MINUS_EXPR && is_positive <= 0)))
13546 if (TREE_CODE (arg01) == INTEGER_CST
13547 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13548 fold_overflow_warning (("assuming signed overflow does not "
13549 "occur when assuming that "
13550 "(X + c) >= X is always true"),
13551 WARN_STRICT_OVERFLOW_ALL);
13552 return constant_boolean_node (1, type);
13555 if (TREE_CODE (arg01) == INTEGER_CST)
13557 /* Convert X + c > X and X - c < X to true for integers. */
13558 if (code == GT_EXPR
13559 && ((code0 == PLUS_EXPR && is_positive > 0)
13560 || (code0 == MINUS_EXPR && is_positive < 0)))
13562 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13563 fold_overflow_warning (("assuming signed overflow does "
13564 "not occur when assuming that "
13565 "(X + c) > X is always true"),
13566 WARN_STRICT_OVERFLOW_ALL);
13567 return constant_boolean_node (1, type);
13570 if (code == LT_EXPR
13571 && ((code0 == MINUS_EXPR && is_positive > 0)
13572 || (code0 == PLUS_EXPR && is_positive < 0)))
13574 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13575 fold_overflow_warning (("assuming signed overflow does "
13576 "not occur when assuming that "
13577 "(X - c) < X is always true"),
13578 WARN_STRICT_OVERFLOW_ALL);
13579 return constant_boolean_node (1, type);
13582 /* Convert X + c <= X and X - c >= X to false for integers. */
13583 if (code == LE_EXPR
13584 && ((code0 == PLUS_EXPR && is_positive > 0)
13585 || (code0 == MINUS_EXPR && is_positive < 0)))
13587 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13588 fold_overflow_warning (("assuming signed overflow does "
13589 "not occur when assuming that "
13590 "(X + c) <= X is always false"),
13591 WARN_STRICT_OVERFLOW_ALL);
13592 return constant_boolean_node (0, type);
13595 if (code == GE_EXPR
13596 && ((code0 == MINUS_EXPR && is_positive > 0)
13597 || (code0 == PLUS_EXPR && is_positive < 0)))
13599 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13600 fold_overflow_warning (("assuming signed overflow does "
13601 "not occur when assuming that "
13602 "(X - c) >= X is always false"),
13603 WARN_STRICT_OVERFLOW_ALL);
13604 return constant_boolean_node (0, type);
13609 /* Comparisons with the highest or lowest possible integer of
13610 the specified precision will have known values. */
13612 tree arg1_type = TREE_TYPE (arg1);
13613 unsigned int prec = TYPE_PRECISION (arg1_type);
13615 if (TREE_CODE (arg1) == INTEGER_CST
13616 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13618 wide_int max = wi::max_value (arg1_type);
13619 wide_int signed_max = wi::max_value (prec, SIGNED);
13620 wide_int min = wi::min_value (arg1_type);
13622 if (wi::eq_p (arg1, max))
13623 switch (code)
13625 case GT_EXPR:
13626 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13628 case GE_EXPR:
13629 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13631 case LE_EXPR:
13632 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13634 case LT_EXPR:
13635 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13637 /* The GE_EXPR and LT_EXPR cases above are not normally
13638 reached because of previous transformations. */
13640 default:
13641 break;
13643 else if (wi::eq_p (arg1, max - 1))
13644 switch (code)
13646 case GT_EXPR:
13647 arg1 = const_binop (PLUS_EXPR, arg1,
13648 build_int_cst (TREE_TYPE (arg1), 1));
13649 return fold_build2_loc (loc, EQ_EXPR, type,
13650 fold_convert_loc (loc,
13651 TREE_TYPE (arg1), arg0),
13652 arg1);
13653 case LE_EXPR:
13654 arg1 = const_binop (PLUS_EXPR, arg1,
13655 build_int_cst (TREE_TYPE (arg1), 1));
13656 return fold_build2_loc (loc, NE_EXPR, type,
13657 fold_convert_loc (loc, TREE_TYPE (arg1),
13658 arg0),
13659 arg1);
13660 default:
13661 break;
13663 else if (wi::eq_p (arg1, min))
13664 switch (code)
13666 case LT_EXPR:
13667 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13669 case LE_EXPR:
13670 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13672 case GE_EXPR:
13673 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13675 case GT_EXPR:
13676 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13678 default:
13679 break;
13681 else if (wi::eq_p (arg1, min + 1))
13682 switch (code)
13684 case GE_EXPR:
13685 arg1 = const_binop (MINUS_EXPR, arg1,
13686 build_int_cst (TREE_TYPE (arg1), 1));
13687 return fold_build2_loc (loc, NE_EXPR, type,
13688 fold_convert_loc (loc,
13689 TREE_TYPE (arg1), arg0),
13690 arg1);
13691 case LT_EXPR:
13692 arg1 = const_binop (MINUS_EXPR, arg1,
13693 build_int_cst (TREE_TYPE (arg1), 1));
13694 return fold_build2_loc (loc, EQ_EXPR, type,
13695 fold_convert_loc (loc, TREE_TYPE (arg1),
13696 arg0),
13697 arg1);
13698 default:
13699 break;
13702 else if (wi::eq_p (arg1, signed_max)
13703 && TYPE_UNSIGNED (arg1_type)
13704 /* We will flip the signedness of the comparison operator
13705 associated with the mode of arg1, so the sign bit is
13706 specified by this mode. Check that arg1 is the signed
13707 max associated with this sign bit. */
13708 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13709 /* signed_type does not work on pointer types. */
13710 && INTEGRAL_TYPE_P (arg1_type))
13712 /* The following case also applies to X < signed_max+1
13713 and X >= signed_max+1 because of previous transformations. */
13714 if (code == LE_EXPR || code == GT_EXPR)
13716 tree st = signed_type_for (arg1_type);
13717 return fold_build2_loc (loc,
13718 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13719 type, fold_convert_loc (loc, st, arg0),
13720 build_int_cst (st, 0));
13726 /* If we are comparing an ABS_EXPR with a constant, we can
13727 convert all the cases into explicit comparisons, but they may
13728 well not be faster than doing the ABS and one comparison.
13729 But ABS (X) <= C is a range comparison, which becomes a subtraction
13730 and a comparison, and is probably faster. */
13731 if (code == LE_EXPR
13732 && TREE_CODE (arg1) == INTEGER_CST
13733 && TREE_CODE (arg0) == ABS_EXPR
13734 && ! TREE_SIDE_EFFECTS (arg0)
13735 && (0 != (tem = negate_expr (arg1)))
13736 && TREE_CODE (tem) == INTEGER_CST
13737 && !TREE_OVERFLOW (tem))
13738 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13739 build2 (GE_EXPR, type,
13740 TREE_OPERAND (arg0, 0), tem),
13741 build2 (LE_EXPR, type,
13742 TREE_OPERAND (arg0, 0), arg1));
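/* E.g. "abs (x) <= 5" becomes "x >= -5 && x <= 5", the range form
   described above.  */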
13744 /* Convert ABS_EXPR<x> >= 0 to true. */
13745 strict_overflow_p = false;
13746 if (code == GE_EXPR
13747 && (integer_zerop (arg1)
13748 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13749 && real_zerop (arg1)))
13750 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13752 if (strict_overflow_p)
13753 fold_overflow_warning (("assuming signed overflow does not occur "
13754 "when simplifying comparison of "
13755 "absolute value and zero"),
13756 WARN_STRICT_OVERFLOW_CONDITIONAL);
13757 return omit_one_operand_loc (loc, type,
13758 constant_boolean_node (true, type),
13759 arg0);
13762 /* Convert ABS_EXPR<x> < 0 to false. */
13763 strict_overflow_p = false;
13764 if (code == LT_EXPR
13765 && (integer_zerop (arg1) || real_zerop (arg1))
13766 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13768 if (strict_overflow_p)
13769 fold_overflow_warning (("assuming signed overflow does not occur "
13770 "when simplifying comparison of "
13771 "absolute value and zero"),
13772 WARN_STRICT_OVERFLOW_CONDITIONAL);
13773 return omit_one_operand_loc (loc, type,
13774 constant_boolean_node (false, type),
13775 arg0);
13778 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13779 and similarly for >= into !=. */
13780 if ((code == LT_EXPR || code == GE_EXPR)
13781 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13782 && TREE_CODE (arg1) == LSHIFT_EXPR
13783 && integer_onep (TREE_OPERAND (arg1, 0)))
13784 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13785 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13786 TREE_OPERAND (arg1, 1)),
13787 build_zero_cst (TREE_TYPE (arg0)));
13789 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13790 otherwise Y might be >= # of bits in X's type and thus e.g.
13791 (unsigned char) (1 << Y) for Y == 15 might be 0.
13792 If the cast is widening, then 1 << Y should have unsigned type,
13793 otherwise if Y is number of bits in the signed shift type minus 1,
13794 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13795 Y == 31 might be 0xffffffff80000000. */
13796 if ((code == LT_EXPR || code == GE_EXPR)
13797 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13798 && CONVERT_EXPR_P (arg1)
13799 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13800 && (TYPE_PRECISION (TREE_TYPE (arg1))
13801 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13802 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13803 || (TYPE_PRECISION (TREE_TYPE (arg1))
13804 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13805 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13807 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13808 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13809 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13810 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13811 build_zero_cst (TREE_TYPE (arg0)));
13814 return NULL_TREE;
13816 case UNORDERED_EXPR:
13817 case ORDERED_EXPR:
13818 case UNLT_EXPR:
13819 case UNLE_EXPR:
13820 case UNGT_EXPR:
13821 case UNGE_EXPR:
13822 case UNEQ_EXPR:
13823 case LTGT_EXPR:
13824 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13826 t1 = fold_relational_const (code, type, arg0, arg1);
13827 if (t1 != NULL_TREE)
13828 return t1;
13831 /* If the first operand is NaN, the result is constant. */
13832 if (TREE_CODE (arg0) == REAL_CST
13833 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13834 && (code != LTGT_EXPR || ! flag_trapping_math))
13836 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13837 ? integer_zero_node
13838 : integer_one_node;
13839 return omit_one_operand_loc (loc, type, t1, arg1);
13842 /* If the second operand is NaN, the result is constant. */
13843 if (TREE_CODE (arg1) == REAL_CST
13844 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13845 && (code != LTGT_EXPR || ! flag_trapping_math))
13847 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13848 ? integer_zero_node
13849 : integer_one_node;
13850 return omit_one_operand_loc (loc, type, t1, arg0);
13853 /* Simplify unordered comparison of something with itself. */
13854 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13855 && operand_equal_p (arg0, arg1, 0))
13856 return constant_boolean_node (1, type);
13858 if (code == LTGT_EXPR
13859 && !flag_trapping_math
13860 && operand_equal_p (arg0, arg1, 0))
13861 return constant_boolean_node (0, type);
13863 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13865 tree targ0 = strip_float_extensions (arg0);
13866 tree targ1 = strip_float_extensions (arg1);
13867 tree newtype = TREE_TYPE (targ0);
13869 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13870 newtype = TREE_TYPE (targ1);
13872 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13873 return fold_build2_loc (loc, code, type,
13874 fold_convert_loc (loc, newtype, targ0),
13875 fold_convert_loc (loc, newtype, targ1));
13878 return NULL_TREE;
13880 case COMPOUND_EXPR:
13881 /* When pedantic, a compound expression can be neither an lvalue
13882 nor an integer constant expression. */
13883 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13884 return NULL_TREE;
13885 /* Don't let (0, 0) be a null pointer constant. */
13886 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13887 : fold_convert_loc (loc, type, arg1);
13888 return pedantic_non_lvalue_loc (loc, tem);
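/* E.g. "(x, y)" folds to "y" when x has no side effects and y is
   not a constant; the NOP_EXPR wrapper above keeps a literal zero
   result from acting as a null pointer constant. */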
13890 case COMPLEX_EXPR:
13891 if ((TREE_CODE (arg0) == REAL_CST
13892 && TREE_CODE (arg1) == REAL_CST)
13893 || (TREE_CODE (arg0) == INTEGER_CST
13894 && TREE_CODE (arg1) == INTEGER_CST))
13895 return build_complex (type, arg0, arg1);
13896 if (TREE_CODE (arg0) == REALPART_EXPR
13897 && TREE_CODE (arg1) == IMAGPART_EXPR
13898 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13899 && operand_equal_p (TREE_OPERAND (arg0, 0),
13900 TREE_OPERAND (arg1, 0), 0))
13901 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13902 TREE_OPERAND (arg1, 0));
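/* E.g. COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>> just
   reassembles z, so it folds back to z itself. */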
13903 return NULL_TREE;
13905 case ASSERT_EXPR:
13906 /* An ASSERT_EXPR should never be passed to fold_binary. */
13907 gcc_unreachable ();
13909 case VEC_PACK_TRUNC_EXPR:
13910 case VEC_PACK_FIX_TRUNC_EXPR:
13912 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13913 tree *elts;
13915 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13916 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13917 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13918 return NULL_TREE;
13920 elts = XALLOCAVEC (tree, nelts);
13921 if (!vec_cst_ctor_to_array (arg0, elts)
13922 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13923 return NULL_TREE;
13925 for (i = 0; i < nelts; i++)
13927 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13928 ? NOP_EXPR : FIX_TRUNC_EXPR,
13929 TREE_TYPE (type), elts[i]);
13930 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13931 return NULL_TREE;
13934 return build_vector (type, elts);
13937 case VEC_WIDEN_MULT_LO_EXPR:
13938 case VEC_WIDEN_MULT_HI_EXPR:
13939 case VEC_WIDEN_MULT_EVEN_EXPR:
13940 case VEC_WIDEN_MULT_ODD_EXPR:
13942 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13943 unsigned int out, ofs, scale;
13944 tree *elts;
13946 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13947 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13948 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13949 return NULL_TREE;
13951 elts = XALLOCAVEC (tree, nelts * 4);
13952 if (!vec_cst_ctor_to_array (arg0, elts)
13953 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13954 return NULL_TREE;
13956 if (code == VEC_WIDEN_MULT_LO_EXPR)
13957 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13958 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13959 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13960 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13961 scale = 1, ofs = 0;
13962 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13963 scale = 1, ofs = 1;
13965 for (out = 0; out < nelts; out++)
13967 unsigned int in1 = (out << scale) + ofs;
13968 unsigned int in2 = in1 + nelts * 2;
13969 tree t1, t2;
13971 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13972 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13974 if (t1 == NULL_TREE || t2 == NULL_TREE)
13975 return NULL_TREE;
13976 elts[out] = const_binop (MULT_EXPR, t1, t2);
13977 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13978 return NULL_TREE;
13981 return build_vector (type, elts);
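/* E.g. for V4HI inputs {a0,a1,a2,a3} and {b0,b1,b2,b3} widened to
   V2SI, VEC_WIDEN_MULT_EVEN_EXPR yields {a0*b0, a2*b2} and
   VEC_WIDEN_MULT_ODD_EXPR yields {a1*b1, a3*b3}, each product
   computed in the wider element type. */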
13984 default:
13985 return NULL_TREE;
13986 } /* switch (code) */
13989 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13990 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13991 of GOTO_EXPR. */
13993 static tree
13994 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13996 switch (TREE_CODE (*tp))
13998 case LABEL_EXPR:
13999 return *tp;
14001 case GOTO_EXPR:
14002 *walk_subtrees = 0;
14004 /* ... fall through ... */
14006 default:
14007 return NULL_TREE;
14011 /* Return whether the sub-tree ST contains a label which is accessible from
14012 outside the sub-tree. */
14014 static bool
14015 contains_label_p (tree st)
14017 return
14018 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14021 /* Fold a ternary expression of code CODE and type TYPE with operands
14022 OP0, OP1, and OP2. Return the folded expression if folding is
14023 successful. Otherwise, return NULL_TREE. */
14025 tree
14026 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14027 tree op0, tree op1, tree op2)
14029 tree tem;
14030 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14031 enum tree_code_class kind = TREE_CODE_CLASS (code);
14033 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14034 && TREE_CODE_LENGTH (code) == 3);
14036 /* Strip any conversions that don't change the mode. This is safe
14037 for every expression, except for a comparison expression because
14038 its signedness is derived from its operands. So, in the latter
14039 case, only strip conversions that don't change the signedness.
14041 Note that this is done as an internal manipulation within the
14042 constant folder, in order to find the simplest representation of
14043 the arguments so that their form can be studied. In any case,
14044 the appropriate type conversions should be put back in the tree
14045 that will get out of the constant folder. */
14046 if (op0)
14048 arg0 = op0;
14049 STRIP_NOPS (arg0);
14052 if (op1)
14054 arg1 = op1;
14055 STRIP_NOPS (arg1);
14058 if (op2)
14060 arg2 = op2;
14061 STRIP_NOPS (arg2);
14064 switch (code)
14066 case COMPONENT_REF:
14067 if (TREE_CODE (arg0) == CONSTRUCTOR
14068 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14070 unsigned HOST_WIDE_INT idx;
14071 tree field, value;
14072 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14073 if (field == arg1)
14074 return value;
14076 return NULL_TREE;
14078 case COND_EXPR:
14079 case VEC_COND_EXPR:
14080 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14081 so all simple results must be passed through pedantic_non_lvalue. */
14082 if (TREE_CODE (arg0) == INTEGER_CST)
14084 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14085 tem = integer_zerop (arg0) ? op2 : op1;
14086 /* Only optimize constant conditions when the selected branch
14087 has the same type as the COND_EXPR. This avoids optimizing
14088 away "c ? x : throw", where the throw has a void type.
14089 Avoid throwing away an operand that contains a label. */
14090 if ((!TREE_SIDE_EFFECTS (unused_op)
14091 || !contains_label_p (unused_op))
14092 && (! VOID_TYPE_P (TREE_TYPE (tem))
14093 || VOID_TYPE_P (type)))
14094 return pedantic_non_lvalue_loc (loc, tem);
14095 return NULL_TREE;
14097 else if (TREE_CODE (arg0) == VECTOR_CST)
14099 if (integer_all_onesp (arg0))
14100 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14101 if (integer_zerop (arg0))
14102 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14104 if ((TREE_CODE (arg1) == VECTOR_CST
14105 || TREE_CODE (arg1) == CONSTRUCTOR)
14106 && (TREE_CODE (arg2) == VECTOR_CST
14107 || TREE_CODE (arg2) == CONSTRUCTOR))
14109 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14110 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14111 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14112 for (i = 0; i < nelts; i++)
14114 tree val = VECTOR_CST_ELT (arg0, i);
14115 if (integer_all_onesp (val))
14116 sel[i] = i;
14117 else if (integer_zerop (val))
14118 sel[i] = nelts + i;
14119 else /* Currently unreachable. */
14120 return NULL_TREE;
14122 tree t = fold_vec_perm (type, arg1, arg2, sel);
14123 if (t != NULL_TREE)
14124 return t;
14128 if (operand_equal_p (arg1, op2, 0))
14129 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14131 /* If we have A op B ? A : C, we may be able to convert this to a
14132 simpler expression, depending on the operation and the values
14133 of B and C. Signed zeros prevent all of these transformations,
14134 for reasons given above each one.
14136 Also try swapping the arguments and inverting the conditional. */
14137 if (COMPARISON_CLASS_P (arg0)
14138 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14139 arg1, TREE_OPERAND (arg0, 1))
14140 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14142 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14143 if (tem)
14144 return tem;
14147 if (COMPARISON_CLASS_P (arg0)
14148 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14149 op2,
14150 TREE_OPERAND (arg0, 1))
14151 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14153 location_t loc0 = expr_location_or (arg0, loc);
14154 tem = fold_invert_truthvalue (loc0, arg0);
14155 if (tem && COMPARISON_CLASS_P (tem))
14157 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14158 if (tem)
14159 return tem;
14163 /* If the second operand is simpler than the third, swap them
14164 since that produces better jump optimization results. */
14165 if (truth_value_p (TREE_CODE (arg0))
14166 && tree_swap_operands_p (op1, op2, false))
14168 location_t loc0 = expr_location_or (arg0, loc);
14169 /* See if this can be inverted. If it can't, possibly because
14170 it was a floating-point inequality comparison, don't do
14171 anything. */
14172 tem = fold_invert_truthvalue (loc0, arg0);
14173 if (tem)
14174 return fold_build3_loc (loc, code, type, tem, op2, op1);
14177 /* Convert A ? 1 : 0 to simply A. */
14178 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14179 : (integer_onep (op1)
14180 && !VECTOR_TYPE_P (type)))
14181 && integer_zerop (op2)
14182 /* If we try to convert OP0 to our type, the
14183 call to fold will try to move the conversion inside
14184 a COND, which will recurse. In that case, the COND_EXPR
14185 is probably the best choice, so leave it alone. */
14186 && type == TREE_TYPE (arg0))
14187 return pedantic_non_lvalue_loc (loc, arg0);
14189 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14190 over COND_EXPR in cases such as floating point comparisons. */
14191 if (integer_zerop (op1)
14192 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14193 : (integer_onep (op2)
14194 && !VECTOR_TYPE_P (type)))
14195 && truth_value_p (TREE_CODE (arg0)))
14196 return pedantic_non_lvalue_loc (loc,
14197 fold_convert_loc (loc, type,
14198 invert_truthvalue_loc (loc,
14199 arg0)));
14201 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14202 if (TREE_CODE (arg0) == LT_EXPR
14203 && integer_zerop (TREE_OPERAND (arg0, 1))
14204 && integer_zerop (op2)
14205 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14207 /* sign_bit_p looks through both zero and sign extensions,
14208 but for this optimization only sign extensions are
14209 usable. */
14210 tree tem2 = TREE_OPERAND (arg0, 0);
14211 while (tem != tem2)
14213 if (TREE_CODE (tem2) != NOP_EXPR
14214 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14216 tem = NULL_TREE;
14217 break;
14219 tem2 = TREE_OPERAND (tem2, 0);
14221 /* sign_bit_p only checks ARG1 bits within A's precision.
14222 If <sign bit of A> has wider type than A, bits outside
14223 of A's precision in <sign bit of A> need to be checked.
14224 If they are all 0, this optimization needs to be done
14225 in unsigned A's type; if they are all 1, in signed A's type;
14226 otherwise this can't be done. */
14227 if (tem
14228 && TYPE_PRECISION (TREE_TYPE (tem))
14229 < TYPE_PRECISION (TREE_TYPE (arg1))
14230 && TYPE_PRECISION (TREE_TYPE (tem))
14231 < TYPE_PRECISION (type))
14233 int inner_width, outer_width;
14234 tree tem_type;
14236 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14237 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14238 if (outer_width > TYPE_PRECISION (type))
14239 outer_width = TYPE_PRECISION (type);
14241 wide_int mask = wi::shifted_mask
14242 (inner_width, outer_width - inner_width, false,
14243 TYPE_PRECISION (TREE_TYPE (arg1)));
14245 wide_int common = mask & arg1;
14246 if (common == mask)
14248 tem_type = signed_type_for (TREE_TYPE (tem));
14249 tem = fold_convert_loc (loc, tem_type, tem);
14251 else if (common == 0)
14253 tem_type = unsigned_type_for (TREE_TYPE (tem));
14254 tem = fold_convert_loc (loc, tem_type, tem);
14256 else
14257 tem = NULL;
14260 if (tem)
14261 return
14262 fold_convert_loc (loc, type,
14263 fold_build2_loc (loc, BIT_AND_EXPR,
14264 TREE_TYPE (tem), tem,
14265 fold_convert_loc (loc,
14266 TREE_TYPE (tem),
14267 arg1)));
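/* E.g. for 32-bit int x, "x < 0 ? INT_MIN : 0" folds to
   "x & INT_MIN", i.e. to x with all but the sign bit cleared. */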
14270 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14271 already handled above. */
14272 if (TREE_CODE (arg0) == BIT_AND_EXPR
14273 && integer_onep (TREE_OPERAND (arg0, 1))
14274 && integer_zerop (op2)
14275 && integer_pow2p (arg1))
14277 tree tem = TREE_OPERAND (arg0, 0);
14278 STRIP_NOPS (tem);
14279 if (TREE_CODE (tem) == RSHIFT_EXPR
14280 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
14281 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14282 tree_to_uhwi (TREE_OPERAND (tem, 1)))
14283 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14284 TREE_OPERAND (tem, 0), arg1);
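/* E.g. "((x >> 3) & 1) ? 8 : 0" folds to "x & 8", testing and
   reproducing bit 3 of x directly. */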
14287 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14288 is probably obsolete because the first operand should be a
14289 truth value (that's why we have the two cases above), but let's
14290 leave it in until we can confirm this for all front-ends. */
14291 if (integer_zerop (op2)
14292 && TREE_CODE (arg0) == NE_EXPR
14293 && integer_zerop (TREE_OPERAND (arg0, 1))
14294 && integer_pow2p (arg1)
14295 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14296 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14297 arg1, OEP_ONLY_CONST))
14298 return pedantic_non_lvalue_loc (loc,
14299 fold_convert_loc (loc, type,
14300 TREE_OPERAND (arg0, 0)));
14302 /* Disable the transformations below for vectors, since
14303 fold_binary_op_with_conditional_arg may undo them immediately,
14304 yielding an infinite loop. */
14305 if (code == VEC_COND_EXPR)
14306 return NULL_TREE;
14308 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14309 if (integer_zerop (op2)
14310 && truth_value_p (TREE_CODE (arg0))
14311 && truth_value_p (TREE_CODE (arg1))
14312 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14313 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14314 : TRUTH_ANDIF_EXPR,
14315 type, fold_convert_loc (loc, type, arg0), arg1);
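/* E.g. "(x > 0) ? (y > 0) : 0" folds to "(x > 0) && (y > 0)". */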
14317 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14318 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14319 && truth_value_p (TREE_CODE (arg0))
14320 && truth_value_p (TREE_CODE (arg1))
14321 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14323 location_t loc0 = expr_location_or (arg0, loc);
14324 /* Only perform transformation if ARG0 is easily inverted. */
14325 tem = fold_invert_truthvalue (loc0, arg0);
14326 if (tem)
14327 return fold_build2_loc (loc, code == VEC_COND_EXPR
14328 ? BIT_IOR_EXPR
14329 : TRUTH_ORIF_EXPR,
14330 type, fold_convert_loc (loc, type, tem),
14331 arg1);
14334 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14335 if (integer_zerop (arg1)
14336 && truth_value_p (TREE_CODE (arg0))
14337 && truth_value_p (TREE_CODE (op2))
14338 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14340 location_t loc0 = expr_location_or (arg0, loc);
14341 /* Only perform transformation if ARG0 is easily inverted. */
14342 tem = fold_invert_truthvalue (loc0, arg0);
14343 if (tem)
14344 return fold_build2_loc (loc, code == VEC_COND_EXPR
14345 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14346 type, fold_convert_loc (loc, type, tem),
14347 op2);
14350 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14351 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14352 && truth_value_p (TREE_CODE (arg0))
14353 && truth_value_p (TREE_CODE (op2))
14354 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14355 return fold_build2_loc (loc, code == VEC_COND_EXPR
14356 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14357 type, fold_convert_loc (loc, type, arg0), op2);
14359 return NULL_TREE;
14361 case CALL_EXPR:
14362 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14363 of fold_ternary on them. */
14364 gcc_unreachable ();
14366 case BIT_FIELD_REF:
14367 if ((TREE_CODE (arg0) == VECTOR_CST
14368 || (TREE_CODE (arg0) == CONSTRUCTOR
14369 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14370 && (type == TREE_TYPE (TREE_TYPE (arg0))
14371 || (TREE_CODE (type) == VECTOR_TYPE
14372 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14374 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14375 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14376 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14377 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14379 if (n != 0
14380 && (idx % width) == 0
14381 && (n % width) == 0
14382 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14384 idx = idx / width;
14385 n = n / width;
14387 if (TREE_CODE (arg0) == VECTOR_CST)
14389 if (n == 1)
14390 return VECTOR_CST_ELT (arg0, idx);
14392 tree *vals = XALLOCAVEC (tree, n);
14393 for (unsigned i = 0; i < n; ++i)
14394 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14395 return build_vector (type, vals);
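/* E.g. BIT_FIELD_REF <{1, 2, 3, 4}, 32, 64> of a V4SI constant
   selects the 32-bit lane at bit offset 64, i.e. the element with
   value 3. */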
14398 /* Constructor elements can be subvectors. */
14399 unsigned HOST_WIDE_INT k = 1;
14400 if (CONSTRUCTOR_NELTS (arg0) != 0)
14402 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14403 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14404 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14407 /* We keep an exact subset of the constructor elements. */
14408 if ((idx % k) == 0 && (n % k) == 0)
14410 if (CONSTRUCTOR_NELTS (arg0) == 0)
14411 return build_constructor (type, NULL);
14412 idx /= k;
14413 n /= k;
14414 if (n == 1)
14416 if (idx < CONSTRUCTOR_NELTS (arg0))
14417 return CONSTRUCTOR_ELT (arg0, idx)->value;
14418 return build_zero_cst (type);
14421 vec<constructor_elt, va_gc> *vals;
14422 vec_alloc (vals, n);
14423 for (unsigned i = 0;
14424 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14425 ++i)
14426 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14427 CONSTRUCTOR_ELT
14428 (arg0, idx + i)->value);
14429 return build_constructor (type, vals);
14431 /* The bitfield references a single constructor element. */
14432 else if (idx + n <= (idx / k + 1) * k)
14434 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14435 return build_zero_cst (type);
14436 else if (n == k)
14437 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14438 else
14439 return fold_build3_loc (loc, code, type,
14440 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14441 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14446 /* A bit-field-ref that references the full argument can be stripped. */
14447 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14448 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14449 && integer_zerop (op2))
14450 return fold_convert_loc (loc, type, arg0);
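/* E.g. BIT_FIELD_REF <x, 32, 0> of a 32-bit integer x covers all
   of x and is just x converted to the result type. */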
14452 /* On constants we can use native encode/interpret to constant
14453 fold (nearly) all BIT_FIELD_REFs. */
14454 if (CONSTANT_CLASS_P (arg0)
14455 && can_native_interpret_type_p (type)
14456 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14457 /* This limitation should not be necessary, we just need to
14458 round this up to mode size. */
14459 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14460 /* Need bit-shifting of the buffer to relax the following. */
14461 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14463 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14464 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14465 unsigned HOST_WIDE_INT clen;
14466 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14467 /* ??? We cannot tell native_encode_expr to start at
14468 some random byte only. So limit ourselves to a reasonable
14469 amount of work. */
14470 if (clen <= 4096)
14472 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14473 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14474 if (len > 0
14475 && len * BITS_PER_UNIT >= bitpos + bitsize)
14477 tree v = native_interpret_expr (type,
14478 b + bitpos / BITS_PER_UNIT,
14479 bitsize / BITS_PER_UNIT);
14480 if (v)
14481 return v;
14486 return NULL_TREE;
14488 case FMA_EXPR:
14489 /* For integers we can decompose the FMA if possible. */
14490 if (TREE_CODE (arg0) == INTEGER_CST
14491 && TREE_CODE (arg1) == INTEGER_CST)
14492 return fold_build2_loc (loc, PLUS_EXPR, type,
14493 const_binop (MULT_EXPR, arg0, arg1), arg2);
14494 if (integer_zerop (arg2))
14495 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
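/* E.g. FMA_EXPR <4, 5, c> folds to "20 + c", and
   FMA_EXPR <a, b, 0> folds to "a * b". */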
14497 return fold_fma (loc, type, arg0, arg1, arg2);
14499 case VEC_PERM_EXPR:
14500 if (TREE_CODE (arg2) == VECTOR_CST)
14502 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14503 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14504 bool need_mask_canon = false;
14505 bool all_in_vec0 = true;
14506 bool all_in_vec1 = true;
14507 bool maybe_identity = true;
14508 bool single_arg = (op0 == op1);
14509 bool changed = false;
14511 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14512 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14513 for (i = 0; i < nelts; i++)
14515 tree val = VECTOR_CST_ELT (arg2, i);
14516 if (TREE_CODE (val) != INTEGER_CST)
14517 return NULL_TREE;
14519 /* Make sure that the perm value is in an acceptable
14520 range. */
14521 wide_int t = val;
14522 if (wi::gtu_p (t, mask))
14524 need_mask_canon = true;
14525 sel[i] = t.to_uhwi () & mask;
14527 else
14528 sel[i] = t.to_uhwi ();
14530 if (sel[i] < nelts)
14531 all_in_vec1 = false;
14532 else
14533 all_in_vec0 = false;
14535 if ((sel[i] & (nelts - 1)) != i)
14536 maybe_identity = false;
14539 if (maybe_identity)
14541 if (all_in_vec0)
14542 return op0;
14543 if (all_in_vec1)
14544 return op1;
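/* E.g. on V4SI operands the mask {0,1,2,3} is the identity on op0
   and {4,5,6,7} the identity on op1. */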
14547 if (all_in_vec0)
14548 op1 = op0;
14549 else if (all_in_vec1)
14551 op0 = op1;
14552 for (i = 0; i < nelts; i++)
14553 sel[i] -= nelts;
14554 need_mask_canon = true;
14557 if ((TREE_CODE (op0) == VECTOR_CST
14558 || TREE_CODE (op0) == CONSTRUCTOR)
14559 && (TREE_CODE (op1) == VECTOR_CST
14560 || TREE_CODE (op1) == CONSTRUCTOR))
14562 tree t = fold_vec_perm (type, op0, op1, sel);
14563 if (t != NULL_TREE)
14564 return t;
14567 if (op0 == op1 && !single_arg)
14568 changed = true;
14570 if (need_mask_canon && arg2 == op2)
14572 tree *tsel = XALLOCAVEC (tree, nelts);
14573 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14574 for (i = 0; i < nelts; i++)
14575 tsel[i] = build_int_cst (eltype, sel[i]);
14576 op2 = build_vector (TREE_TYPE (arg2), tsel);
14577 changed = true;
14580 if (changed)
14581 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14583 return NULL_TREE;
14585 default:
14586 return NULL_TREE;
14587 } /* switch (code) */
14590 /* Perform constant folding and related simplification of EXPR.
14591 The related simplifications include x*1 => x, x*0 => 0, etc.,
14592 and application of the associative law.
14593 NOP_EXPR conversions may be removed freely (as long as we
14594 are careful not to change the type of the overall expression).
14595 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14596 but we can constant-fold them if they have constant operands. */
14598 #ifdef ENABLE_FOLD_CHECKING
14599 # define fold(x) fold_1 (x)
14600 static tree fold_1 (tree);
14601 static
14602 #endif
14603 tree
14604 fold (tree expr)
14606 const tree t = expr;
14607 enum tree_code code = TREE_CODE (t);
14608 enum tree_code_class kind = TREE_CODE_CLASS (code);
14609 tree tem;
14610 location_t loc = EXPR_LOCATION (expr);
14612 /* Return right away if a constant. */
14613 if (kind == tcc_constant)
14614 return t;
14616 /* CALL_EXPR-like objects with variable numbers of operands are
14617 treated specially. */
14618 if (kind == tcc_vl_exp)
14620 if (code == CALL_EXPR)
14622 tem = fold_call_expr (loc, expr, false);
14623 return tem ? tem : expr;
14625 return expr;
14628 if (IS_EXPR_CODE_CLASS (kind))
14630 tree type = TREE_TYPE (t);
14631 tree op0, op1, op2;
14633 switch (TREE_CODE_LENGTH (code))
14635 case 1:
14636 op0 = TREE_OPERAND (t, 0);
14637 tem = fold_unary_loc (loc, code, type, op0);
14638 return tem ? tem : expr;
14639 case 2:
14640 op0 = TREE_OPERAND (t, 0);
14641 op1 = TREE_OPERAND (t, 1);
14642 tem = fold_binary_loc (loc, code, type, op0, op1);
14643 return tem ? tem : expr;
14644 case 3:
14645 op0 = TREE_OPERAND (t, 0);
14646 op1 = TREE_OPERAND (t, 1);
14647 op2 = TREE_OPERAND (t, 2);
14648 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14649 return tem ? tem : expr;
14650 default:
14651 break;
14655 switch (code)
14657 case ARRAY_REF:
14659 tree op0 = TREE_OPERAND (t, 0);
14660 tree op1 = TREE_OPERAND (t, 1);
14662 if (TREE_CODE (op1) == INTEGER_CST
14663 && TREE_CODE (op0) == CONSTRUCTOR
14664 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14666 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14667 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14668 unsigned HOST_WIDE_INT begin = 0;
14670 /* Find a matching index by means of a binary search. */
14671 while (begin != end)
14673 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14674 tree index = (*elts)[middle].index;
14676 if (TREE_CODE (index) == INTEGER_CST
14677 && tree_int_cst_lt (index, op1))
14678 begin = middle + 1;
14679 else if (TREE_CODE (index) == INTEGER_CST
14680 && tree_int_cst_lt (op1, index))
14681 end = middle;
14682 else if (TREE_CODE (index) == RANGE_EXPR
14683 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14684 begin = middle + 1;
14685 else if (TREE_CODE (index) == RANGE_EXPR
14686 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14687 end = middle;
14688 else
14689 return (*elts)[middle].value;
14693 return t;
14696 /* Return a VECTOR_CST if possible. */
14697 case CONSTRUCTOR:
14699 tree type = TREE_TYPE (t);
14700 if (TREE_CODE (type) != VECTOR_TYPE)
14701 return t;
14703 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14704 unsigned HOST_WIDE_INT idx, pos = 0;
14705 tree value;
14707 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14709 if (!CONSTANT_CLASS_P (value))
14710 return t;
14711 if (TREE_CODE (value) == VECTOR_CST)
14713 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14714 vec[pos++] = VECTOR_CST_ELT (value, i);
14716 else
14717 vec[pos++] = value;
14719 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14720 vec[pos] = build_zero_cst (TREE_TYPE (type));
14722 return build_vector (type, vec);
14725 case CONST_DECL:
14726 return fold (DECL_INITIAL (t));
14728 default:
14729 return t;
14730 } /* switch (code) */
14733 #ifdef ENABLE_FOLD_CHECKING
14734 #undef fold
14736 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14737 hash_table <pointer_hash <tree_node> >);
14738 static void fold_check_failed (const_tree, const_tree);
14739 void print_fold_checksum (const_tree);
14741 /* When --enable-checking=fold, compute a digest of expr before
14742 and after the actual fold call to verify that fold did not
14743 accidentally change the original expr. */
14745 tree
14746 fold (tree expr)
14748 tree ret;
14749 struct md5_ctx ctx;
14750 unsigned char checksum_before[16], checksum_after[16];
14751 hash_table <pointer_hash <tree_node> > ht;
14753 ht.create (32);
14754 md5_init_ctx (&ctx);
14755 fold_checksum_tree (expr, &ctx, ht);
14756 md5_finish_ctx (&ctx, checksum_before);
14757 ht.empty ();
14759 ret = fold_1 (expr);
14761 md5_init_ctx (&ctx);
14762 fold_checksum_tree (expr, &ctx, ht);
14763 md5_finish_ctx (&ctx, checksum_after);
14764 ht.dispose ();
14766 if (memcmp (checksum_before, checksum_after, 16))
14767 fold_check_failed (expr, ret);
14769 return ret;
14772 void
14773 print_fold_checksum (const_tree expr)
14775 struct md5_ctx ctx;
14776 unsigned char checksum[16], cnt;
14777 hash_table <pointer_hash <tree_node> > ht;
14779 ht.create (32);
14780 md5_init_ctx (&ctx);
14781 fold_checksum_tree (expr, &ctx, ht);
14782 md5_finish_ctx (&ctx, checksum);
14783 ht.dispose ();
14784 for (cnt = 0; cnt < 16; ++cnt)
14785 fprintf (stderr, "%02x", checksum[cnt]);
14786 putc ('\n', stderr);
14789 static void
14790 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14792 internal_error ("fold check: original tree changed by fold");
14795 static void
14796 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14797 hash_table <pointer_hash <tree_node> > ht)
14799 tree_node **slot;
14800 enum tree_code code;
14801 union tree_node buf;
14802 int i, len;
14804 recursive_label:
14805 if (expr == NULL)
14806 return;
14807 slot = ht.find_slot (expr, INSERT);
14808 if (*slot != NULL)
14809 return;
14810 *slot = CONST_CAST_TREE (expr);
14811 code = TREE_CODE (expr);
14812 if (TREE_CODE_CLASS (code) == tcc_declaration
14813 && DECL_ASSEMBLER_NAME_SET_P (expr))
14815 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14816 memcpy ((char *) &buf, expr, tree_size (expr));
14817 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14818 expr = (tree) &buf;
14820 else if (TREE_CODE_CLASS (code) == tcc_type
14821 && (TYPE_POINTER_TO (expr)
14822 || TYPE_REFERENCE_TO (expr)
14823 || TYPE_CACHED_VALUES_P (expr)
14824 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14825 || TYPE_NEXT_VARIANT (expr)))
14827 /* Allow these fields to be modified. */
14828 tree tmp;
14829 memcpy ((char *) &buf, expr, tree_size (expr));
14830 expr = tmp = (tree) &buf;
14831 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14832 TYPE_POINTER_TO (tmp) = NULL;
14833 TYPE_REFERENCE_TO (tmp) = NULL;
14834 TYPE_NEXT_VARIANT (tmp) = NULL;
14835 if (TYPE_CACHED_VALUES_P (tmp))
14837 TYPE_CACHED_VALUES_P (tmp) = 0;
14838 TYPE_CACHED_VALUES (tmp) = NULL;
14841 md5_process_bytes (expr, tree_size (expr), ctx);
14842 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14843 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14844 if (TREE_CODE_CLASS (code) != tcc_type
14845 && TREE_CODE_CLASS (code) != tcc_declaration
14846 && code != TREE_LIST
14847 && code != SSA_NAME
14848 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14849 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14850 switch (TREE_CODE_CLASS (code))
14852 case tcc_constant:
14853 switch (code)
14855 case STRING_CST:
14856 md5_process_bytes (TREE_STRING_POINTER (expr),
14857 TREE_STRING_LENGTH (expr), ctx);
14858 break;
14859 case COMPLEX_CST:
14860 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14861 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14862 break;
14863 case VECTOR_CST:
14864 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14865 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14866 break;
14867 default:
14868 break;
14870 break;
14871 case tcc_exceptional:
14872 switch (code)
14874 case TREE_LIST:
14875 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14876 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14877 expr = TREE_CHAIN (expr);
14878 goto recursive_label;
14879 break;
14880 case TREE_VEC:
14881 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14882 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14883 break;
14884 default:
14885 break;
14887 break;
14888 case tcc_expression:
14889 case tcc_reference:
14890 case tcc_comparison:
14891 case tcc_unary:
14892 case tcc_binary:
14893 case tcc_statement:
14894 case tcc_vl_exp:
14895 len = TREE_OPERAND_LENGTH (expr);
14896 for (i = 0; i < len; ++i)
14897 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14898 break;
14899 case tcc_declaration:
14900 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14901 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14902 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14904 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14905 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14906 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14907 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14908 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14910 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14911 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14913 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14915 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14916 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14917 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14919 break;
14920 case tcc_type:
14921 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14922 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14923 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14924 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14925 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14926 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14927 if (INTEGRAL_TYPE_P (expr)
14928 || SCALAR_FLOAT_TYPE_P (expr))
14930 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14931 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14933 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14934 if (TREE_CODE (expr) == RECORD_TYPE
14935 || TREE_CODE (expr) == UNION_TYPE
14936 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14937 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14938 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14939 break;
14940 default:
14941 break;
14945 /* Helper function for outputting the checksum of a tree T. When
14946 debugging with gdb, you can "define mynext" to be "next" followed
14947 by "call debug_fold_checksum (op0)", then just trace down till the
14948 outputs differ. */
14950 DEBUG_FUNCTION void
14951 debug_fold_checksum (const_tree t)
14953 int i;
14954 unsigned char checksum[16];
14955 struct md5_ctx ctx;
14956 hash_table <pointer_hash <tree_node> > ht;
14957 ht.create (32);
14959 md5_init_ctx (&ctx);
14960 fold_checksum_tree (t, &ctx, ht);
14961 md5_finish_ctx (&ctx, checksum);
14962 ht.empty ();
14964 for (i = 0; i < 16; i++)
14965 fprintf (stderr, "%d ", checksum[i]);
14967 fprintf (stderr, "\n");
14970 #endif
14972 /* Fold a unary tree expression with code CODE of type TYPE with an
14973 operand OP0. LOC is the location of the resulting expression.
14974 Return a folded expression if successful. Otherwise, return a tree
14975 expression with code CODE of type TYPE with an operand OP0. */
14977 tree
14978 fold_build1_stat_loc (location_t loc,
14979 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14981 tree tem;
14982 #ifdef ENABLE_FOLD_CHECKING
14983 unsigned char checksum_before[16], checksum_after[16];
14984 struct md5_ctx ctx;
14985 hash_table <pointer_hash <tree_node> > ht;
14987 ht.create (32);
14988 md5_init_ctx (&ctx);
14989 fold_checksum_tree (op0, &ctx, ht);
14990 md5_finish_ctx (&ctx, checksum_before);
14991 ht.empty ();
14992 #endif
14994 tem = fold_unary_loc (loc, code, type, op0);
14995 if (!tem)
14996 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14998 #ifdef ENABLE_FOLD_CHECKING
14999 md5_init_ctx (&ctx);
15000 fold_checksum_tree (op0, &ctx, ht);
15001 md5_finish_ctx (&ctx, checksum_after);
15002 ht.dispose ();
15004 if (memcmp (checksum_before, checksum_after, 16))
15005 fold_check_failed (op0, tem);
15006 #endif
15007 return tem;
15010 /* Fold a binary tree expression with code CODE of type TYPE with
15011 operands OP0 and OP1. LOC is the location of the resulting
15012 expression. Return a folded expression if successful. Otherwise,
15013 return a tree expression with code CODE of type TYPE with operands
15014 OP0 and OP1. */
15016 tree
15017 fold_build2_stat_loc (location_t loc,
15018 enum tree_code code, tree type, tree op0, tree op1
15019 MEM_STAT_DECL)
15021 tree tem;
15022 #ifdef ENABLE_FOLD_CHECKING
15023 unsigned char checksum_before_op0[16],
15024 checksum_before_op1[16],
15025 checksum_after_op0[16],
15026 checksum_after_op1[16];
15027 struct md5_ctx ctx;
15028 hash_table <pointer_hash <tree_node> > ht;
15030 ht.create (32);
15031 md5_init_ctx (&ctx);
15032 fold_checksum_tree (op0, &ctx, ht);
15033 md5_finish_ctx (&ctx, checksum_before_op0);
15034 ht.empty ();
15036 md5_init_ctx (&ctx);
15037 fold_checksum_tree (op1, &ctx, ht);
15038 md5_finish_ctx (&ctx, checksum_before_op1);
15039 ht.empty ();
15040 #endif
15042 tem = fold_binary_loc (loc, code, type, op0, op1);
15043 if (!tem)
15044 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15046 #ifdef ENABLE_FOLD_CHECKING
15047 md5_init_ctx (&ctx);
15048 fold_checksum_tree (op0, &ctx, ht);
15049 md5_finish_ctx (&ctx, checksum_after_op0);
15050 ht.empty ();
15052 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15053 fold_check_failed (op0, tem);
15055 md5_init_ctx (&ctx);
15056 fold_checksum_tree (op1, &ctx, ht);
15057 md5_finish_ctx (&ctx, checksum_after_op1);
15058 ht.dispose ();
15060 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15061 fold_check_failed (op1, tem);
15062 #endif
15063 return tem;
15066 /* Fold a ternary tree expression with code CODE of type TYPE with
15067 operands OP0, OP1, and OP2. Return a folded expression if
15068 successful. Otherwise, return a tree expression with code CODE of
15069 type TYPE with operands OP0, OP1, and OP2. */
15071 tree
15072 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15073 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15075 tree tem;
15076 #ifdef ENABLE_FOLD_CHECKING
15077 unsigned char checksum_before_op0[16],
15078 checksum_before_op1[16],
15079 checksum_before_op2[16],
15080 checksum_after_op0[16],
15081 checksum_after_op1[16],
15082 checksum_after_op2[16];
15083 struct md5_ctx ctx;
15084 hash_table <pointer_hash <tree_node> > ht;
15086 ht.create (32);
15087 md5_init_ctx (&ctx);
15088 fold_checksum_tree (op0, &ctx, ht);
15089 md5_finish_ctx (&ctx, checksum_before_op0);
15090 ht.empty ();
15092 md5_init_ctx (&ctx);
15093 fold_checksum_tree (op1, &ctx, ht);
15094 md5_finish_ctx (&ctx, checksum_before_op1);
15095 ht.empty ();
15097 md5_init_ctx (&ctx);
15098 fold_checksum_tree (op2, &ctx, ht);
15099 md5_finish_ctx (&ctx, checksum_before_op2);
15100 ht.empty ();
15101 #endif
15103 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15104 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15105 if (!tem)
15106 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15108 #ifdef ENABLE_FOLD_CHECKING
15109 md5_init_ctx (&ctx);
15110 fold_checksum_tree (op0, &ctx, ht);
15111 md5_finish_ctx (&ctx, checksum_after_op0);
15112 ht.empty ();
15114 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15115 fold_check_failed (op0, tem);
15117 md5_init_ctx (&ctx);
15118 fold_checksum_tree (op1, &ctx, ht);
15119 md5_finish_ctx (&ctx, checksum_after_op1);
15120 ht.empty ();
15122 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15123 fold_check_failed (op1, tem);
15125 md5_init_ctx (&ctx);
15126 fold_checksum_tree (op2, &ctx, ht);
15127 md5_finish_ctx (&ctx, checksum_after_op2);
15128 ht.dispose ();
15130 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15131 fold_check_failed (op2, tem);
15132 #endif
15133 return tem;
15136 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15137 arguments in ARGARRAY, and a null static chain.
15138 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15139 of type TYPE from the given operands as constructed by build_call_array. */
15141 tree
15142 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15143 int nargs, tree *argarray)
15145 tree tem;
15146 #ifdef ENABLE_FOLD_CHECKING
15147 unsigned char checksum_before_fn[16],
15148 checksum_before_arglist[16],
15149 checksum_after_fn[16],
15150 checksum_after_arglist[16];
15151 struct md5_ctx ctx;
15152 hash_table <pointer_hash <tree_node> > ht;
15153 int i;
15155 ht.create (32);
15156 md5_init_ctx (&ctx);
15157 fold_checksum_tree (fn, &ctx, ht);
15158 md5_finish_ctx (&ctx, checksum_before_fn);
15159 ht.empty ();
15161 md5_init_ctx (&ctx);
15162 for (i = 0; i < nargs; i++)
15163 fold_checksum_tree (argarray[i], &ctx, ht);
15164 md5_finish_ctx (&ctx, checksum_before_arglist);
15165 ht.empty ();
15166 #endif
15168 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15170 #ifdef ENABLE_FOLD_CHECKING
15171 md5_init_ctx (&ctx);
15172 fold_checksum_tree (fn, &ctx, ht);
15173 md5_finish_ctx (&ctx, checksum_after_fn);
15174 ht.empty ();
15176 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15177 fold_check_failed (fn, tem);
15179 md5_init_ctx (&ctx);
15180 for (i = 0; i < nargs; i++)
15181 fold_checksum_tree (argarray[i], &ctx, ht);
15182 md5_finish_ctx (&ctx, checksum_after_arglist);
15183 ht.dispose ();
15185 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15186 fold_check_failed (NULL_TREE, tem);
15187 #endif
15188 return tem;
15191 /* Perform constant folding and related simplification of initializer
15192 expression EXPR. These behave identically to "fold_buildN" but ignore
15193 potential run-time traps and exceptions that fold must preserve. */
15195 #define START_FOLD_INIT \
15196 int saved_signaling_nans = flag_signaling_nans;\
15197 int saved_trapping_math = flag_trapping_math;\
15198 int saved_rounding_math = flag_rounding_math;\
15199 int saved_trapv = flag_trapv;\
15200 int saved_folding_initializer = folding_initializer;\
15201 flag_signaling_nans = 0;\
15202 flag_trapping_math = 0;\
15203 flag_rounding_math = 0;\
15204 flag_trapv = 0;\
15205 folding_initializer = 1;
15207 #define END_FOLD_INIT \
15208 flag_signaling_nans = saved_signaling_nans;\
15209 flag_trapping_math = saved_trapping_math;\
15210 flag_rounding_math = saved_rounding_math;\
15211 flag_trapv = saved_trapv;\
15212 folding_initializer = saved_folding_initializer;
15214 tree
15215 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15216 tree type, tree op)
15218 tree result;
15219 START_FOLD_INIT;
15221 result = fold_build1_loc (loc, code, type, op);
15223 END_FOLD_INIT;
15224 return result;
15227 tree
15228 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15229 tree type, tree op0, tree op1)
15231 tree result;
15232 START_FOLD_INIT;
15234 result = fold_build2_loc (loc, code, type, op0, op1);
15236 END_FOLD_INIT;
15237 return result;
15240 tree
15241 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15242 int nargs, tree *argarray)
15244 tree result;
15245 START_FOLD_INIT;
15247 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15249 END_FOLD_INIT;
15250 return result;
15253 #undef START_FOLD_INIT
15254 #undef END_FOLD_INIT
15256 /* Determine if the first argument is a multiple of the second argument.
15257 Return 0 if it is not, or if we cannot easily determine that it is.
15259 An example of the sort of thing we care about (at this point; this routine
15260 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15261 fold cases do now) is discovering that
15263 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15265 is a multiple of
15267 SAVE_EXPR (J * 8)
15269 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15271 This code also handles discovering that
15273 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15275 is a multiple of 8 so we don't have to worry about dealing with a
15276 possible remainder.
15278 Note that we *look* inside a SAVE_EXPR only to determine how it was
15279 calculated; it is not safe for fold to do much of anything else with the
15280 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15281 at run time. For example, the latter example above *cannot* be implemented
15282 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15283 evaluation time of the original SAVE_EXPR is not necessarily the same at
15284 the time the new expression is evaluated. The only optimization of this
15285 sort that would be valid is changing
15287 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15289 divided by 8 to
15291 SAVE_EXPR (I) * SAVE_EXPR (J)
15293 (where the same SAVE_EXPR (J) is used in the original and the
15294 transformed version). */
15296 int
15297 multiple_of_p (tree type, const_tree top, const_tree bottom)
15299 if (operand_equal_p (top, bottom, 0))
15300 return 1;
15302 if (TREE_CODE (type) != INTEGER_TYPE)
15303 return 0;
15305 switch (TREE_CODE (top))
15307 case BIT_AND_EXPR:
15308 /* Bitwise and provides a power of two multiple. If the mask is
15309 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15310 if (!integer_pow2p (bottom))
15311 return 0;
15312 /* FALLTHRU */
15314 case MULT_EXPR:
15315 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15316 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15318 case PLUS_EXPR:
15319 case MINUS_EXPR:
15320 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15321 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
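/* E.g. "i * 4 + 8" is a multiple of 4 because both "i * 4" and 8
   are. */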
15323 case LSHIFT_EXPR:
15324 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15326 tree op1, t1;
15328 op1 = TREE_OPERAND (top, 1);
15329 /* const_binop may not detect overflow correctly,
15330 so check for it explicitly here. */
15331 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
15332 && 0 != (t1 = fold_convert (type,
15333 const_binop (LSHIFT_EXPR,
15334 size_one_node,
15335 op1)))
15336 && !TREE_OVERFLOW (t1))
15337 return multiple_of_p (type, t1, bottom);
15339 return 0;
15341 case NOP_EXPR:
15342 /* Can't handle conversions from non-integral or wider integral type. */
15343 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15344 || (TYPE_PRECISION (type)
15345 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15346 return 0;
15348 /* ... fall through ... */
15350 case SAVE_EXPR:
15351 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15353 case COND_EXPR:
15354 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15355 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15357 case INTEGER_CST:
15358 if (TREE_CODE (bottom) != INTEGER_CST
15359 || integer_zerop (bottom)
15360 || (TYPE_UNSIGNED (type)
15361 && (tree_int_cst_sgn (top) < 0
15362 || tree_int_cst_sgn (bottom) < 0)))
15363 return 0;
15364 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
15365 SIGNED);
15367 default:
15368 return 0;
15372 /* Return true if CODE or TYPE is known to be non-negative. */
15374 static bool
15375 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15377 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15378 && truth_value_p (code))
15379 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15380 have a signed:1 type (where the values are -1 and 0). */
15381 return true;
15382 return false;
15385 /* Return true if (CODE OP0) is known to be non-negative. If the return
15386 value is based on the assumption that signed overflow is undefined,
15387 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15388 *STRICT_OVERFLOW_P. */
15390 bool
15391 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15392 bool *strict_overflow_p)
15394 if (TYPE_UNSIGNED (type))
15395 return true;
15397 switch (code)
15399 case ABS_EXPR:
15400 /* We can't return 1 if flag_wrapv is set because
15401 ABS_EXPR<INT_MIN> = INT_MIN. */
15402 if (!INTEGRAL_TYPE_P (type))
15403 return true;
15404 if (TYPE_OVERFLOW_UNDEFINED (type))
15406 *strict_overflow_p = true;
15407 return true;
15409 break;
15411 case NON_LVALUE_EXPR:
15412 case FLOAT_EXPR:
15413 case FIX_TRUNC_EXPR:
15414 return tree_expr_nonnegative_warnv_p (op0,
15415 strict_overflow_p);
15417 case NOP_EXPR:
15419 tree inner_type = TREE_TYPE (op0);
15420 tree outer_type = type;
15422 if (TREE_CODE (outer_type) == REAL_TYPE)
15424 if (TREE_CODE (inner_type) == REAL_TYPE)
15425 return tree_expr_nonnegative_warnv_p (op0,
15426 strict_overflow_p);
15427 if (INTEGRAL_TYPE_P (inner_type))
15429 if (TYPE_UNSIGNED (inner_type))
15430 return true;
15431 return tree_expr_nonnegative_warnv_p (op0,
15432 strict_overflow_p);
15435 else if (INTEGRAL_TYPE_P (outer_type))
15437 if (TREE_CODE (inner_type) == REAL_TYPE)
15438 return tree_expr_nonnegative_warnv_p (op0,
15439 strict_overflow_p);
15440 if (INTEGRAL_TYPE_P (inner_type))
15441 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15442 && TYPE_UNSIGNED (inner_type);
15445 break;
15447 default:
15448 return tree_simple_nonnegative_warnv_p (code, type);
15451 /* We don't know the sign of `t', so be conservative and return false. */
15452 return false;
15455 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15456 value is based on the assumption that signed overflow is undefined,
15457 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15458 *STRICT_OVERFLOW_P. */
15460 bool
15461 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15462 tree op1, bool *strict_overflow_p)
15464 if (TYPE_UNSIGNED (type))
15465 return true;
15467 switch (code)
15469 case POINTER_PLUS_EXPR:
15470 case PLUS_EXPR:
15471 if (FLOAT_TYPE_P (type))
15472 return (tree_expr_nonnegative_warnv_p (op0,
15473 strict_overflow_p)
15474 && tree_expr_nonnegative_warnv_p (op1,
15475 strict_overflow_p));
15477 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15478 both unsigned and at least 2 bits shorter than the result. */
15479 if (TREE_CODE (type) == INTEGER_TYPE
15480 && TREE_CODE (op0) == NOP_EXPR
15481 && TREE_CODE (op1) == NOP_EXPR)
15483 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15484 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15485 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15486 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15488 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15489 TYPE_PRECISION (inner2)) + 1;
15490 return prec < TYPE_PRECISION (type);
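/* E.g. two zero-extended unsigned chars added in a 32-bit int:
   each addend is below 256, so the sum is below 512 and fits in
   9 bits; 9 < 32, hence the result is nonnegative. */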
15493 break;
15495 case MULT_EXPR:
15496 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15498 /* x * x is always non-negative for floating point x
15499 or for integer x when signed overflow is undefined. */
15500 if (operand_equal_p (op0, op1, 0)
15501 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15502 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15504 if (TYPE_OVERFLOW_UNDEFINED (type))
15505 *strict_overflow_p = true;
15506 return true;
15510 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15511 both unsigned and their combined precision is smaller than the result's. */
15512 if (TREE_CODE (type) == INTEGER_TYPE
15513 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15514 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15516 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15517 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15518 : TREE_TYPE (op0);
15519 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15520 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15521 : TREE_TYPE (op1);
15523 bool unsigned0 = TYPE_UNSIGNED (inner0);
15524 bool unsigned1 = TYPE_UNSIGNED (inner1);
15526 if (TREE_CODE (op0) == INTEGER_CST)
15527 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15529 if (TREE_CODE (op1) == INTEGER_CST)
15530 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15532 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15533 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15535 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15536 ? tree_int_cst_min_precision (op0, UNSIGNED)
15537 : TYPE_PRECISION (inner0);
15539 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15540 ? tree_int_cst_min_precision (op1, UNSIGNED)
15541 : TYPE_PRECISION (inner1);
15543 return precision0 + precision1 < TYPE_PRECISION (type);
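/* E.g. two zero-extended unsigned chars multiplied in a 32-bit
   int: the product is at most 255 * 255, which fits in 16 bits;
   16 < 32, hence the result is nonnegative. */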
15546 return false;
15548 case BIT_AND_EXPR:
15549 case MAX_EXPR:
15550 return (tree_expr_nonnegative_warnv_p (op0,
15551 strict_overflow_p)
15552 || tree_expr_nonnegative_warnv_p (op1,
15553 strict_overflow_p));
15555 case BIT_IOR_EXPR:
15556 case BIT_XOR_EXPR:
15557 case MIN_EXPR:
15558 case RDIV_EXPR:
15559 case TRUNC_DIV_EXPR:
15560 case CEIL_DIV_EXPR:
15561 case FLOOR_DIV_EXPR:
15562 case ROUND_DIV_EXPR:
15563 return (tree_expr_nonnegative_warnv_p (op0,
15564 strict_overflow_p)
15565 && tree_expr_nonnegative_warnv_p (op1,
15566 strict_overflow_p));
15568 case TRUNC_MOD_EXPR:
15569 case CEIL_MOD_EXPR:
15570 case FLOOR_MOD_EXPR:
15571 case ROUND_MOD_EXPR:
15572 return tree_expr_nonnegative_warnv_p (op0,
15573 strict_overflow_p);
15574 default:
15575 return tree_simple_nonnegative_warnv_p (code, type);
15578 /* We don't know the sign of `t', so be conservative and return false. */
15579 return false;
15582 /* Return true if T is known to be non-negative. If the return
15583 value is based on the assumption that signed overflow is undefined,
15584 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15585 *STRICT_OVERFLOW_P. */
15587 bool
15588 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15590 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15591 return true;
15593 switch (TREE_CODE (t))
15595 case INTEGER_CST:
15596 return tree_int_cst_sgn (t) >= 0;
15598 case REAL_CST:
15599 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15601 case FIXED_CST:
15602 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15604 case COND_EXPR:
15605 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15606 strict_overflow_p)
15607 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15608 strict_overflow_p));
15609 default:
15610 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15611 TREE_TYPE (t));
15613 /* We don't know the sign of `t', so be conservative and return false. */
15614 return false;
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
		|| tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1, strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n, SIGNED);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0, strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
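
/* A sketch of the BUILT_IN_POW rule above (illustrative only; the
   function name is hypothetical and this assumes built-in decls have
   been initialized, so builtin_decl_explicit returns non-NULL).
   pow (x, 2.0) is non-negative for any x because the exponent is an
   even integer-valued REAL_CST.  */

static bool ATTRIBUTE_UNUSED
example_pow_even_exponent (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("x"), double_type_node);
  tree two = build_real (double_type_node, dconst2);
  bool strict_overflow_p = false;

  return tree_call_nonnegative_warnv_p (double_type_node,
					builtin_decl_explicit (BUILT_IN_POW),
					x, two, &strict_overflow_p);
}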
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0, arg1,
					      strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
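
/* For instance (an illustrative GENERIC shape, not a verbatim dump),
   a C++ temporary initialized by a compound statement may look like

       TARGET_EXPR <D.1234,
	 {
	   ...
	   D.1234 = x * x;   <-- last expression of the initializer
	 }>

   The loop above peels BIND_EXPRs, TRY_FINALLY_EXPRs, TRY_CATCH_EXPRs
   and STATEMENT_LISTs to reach that final assignment to the slot, and
   then asks whether its right-hand side (here x * x) is non-negative.  */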
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
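
/* A usage sketch for the predicate above (illustrative only; the
   function name is hypothetical, and the assertion assumes the
   default -fstrict-overflow semantics, i.e. no -fwrapv).  ABS_EXPR
   of a signed integer is non-negative only on the assumption that
   signed overflow is undefined -- with wrapping overflow,
   ABS_EXPR <INT_MIN> is INT_MIN -- so a "true" answer also raises
   the strict-overflow flag and may trigger a -Wstrict-overflow
   warning via fold_overflow_warning.  */

static void ATTRIBUTE_UNUSED
example_abs_nonnegative (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("x"), integer_type_node);
  tree abs_x = build1 (ABS_EXPR, integer_type_node, x);
  bool strict_overflow_p = false;

  if (tree_expr_nonnegative_warnv_p (abs_x, &strict_overflow_p))
    gcc_assert (strict_overflow_p);
}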
/* Return true when (CODE OP0) is known to be nonzero.  For floating
   point we further ensure that the value is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0, strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is known to be nonzero.  For floating
   point we further ensure that the value is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of the operands must be positive and the other
	     non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1, strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0, strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1, strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0, strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1, &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1, strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0, strict_overflow_p));

    default:
      break;
    }

  return false;
}
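
/* A sketch of the PLUS_EXPR logic above (illustrative only; the
   function name is hypothetical and the result assumes signed
   overflow is undefined for the type, i.e. no -fwrapv).  For 5 + 1
   both operands are non-negative and both are nonzero, so the sum is
   known to be nonzero without even setting *STRICT_OVERFLOW_P.  */

static bool ATTRIBUTE_UNUSED
example_plus_nonzero (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree one = build_int_cst (integer_type_node, 1);
  bool strict_overflow_p = false;

  return tree_binary_nonzero_warnv_p (PLUS_EXPR, integer_type_node,
				      five, one, &strict_overflow_p);
}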
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
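
/* A sketch of the ADDR_EXPR case above (illustrative only; the
   function name is hypothetical and the result assumes
   -fdelete-null-pointer-checks is in effect).  The address of a
   non-weak variable is known not to be null.  */

static bool ATTRIBUTE_UNUSED
example_addr_nonzero (void)
{
  tree v = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("v"), integer_type_node);
  tree addr = build_fold_addr_expr (v);
  bool strict_overflow_p = false;

  return tree_single_nonzero_warnv_p (addr, &strict_overflow_p);
}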
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
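
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical): 2 + 3 folds to the INTEGER_CST 5, whereas an
   expression that folds to something non-constant makes
   fold_binary_to_constant return NULL_TREE.  */

static tree ATTRIBUTE_UNUSED
example_fold_binary_to_constant (void)
{
  tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				       build_int_cst (integer_type_node, 2),
				       build_int_cst (integer_type_node, 3));
  gcc_assert (five && tree_to_shwi (five) == 5);
  return five;
}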
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
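
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical): build_string_literal gives &"abc"[0], so an
   INDIRECT_REF of it reads element 0 of the string constant and
   folds to the INTEGER_CST 'a'.  */

static tree ATTRIBUTE_UNUSED
example_read_from_string (void)
{
  tree ptr = build_string_literal (4, "abc");
  tree elt = build1 (INDIRECT_REF, char_type_node, ptr);

  return fold_read_from_constant_string (elt);
}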
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
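
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical): negating INT_MIN wraps back to INT_MIN, so
   force_fit_type marks the result with TREE_OVERFLOW instead of
   producing a different value.  */

static tree ATTRIBUTE_UNUSED
example_negate_int_min (void)
{
  tree neg = fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
				integer_type_node);
  gcc_assert (TREE_OVERFLOW (neg));
  return neg;
}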
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
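
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical): NaN != NaN always folds to true, while NaN < NaN
   folds to false only when -ftrapping-math is off -- otherwise the
   fold is declined (NULL_TREE) because LT on a NaN should raise an
   invalid-operand exception at run time.  */

static tree ATTRIBUTE_UNUSED
example_nan_compare (void)
{
  REAL_VALUE_TYPE r;
  real_nan (&r, "", 1, TYPE_MODE (double_type_node));
  tree nan = build_real (double_type_node, r);

  return fold_relational_const (NE_EXPR, boolean_type_node, nan, nan);
}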
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a RETURN_EXPR, check whether its operand -- or,
     when that operand is a MODIFY_EXPR, the right-hand side of the
     assignment -- has side effects.  If not, we don't need to wrap the
     expression in a cleanup point expression.  Note that we don't check
     the left-hand side of the MODIFY_EXPR because it should always be
     the RESULT_DECL.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_to_shwi (part_width) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
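
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical) of the simplest case above: *&x folds back to x.  */

static tree ATTRIBUTE_UNUSED
example_fold_indirect_ref (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("x"), integer_type_node);
  tree deref = fold_indirect_ref_1 (UNKNOWN_LOCATION, integer_type_node,
				    build_fold_addr_expr (x));
  gcc_assert (deref == x);
  return deref;
}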
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
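
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical): when the value of (x = 1, x + 2) is unused, the
   x + 2 half has no side effects, so only the assignment remains.  */

static tree ATTRIBUTE_UNUSED
example_ignored_result (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("x"), integer_type_node);
  tree init = build2 (MODIFY_EXPR, integer_type_node, x,
		      build_int_cst (integer_type_node, 1));
  tree use = build2 (PLUS_EXPR, integer_type_node, x,
		     build_int_cst (integer_type_node, 2));
  tree compound = build2 (COMPOUND_EXPR, integer_type_node, init, use);

  TREE_SIDE_EFFECTS (init) = 1;
  TREE_SIDE_EFFECTS (compound) = 1;
  return fold_ignored_result (compound);	/* The MODIFY_EXPR.  */
}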
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = value;
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val &= ~(divisor - 1);
	  val += divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
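
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical) of the power-of-two fast path above: rounding 37 up
   to a multiple of 8 computes (37 & ~7) + 8 == 40.  */

static tree ATTRIBUTE_UNUSED
example_round_up (void)
{
  tree v = round_up_loc (UNKNOWN_LOCATION, build_int_cst (sizetype, 37), 8);
  gcc_assert (tree_to_shwi (v) == 40);
  return v;
}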
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
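
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical, and the expected difference of 8 bytes assumes a
   32-bit int): &a[3] and &a[1] share the core &a, so their
   difference is the compile-time constant 2 * sizeof (int).  */

static bool ATTRIBUTE_UNUSED
example_ptr_difference (void)
{
  tree atype = build_array_type (integer_type_node,
				 build_index_type (size_int (9)));
  tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("a"), atype);
  tree e1 = build_fold_addr_expr (build4 (ARRAY_REF, integer_type_node, a,
					  size_int (3), NULL_TREE,
					  NULL_TREE));
  tree e2 = build_fold_addr_expr (build4 (ARRAY_REF, integer_type_node, a,
					  size_int (1), NULL_TREE,
					  NULL_TREE));
  HOST_WIDE_INT diff;

  return ptr_difference_const (e1, e2, &diff) && diff == 8;
}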
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
				arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
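
/* An illustrative sketch (not part of GCC; the function name is
   hypothetical, and it assumes -frounding-math is off so
   HONOR_SIGN_DEPENDENT_ROUNDING is false): when only the magnitude
   of the result matters, e.g. inside fabs (), the negation in
   -x * y can be dropped to give x * y.  */

static tree ATTRIBUTE_UNUSED
example_strip_sign_ops (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("x"), double_type_node);
  tree y = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("y"), double_type_node);
  tree expr = build2 (MULT_EXPR, double_type_node,
		      build1 (NEGATE_EXPR, double_type_node, x), y);

  return fold_strip_sign_ops (expr);	/* x * y.  */
}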